Commit 2d457ac4 authored by Elger Jonker's avatar Elger Jonker

solving various errors in scanning headers

parent c89c3397
Pipeline #17784793 passed with stages
in 10 minutes and 49 seconds
......@@ -33,7 +33,31 @@ Scans for lack of HTTPS. If there is only HTTP and no HTTPS on the standard port
### Scan Security Headers
Scans for HTTP server headers. Results in medium and low risk issues.
###
### Scan TLS Qualys
Runs a TLS scan
### Rebuild Rating
### Onboard
### Subdomains (Certificate Transparency)
### Subdomains (NSEC)
### Discover HTTP(S) Endpoints
### Create Screenshot
Doesn't work.
### Declare dead
......@@ -54,6 +78,11 @@ Server access (server managers only):
ssh -A user@faalserver.faalkaart.nl
sudo su -
journalctl -u docker-failmap-<tab><tab>
or
journalctl -f -u docker-failmap*
failmap-logtail
```
This will allow you to see the output of running services.
......
......@@ -142,7 +142,7 @@ def dnsrecon_default(urls):
logger.debug("DNS results will be stored in file: %s" % path)
# never continue with wildcard domains
p = subprocess.Popen(['python2', dnsrecon,
p = subprocess.Popen(['python', dnsrecon,
'--type', '"rvl,srv,axfr,snoop,zonewalk"'
'--domain', url.url,
'-j', path], stdin=subprocess.PIPE)
......@@ -217,8 +217,9 @@ def discover_wildcard_scan(url: Url):
logger.debug("DNS results will be stored in file: %s" % path)
# never continue with wildcard domains
# solving https://sentry.io/internet-cleanup-foundation/faalkaart/issues/467465408/
randomize_nonsense_subdomains_wordlist()
command = ['python2', dnsrecon,
command = ['python', dnsrecon,
'--domain', url.url,
'-t', 'brt',
'--iw', # --iw: # always try wild card domains.
......@@ -358,7 +359,7 @@ def bruteforce_scan(urls: List[Url], wordlist: str):
logger.debug("DNS results will be stored in file: %s" % path)
# never continue with wildcard domains
p = subprocess.Popen(['python2', dnsrecon,
p = subprocess.Popen(['python', dnsrecon,
'--domain', url.url,
'-t', 'brt',
'-D', wordlist,
......@@ -448,7 +449,7 @@ def nsec_scan(urls: List[Url]):
added = []
for url in urls:
file = settings.TOOLS['dnsrecon']['output_dir'] + "%s_nsec.json" % url.url
command = ['python2', dnsrecon, '-t', 'zonewalk', '-d', url.url, '-z', '-j', file]
command = ['python', dnsrecon, '-t', 'zonewalk', '-d', url.url, '-z', '-j', file]
try:
subprocess.check_output(command)
added += import_dnsrecon_report(url, file)
......
......@@ -377,6 +377,20 @@ def resolves(url: str):
return False
def resolves_on_v4(url: str):
    """Return True when the url resolves to at least one IPv4 address."""
    ipv4_addresses, _ = get_ips(url)
    return bool(ipv4_addresses)
def resolves_on_v6(url: str):
    """Return True when the url resolves to at least one IPv6 address."""
    _, ipv6_addresses = get_ips(url)
    return True if ipv6_addresses else False
def has_internet_connection(host: str="8.8.8.8", port: int=53, connection_timeout: int=10):
"""
https://stackoverflow.com/questions/3764291/checking-network-connection#3764660
......
......@@ -12,7 +12,8 @@ from celery import Task, group
from failmap.organizations.models import Organization, Url
from failmap.scanners.endpoint_scan_manager import EndpointScanManager
from failmap.scanners.scanner_http import redirects_to_safety, verify_is_secure
from failmap.scanners.scanner_http import (redirects_to_safety, resolves_on_v4, resolves_on_v6,
verify_is_secure)
from ..celery import app
from .models import Endpoint
......@@ -85,6 +86,7 @@ def scan_url(url: Url):
saved_by_the_bell = "Redirects to a secure site, while a secure counterpart on the standard port is missing."
no_https_at_all = "Site does not redirect to secure url, and has no secure alternative on a standard port."
cleaned_up = "Has a secure equivalent, which wasn't so in the past."
not_resolvable_at_all = "Cannot be resolved anymore, seems to be cleaned up."
# The default ports matter for normal humans. All services on other ports are special services.
# we only give points if there is not a normal https site when there is a normal http site.
......@@ -125,35 +127,49 @@ def scan_url(url: Url):
# Some organizations redirect the http site to a non-standard https port.
# occurs more than once... you still have to follow redirects?
if has_http_v4 and not has_https_v4:
log.debug("This url seems to have no https at all: %s" % url)
log.debug("Checking if they exist, to be sure there is nothing.")
# todo: doesn't work anymore, as it's async
# quick fix: run it again after the discovery tasks have finished.
if not verify_is_secure(http_v4_endpoint):
log.info("Checking if the URL redirects to a secure url: %s" % url)
if redirects_to_safety(http_v4_endpoint):
log.info("%s redirects to safety, saved by the bell." % url)
scan_manager.add_scan("plain_https", http_v4_endpoint, "25", saved_by_the_bell)
else:
log.info("%s does not have a https site. Saving/updating scan." % url)
scan_manager.add_scan("plain_https", http_v4_endpoint, "1000", no_https_at_all)
# fixing https://sentry.io/internet-cleanup-foundation/faalkaart/issues/435116126/
if not resolves_on_v4(url.url):
# the endpoint scanner will probably find there is no endpoint anymore as well...
log.debug("Does not resolve at all, so has no insecure endpoints. %s" % url)
scan_manager.add_scan("plain_https", http_v4_endpoint, "0", not_resolvable_at_all)
else:
log.debug("This url seems to have no https at all: %s" % url)
log.debug("Checking if they exist, to be sure there is nothing.")
# todo: doesn't work anymore, as it's async
# quick fix: run it again after the discovery tasks have finished.
if not verify_is_secure(http_v4_endpoint):
log.info("Checking if the URL redirects to a secure url: %s" % url)
if redirects_to_safety(http_v4_endpoint):
log.info("%s redirects to safety, saved by the bell." % url)
scan_manager.add_scan("plain_https", http_v4_endpoint, "25", saved_by_the_bell)
else:
log.info("%s does not have a https site. Saving/updating scan." % url)
scan_manager.add_scan("plain_https", http_v4_endpoint, "1000", no_https_at_all)
else:
# it is secure, and if there was a rating, then reduce it to 0 (with a new rating).
if scan_manager.had_scan_with_points("plain_https", http_v4_endpoint):
scan_manager.add_scan("plain_https", http_v4_endpoint, "0", cleaned_up)
if has_http_v6 and not has_https_v6:
if not verify_is_secure(http_v6_endpoint):
if redirects_to_safety(http_v6_endpoint):
scan_manager.add_scan("plain_https", http_v6_endpoint, "25", saved_by_the_bell)
else:
scan_manager.add_scan("plain_https", http_v6_endpoint, "1000", no_https_at_all)
# fixing https://sentry.io/internet-cleanup-foundation/faalkaart/issues/435116126/
if not resolves_on_v6(url.url):
# the endpoint scanner will probably find there is no endpoint anymore as well...
log.debug("Does not resolve at all, so has no insecure endpoints. %s" % url)
scan_manager.add_scan("plain_https", http_v6_endpoint, "0", not_resolvable_at_all)
else:
if not verify_is_secure(http_v6_endpoint):
if redirects_to_safety(http_v6_endpoint):
scan_manager.add_scan("plain_https", http_v6_endpoint, "25", saved_by_the_bell)
else:
scan_manager.add_scan("plain_https", http_v6_endpoint, "1000", no_https_at_all)
else:
# it is secure, and if there was a rating, then reduce it to 0 (with a new rating).
if scan_manager.had_scan_with_points("plain_https", http_v6_endpoint):
scan_manager.add_scan("plain_https", http_v6_endpoint, "0", cleaned_up)
return 'done'
return
......@@ -7,6 +7,7 @@ from datetime import datetime
import pytz
import requests
import urllib3
from celery import Task, group
from requests import ConnectionError, ConnectTimeout, HTTPError, ReadTimeout, Timeout
......@@ -173,7 +174,25 @@ def get_headers(self, uri_url):
for header in response.headers:
log.debug('Received header: %s' % header)
return response
except (ConnectTimeout, HTTPError, ReadTimeout, Timeout, ConnectionError) as e:
# The amount of possible return states is overwhelming :)
# Solving https://sentry.io/internet-cleanup-foundation/faalkaart/issues/460895712/
# https://sentry.io/internet-cleanup-foundation/faalkaart/issues/460895699/
# ValueError, really don't know how to further handle it.
#
# Solving https://sentry.io/internet-cleanup-foundation/faalkaart/issues/425503689/
# requests.TooManyRedirects
#
# Solving https://sentry.io/internet-cleanup-foundation/faalkaart/issues/425507209/
# LocationValueError - No host specified.
# it redirects to something like https:/// (with three slashes) and then somewhere it crashes
# possibly an error in requests.
#
# Possibly tooManyRedirects could be plotted on the map, given this is a configuration error
except (ConnectTimeout, HTTPError, ReadTimeout, Timeout, ConnectionError, ValueError,
requests.TooManyRedirects, urllib3.exceptions.LocationValueError) as e:
# If an expected error is encountered put this task back on the queue to be retried.
# This will keep the chained logic in place (saving result after successful scan).
# Retry delay and total number of attempts is configured in the task decorator.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment