Commit b36c8f2c authored by Johan Bloemberg's avatar Johan Bloemberg

Merge branch 'master' into '57-implement-security-header-check-worker'

# Conflicts:
#   .gitignore
#   failmap_admin/settings.py
parents f2cdcbb3 e1cef456
@@ -30,3 +30,4 @@ vendor/Google Chrome.app/
failmap_dataset*
failmap_testdataset*
dev_db/
failmap_debug_dataset*
import logging
from django.core.management.base import BaseCommand
from failmap_admin.map.determineratings import default_ratings
logger = logging.getLogger(__package__)
class Command(BaseCommand):
    help = 'Gives a default rating to organizations that do not have one yet.'
def handle(self, *args, **options):
default_ratings()
@@ -443,4 +443,9 @@ path {
float: right;
position: relative;
top: -15px;
}
.organization_points {
font-size: inherit !important;
margin: 0 5px 0 0 !important;
}
\ No newline at end of file
@@ -57,8 +57,8 @@ var failmap = {
initializemap: function () {
this.map = L.map('map').setView([52.15, 5.8], 8);
this.map.scrollWheelZoom.disable();
let tile_uri_base = 'https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png'
let tile_uri_params = 'access_token={accessToken}'
let tile_uri_base = 'https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png';
let tile_uri_params = 'access_token={accessToken}';
let tile_uri = tile_uri_base + '?' + tile_uri_params;
// allow tiles to be fetched through a proxy to apply our own caching rules
@@ -106,11 +106,14 @@ var failmap = {
L.DomEvent.disableClickPropagation(this._div);
this._div.innerHTML = '<div class="page-header" id="fullscreenreport" v-if="visible">\n' +
' <div v-if="name" class="fullscreenlayout">\n' +
' <h1>{{ name }}</h1>\n' +
' <h1><span class="organization_points" v-html="organization_points(points)"></span> {{ name }}</h1>\n' +
' <p class="closebutton" onclick="vueFullScreenReport.hide()">X</p>\n' +
' <div>\n' +
' Dit resultaat delen? <span v-html="create_twitter_link(name, twitter_handle, points)"></span><br />' +
' <br />' +
' Gegevens van: {{ humanize(when) }}<br />\n' +
' Score: {{ points }}, congratulations!<br />\n' +
' <br />' +
' Gaat faalkaart niet ver genoeg? <a v-bind:href="\'mailto:incoming+failmap/[email protected]?subject=Pentest%20aanvraag%20voor%20\'+name+\'&body=Beste Faalkaart,%0D%0A%0D%0AWij hebben interesse in een pentest op de outward-facing IT van onze organisatie. Kunnen jullie daar bij helpen?%0D%0A%0D%0AMet vriendelijke groet,%0D%0A%0D%0A\'">Vraag hier een echte pentest aan.</a><br/>\n' +
' Ontbreken er domeinen? <a v-bind:href="\'mailto:incoming+failmap/[email protected]?subject=Nieuwe%20domeinen%20voor%20\'+name+\'&body=Beste Faalkaart,%0D%0A%0D%0AGraag de volgende domeinen toevoegen aan de kaart:%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0ATip: stuur een zonefile mee met alle domeinen.%0D%0A%0D%0AMet vriendelijke groet,%0D%0A%0D%0A\'">Stuur hier domeinen in.</a><br/>\n' +
' <br />\n' +
@@ -558,7 +561,9 @@ $(document).ready(function () {
data: {
calculation: '',
rating: 0,
points: 0,
when: 0,
twitter_handle: '',
name: "",
urls: Array
},
@@ -623,6 +628,13 @@ $(document).ready(function () {
marker = points;
return '<span class="total_awarded_points_'+ this.colorize(points) +'">' + marker + '</span>'
},
organization_points: function(points) {
if (points === 0)
marker = "✓ perfect";
else
marker = points;
return '<span class="total_awarded_points_'+ this.colorize(points) +'">' + marker + '</span>'
},
awarded_points: function(points) {
if (points === 0)
marker = "✓ perfect";
@@ -646,12 +658,22 @@ $(document).ready(function () {
vueReport.points = data.rating;
vueReport.when = data.when;
vueReport.name = data.name;
vueReport.twitter_handle = data.twitter_handle;
});
},
show_in_browser: function(){
// you can only jump once to an anchor, unless you use a dummy
location.hash = "#loading";
location.hash = "#report";
},
create_twitter_link: function(name, twitter_handle, points){
if (twitter_handle) {
if (points) {
return "<a role='button' class='btn btn-xs btn-info' target='_blank' href=\"https://twitter.com/intent/tweet?screen_name=" + twitter_handle + '&text=' + name + ' heeft ' + points + ' punten op Faalkaart! Bescherm mijn gegevens beter! 🥀&hashtags=' + name + ',faal,faalkaart"><img src="/static/images/twitterwhite.png" width="14" /> Tweet!</a>';
} else {
return "<a role='button' class='btn btn-xs btn-info' target='_blank' href=\"https://twitter.com/intent/tweet?screen_name=" + twitter_handle + '&text=' + name + ' heeft alles op orde! 🌹&hashtags=' + name + ',win,faalkaart"><img src="/static/images/twitterwhite.png" width="14" /> Tweet!</a>';
}
}
}
}
});
@@ -661,8 +683,10 @@ $(document).ready(function () {
data: {
calculation: '',
rating: 0,
points: 0,
when: 0,
name: "",
twitter_handle: '',
urls: Array,
visible: false
},
@@ -704,6 +728,13 @@ $(document).ready(function () {
second_opinion_links: function(rating, url){
return vueReport.second_opinion_links(rating, url);
},
organization_points: function(points) {
if (points === 0)
marker = "✓ perfect";
else
marker = points;
return '<span class="total_awarded_points_'+ this.colorize(points) +'">' + marker + '</span>'
},
total_awarded_points: function(points) {
if (points === 0)
marker = "✓ perfect";
@@ -733,7 +764,11 @@ $(document).ready(function () {
vueFullScreenReport.points = data.rating;
vueFullScreenReport.when = data.when;
vueFullScreenReport.name = data.name;
vueFullScreenReport.twitter_handle = data.twitter_handle;
});
},
create_twitter_link: function(name, twitter_handle, points) {
return vueReport.create_twitter_link(name, twitter_handle, points);
}
}
});
......
@@ -278,12 +278,15 @@
<a name="report" class="jumptonav"></a>
<div v-if="name">
<div class="page-header">
<h1>{% verbatim %}{{ name }}{% endverbatim %}</h1>
<h1><span class="organization_points" v-html="organization_points(points)"></span> {% verbatim %}{{ name }}{% endverbatim %}</h1>
<a href="#" class="backtomap">back to map ↑</a>
</div>
<div>
Dit resultaat delen? {% verbatim %}<span v-html="create_twitter_link(name, twitter_handle, points)"></span>{% endverbatim %}<br />
<br />
{% trans "Data from" %}: {% verbatim %}{{ humanize(when) }}{% endverbatim %}<br />
{% trans "Points" %}: {% verbatim %}{{ points }}{% endverbatim %}, {% trans "congratulations" %}!<br />
<br />
Gaat faalkaart niet ver genoeg? <a v-bind:href="'mailto:incoming+failmap/[email protected]?subject=Pentest%20aanvraag%20voor%20'+name+'&body=Beste Faalkaart,%0D%0A%0D%0AWij hebben interesse in een pentest op de outward-facing IT van onze organisatie. Kunnen jullie daar bij helpen?%0D%0A%0D%0AMet vriendelijke groet,%0D%0A%0D%0A'">Vraag hier een echte pentest aan.</a><br/>
Ontbreken er domeinen? <a v-bind:href="'mailto:incoming+failmap/[email protected]?subject=Nieuwe%20domeinen%20voor%20'+name+'&body=Beste Faalkaart,%0D%0A%0D%0AGraag de volgende domeinen toevoegen aan de kaart:%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0ATip: stuur een zonefile mee met alle domeinen.%0D%0A%0D%0AMet vriendelijke groet,%0D%0A%0D%0A'">Stuur hier domeinen in.</a><br/>
<br />
......
@@ -23,7 +23,7 @@
<td><a v-on:click="showReport(rank.OrganizationID)">{{ rank.OrganizationName }}</a></td>
<td>{{ rank.Points }}</td>
<!-- https://dev.twitter.com/web/tweet-button/parameters -->
<td><a v-if="rank.OrganizationTwitter" :href="'https://twitter.com/intent/tweet?screen_name=' + rank.OrganizationTwitter + '&text=' + rank.OrganizationTwitter + ' gefeliciteerd! ' + rank.OrganizationName + ' staat positie ' + rank.Rank + ' op Faalkaart! 🥀&hashtags=' + rank.OrganizationName + ',faal,failmap'">Tweet! 📣</a></td>
<td><a v-if="rank.OrganizationTwitter" :href="'https://twitter.com/intent/tweet?screen_name=' + rank.OrganizationTwitter + '&text=' + rank.OrganizationTwitter + ' gefeliciteerd! ' + rank.OrganizationName + ' staat positie ' + rank.Rank + ' op Faalkaart! 🥀&hashtags=' + rank.OrganizationName + ',faal,faalkaart'">Tweet! 📣</a></td>
</tr>
{% endverbatim %}
</tbody>
......
@@ -65,23 +65,28 @@ def organization_report(request, organization_id, weeks_back=0):
'organizationrating__calculation',
'organizationrating__when',
'name',
'pk').latest('organizationrating__when')
'pk',
'twitter_handle').latest('organizationrating__when')
# latest replaced: order_by('-organizationrating__when')[:1].get()
report_json = """
{
"rating": %s,
"when": "%s",
"name": "%s",
"id": %s,
"twitter_handle": "%s",
"rating": %s,
"when": "%s",
"calculation": %s
}
"""
report_json = report_json % (r['organizationrating__rating'],
r['organizationrating__when'],
r['name'],
r['pk'],
r['organizationrating__calculation'])
report_json = report_json % (
r['name'],
r['pk'],
r['twitter_handle'],
r['organizationrating__rating'],
r['organizationrating__when'],
r['organizationrating__calculation'],
)
# print(report_json)
except Organization.DoesNotExist:
report_json = "{}"
@@ -427,10 +432,6 @@ def stats_determine_when(stat, weeks_back=0):
return when
def recursively_default_dict():
return collections.defaultdict(recursively_default_dict)
@cache_page(one_hour)
def stats(request, weeks_back=0):
timeframes = {'now': 0, '7 days ago': 0, '2 weeks ago': 0, '3 weeks ago': 0, '1 month ago': 0,
......
@@ -8,7 +8,7 @@ from failmap_admin.map.determineratings import (OrganizationRating, UrlRating, r
rate_url)
from failmap_admin.scanners.models import Endpoint
from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency
from failmap_admin.scanners.scanner_http import scan_url_list_standard_ports
from failmap_admin.scanners.scanner_http import scan_urls_on_standard_ports
from failmap_admin.scanners.scanner_tls_qualys import ScannerTlsQualys
from .models import Coordinate, Organization, OrganizationType, Url
@@ -153,7 +153,7 @@ class UrlAdmin(admin.ModelAdmin):
def discover_http_endpoints(self, request, queryset):
urls_to_scan = [url for url in queryset]
scan_url_list_standard_ports(urls_to_scan)
scan_urls_on_standard_ports(urls_to_scan)
self.message_user(request, "URL(s) have been scanned for HTTP")
......
import logging
from django.core.management.base import BaseCommand
from failmap_admin.organizations.models import Organization, Url
from failmap_admin.scanners.models import Endpoint
from failmap_admin.scanners.scanner_http import scan_url, scan_urls
from .support.arguments import add_discover_verify, add_organization_argument
logger = logging.getLogger(__package__)
# todo: add command line arguments: port and protocol.
class Command(BaseCommand):
help = 'Discover http(s) endpoints on well known ports.'
def add_arguments(self, parser):
add_organization_argument(parser)
add_discover_verify(parser)
def handle(self, *args, **options):
# some expansion magic to avoid using eval
func = "verify_existing_endpoints" if options['method'] == "verify" else "discover_endpoints"
functionlist = {"verify_existing_endpoints": verify_existing_endpoints,
"discover_endpoints": discover_endpoints}
if not options['organization']:
functionlist[func]()
return
if options['organization'][0] == "_ALL_":
functionlist[func]()
return
organization = Organization.objects.all().filter(name=options['organization'][0])
functionlist[func](organization=organization)
def verify_existing_endpoints(port=None, protocol=None, organization=None):
"""
    Checks whether all http(s) endpoints still exist. This is to monitor changes in the existing
dataset, without contacting an organization too often. It can be checked every few days,
as trying to find new endpoints is more involved and should not be run more than once every
two to four weeks.
    The only possible outcome of this scanner is the same number of endpoints or fewer than we currently have.
:return: None
"""
endpoints = Endpoint.objects.all().filter(is_dead=False,
url__not_resolvable=False,
url__is_dead=False)
if port:
endpoints = endpoints.filter(port=port)
if protocol:
endpoints = endpoints.filter(protocol=protocol)
else:
endpoints = endpoints.filter(protocol__in=['http', 'https'])
if organization:
endpoints = endpoints.filter(url__organization=organization)
for endpoint in endpoints:
scan_url(endpoint.url, endpoint.port, endpoint.protocol)
def discover_endpoints(port=None, protocol=None, organization=None):
"""
:return: None
"""
    urls = Url.objects.all().filter(is_dead=False, not_resolvable=False)
if organization:
urls = urls.filter(organization=organization)
if protocol:
protocols = [protocol]
else:
protocols = ['http', 'https']
if port:
ports = [port]
else:
# Yes, HTTP sites on port 443 exist, we've seen many of them. Not just warnings(!).
# Don't underestimate the flexibility of the internet.
ports = [80, 81, 82, 88, 443, 8008, 8080, 8088, 8443, 8888, 9443]
logger.debug("Going to scan %s urls." % urls.count())
scan_urls(urls, ports, protocols)
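The split the docstrings above describe (cheap re-verification every few days, expensive discovery every few weeks) maps onto two direct calls. A usage sketch, for example from a Django shell; the keyword arguments are the ones defined above and "Arnhem" is only an example organization name.

from failmap_admin.organizations.models import Organization

# cheap: re-check the endpoints we already know about
verify_existing_endpoints()

# expensive: probe for new endpoints, limited to one organization
organization = Organization.objects.all().filter(name="Arnhem")
discover_endpoints(organization=organization)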
import logging
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
from failmap_admin.organizations.models import Organization, Url
from failmap_admin.scanners.scanner_http import ScannerHttp
logger = logging.getLogger(__package__)
# todo: when tls scanner ends, it hangs.
# todo: add command line arguments: port and protocol.
class Command(BaseCommand):
help = 'Discover http sites'
def add_arguments(self, parser):
parser.add_argument(
'--organization',
nargs=1
)
def handle(self, *args, **options):
# urls without endpoints
if not options['organization']:
urls_without_endpoints = \
Url.objects.all().filter(
not_resolvable=False, is_dead=False).exclude(
endpoint__protocol__in=['http', 'https'])
logger.debug("Found %s urls that don't have http(s) endpoints yet. "
% urls_without_endpoints.count())
ScannerHttp.scan_url_list_standard_ports(urls_without_endpoints)
else:
if options['organization'] and options['organization'] == "_ALL_":
s = ScannerHttp()
s.scan_multithreaded(port=8443, protocol="https")
else:
logging.debug("Looking for organization: %s" % options['organization'][0])
try:
o = Organization.objects.get(name=options['organization'][0])
urls = Url.objects.all().filter(organization=o)
ScannerHttp.scan_url_list_standard_ports(urls)
except ObjectDoesNotExist:
logging.debug("Organization was not found.")
import argparse
from django.core.exceptions import ObjectDoesNotExist
from failmap_admin.organizations.models import Organization
def add_organization_argument(parser):
return parser.add_argument(
'--organization', '-o',
help="Name of an organization, for example Arnhem. Prefix spaces with a backslash (\\)",
nargs=1,
required=False,
type=valid_organization
)
def add_discover_verify(parser):
return parser.add_argument(
'--method', '-m',
help="verify|discover. Verify checks all existing ones, discover tries to find new ones.",
nargs='?',
required=False,
type=valid_discover_verify,
default="verify"
)
def valid_organization(name):
if "_ALL_" in name:
return "_ALL_"
try:
o = Organization.objects.get(name=name)
return o.name
except ObjectDoesNotExist:
raise argparse.ArgumentTypeError("%s is not a valid organization or _ALL_" % name)
def valid_discover_verify(option):
if option == "verify" or option == "discover":
return option
raise argparse.ArgumentTypeError("Method can be either 'discover' or 'verify'. Given: " % option)
@@ -33,98 +33,106 @@ import requests
from requests import ConnectTimeout, HTTPError, ReadTimeout, Timeout
from requests.exceptions import ConnectionError
from failmap_admin.organizations.models import Url
from failmap_admin.celery import app
from .models import Endpoint
logger = logging.getLogger(__package__)
# todo: separate finding IP addresses from finding endpoints.
def scan(self):
# clean url: add http and portnumber 80. Try other ports later.
urls = Url.objects.all()
for url in urls:
scan_url(url, 80, "http")
return
def validate_port(port):
if port > 65535 or port < 0:
logger.error("Invalid port number, must be between 0 and 65535. %s" % port)
raise ValueError("Invalid port number, must be between 0 and 65535. %s" % port)
def validate_protocol(protocol):
if protocol not in ["http", "https"]:
logger.error("Invalid protocol %s, options are: http, https" % protocol)
raise ValueError("Invalid protocol %s, options are: http, https" % protocol)
def scan_url_list_standard_ports(urls):
scan_url_list(urls, 443, 'https')
scan_url_list(urls, 80, 'http')
scan_url_list(urls, 8080, 'http')
scan_url_list(urls, 8443, 'https')
# ScannerHttp.scan_url_list(urls, 8088, 'http')
# ScannerHttp.scan_url_list(urls, 8888, 'http')
# ScannerHttp.scan_url_list(urls, 8008, 'http')
# ScannerHttp.scan_url_list(urls, 9443, 'https')
def scan_urls_on_standard_ports(urls):
    scan_urls(urls, [80, 81, 82, 88, 443, 8008, 8080, 8088, 8443, 8888, 9443], ['http', 'https'])
def scan_url_list(urls, port=80, protocol="http"):
from multiprocessing import Pool
pool = Pool(processes=8)
def scan_urls(urls, ports, protocols):
if not has_internet_connection():
logger.error("No internet connection! Try again later!")
return
if protocol not in ["http", "https"]:
logger.error("Invalid protocol %s, options are: http, https" % protocol)
return
for port in ports:
validate_port(port)
if port > 65535 or port < 0:
logger.error("Invalid port number, must be between 0 and 65535. %s" % port)
return
for protocol in protocols:
validate_protocol(protocol)
for url in urls:
pool.apply_async(scan_url, [url, port, protocol],
callback=success_callback,
error_callback=error_callback)
logger.debug("Closing pool")
pool.close()
logger.debug("Joining pool")
pool.join()
    # put some distance between the times a url is contacted, so we put less pressure on it.
    # therefore, we iterate per port and protocol instead of per url.
for port in ports:
for protocol in protocols:
for url in urls:
scan_url(url, port, protocol)
def scan_multithreaded(port=80, protocol="http", only_new=False):
def scan_url(url, port=80, protocol="https"):
task = scan_url_task.s(url, port, protocol)
task.apply_async()
if not only_new:
urls = Url.objects.all() # scans ALL urls.
else:
# todo: only new urls, those that don't have an endpoint on the protocol+port.
# not without _any_ endpoint, given that there will soon be endpoints for it.
# this also re-verifies all domains that explicitly don't have an endpoint on this
# port+protocol, which can be a bit slow. (we're not saving it reversely).
# todo: this is not correct yet.
urls = Url.objects.all().exclude(endpoint__port=port, endpoint__protocol=protocol)
urls = Url.objects.all()
scan_url_list(urls, port, protocol)
def database_debug():
# had the wrong env.
from django.db import connection
from failmap_admin import settings
logger.error(dir(settings))
logger.error(settings.DATABASE)
logger.error(settings.DATABASES)
def success_callback(x):
logger.info("Success!")
sql = "SELECT name FROM sqlite_master WHERE type='table';"
cursor = connection.cursor()
cursor.execute(sql)
rows = cursor.fetchall()
logger.error(rows)
for row in rows:
logger.error(row)
def error_callback(x):
logger.error("Error callback!")
logger.error(x)
logger.error(vars(x))
# Simple: if there is an http response (status code), there is an http server.
# There might be other protocols on standard ports.
# Even if the IP constantly changes, we know that a scanner will find something by url
# todo: check if we can scan https, due to our https lib not supporting "rest of world"
# todo: check headers using another scanner, don't use this one, even though it contacts
# the server (?)
# todo: further look into dig, which at the moment doesn't return more than what we have...
# We don't make endpoints for servers that don't exist: as opposed to qualys, since that
# scanner is slow. (perhaps we should in that case?)
# todo: option to not find IP's, only use existing ip's of endpoints / urls.
@app.task
def scan_url_task(url, port=80, protocol="https"):
"""
Searches for both IPv4 and IPv6 IP addresses / types.
    The algorithm is very simple: if there is an http status code, or "a response", there is an
    http(s) server. Some servers don't return a status code, others have problems with tls.
    So you either need to build something extremely robust, or make the easy assumption that there
    could be a website there. Given the ports we scan, the probability of a website is extremely
    high.
def scan_url(url, port=80, protocol="https"):
    We don't scan for the obsolete S-HTTP protocol, only for http and https.
    It's possible to have a TLS site on port 80 and a non-TLS site on port 443. We've seen those.
    This function does not store all ports it couldn't contact. If we did, the number of
    endpoints that are not resolvable would explode. There is no real value in storing the
    non-resolvable urls, as you need to re-scan everything in a while anyway.
    If we stored this, it would be urls * ports endpoints. Now it's roughly urls * 1.8.
    TLS does not have to be successful. We also store https sites where HTTPS completely or
    partially fails. As long as there is a "sort of" response we just assume there is a
    website there. Other TLS scanners can check what's wrong with the connection. Perhaps
    this leads to some false positives or other incorrect data.
The big question is: would some https sites only respond IF the right protocol (SSL 1) or
something like that is spoken to them? Do we need a "special" TLS implementation on our server?
    Todo: look further into whether DIG can be of value to us. So far it seems it cannot.
Todo: remove IP from endpoints. (change for version 1.1)
"""
domain = "%s://%s:%s" % (protocol, url.url, port)
logger.debug("Scanning http(s) server on: %s" % domain)
@@ -168,14 +176,22 @@ def scan_url(url, port=80, protocol="https"):
except (ConnectTimeout, Timeout, ReadTimeout) as Ex:
logger.debug("%s: Timeout! - %s" % (url, Ex))
except (ConnectionRefusedError, ConnectionError, HTTPError) as Ex:
# ConnectionRefusedError: [Errno 61] Connection refused
        # Some errors are of interest to us:
# BadStatusLine is an error, which signifies that the server gives an answer.
# Example: returning HTML, incompatible TLS (binary)
# CertificateError
# Example: wrong domain name for certificate
# certificate verify failed
# We don't care about certificate verification errors: it is a valid response.
"""
        Some errors really mean there is no site. An example is ConnectionRefusedError: [Errno 61],
        which means the endpoint can be killed.
        Yet... there are many, many errors that can still be translated into an existing site.
        So far we have found in responses:
        - BadStatusLine
        - CertificateError
        - certificate verify failed
        All of these indicate that there is a service there, so the endpoint is stored.
"""
# Nope: EOF occurred in violation of protocol
        # Nope: also fine, a response! :) - you'll get an unexpectedly closed connection.
logger.debug("%s: NOPE! - %s" % (url, Ex))
@@ -272,7 +288,6 @@ def endpoint_exists(url, port, protocol, ip):
def kill_endpoint(url, port, protocol, ip):
eps = Endpoint.objects.all().filter(url=url,
port=port,
ip=ip,
......
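Tying the scanner functions in this file together: scan_urls iterates ports x protocols x urls, and each scan_url call enqueues one Celery scan_url_task via apply_async. A sketch of kicking that off for a single organization, using only names visible in this diff ("Arnhem" again being just an example name):

from failmap_admin.organizations.models import Organization, Url

organization = Organization.objects.get(name="Arnhem")
urls = Url.objects.all().filter(organization=organization,
                                is_dead=False,
                                not_resolvable=False)

# every (port, protocol, url) combination becomes one scan_url_task on the queue
scan_urls(urls, [443, 80, 8443, 8080], ['https', 'http'])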