rebuild ratings map not clear, nsec dns, dns rewrite

parent 7ecaba49
[submodule "vendor/dnsrecon"]
path = vendor/dnsrecon
url = https://github.com/darkoperator/dnsrecon.git
url = https://github.com/stitch/dnsrecon.git
[submodule "vendor/theHarvester"]
path = vendor/theHarvester
url = https://github.com/laramies/theHarvester.git
......@@ -13,7 +13,62 @@ from ..celery import app
from .models import OrganizationRating, UrlRating
from .points_and_calculations import points_and_calculation
# from functools import lru_cache
logger = logging.getLogger(__package__)
"""
python -m cProfile -s time `which failmap-admin` rebuild-ratings
mainly SQL queries (still)
ncalls tottime percall cumtime percall filename:lineno(function)
186699 50.724 0.000 61.517 0.000 base.py:324(execute)
30872 31.178 0.001 31.178 0.001 {method 'commit' of 'sqlite3.Connection' objects}
6499892/6498375 11.051 0.000 11.676 0.000 {built-in method builtins.hasattr}
6952977/6930265 9.602 0.000 9.632 0.000 {built-in method builtins.getattr}
1832647/1419376 7.205 0.000 51.009 0.000 compiler.py:368(compile)
213054 6.303 0.000 11.092 0.000 query.py:266(clone)
2860662 6.152 0.000 10.844 0.000 compiler.py:351(quote_name_unless_alias)
8490147/8489630 5.820 0.000 9.672 0.000 {built-in method builtins.isinstance}
1139637 4.941 0.000 5.813 0.000 operations.py:199(get_db_converters)
117238 4.884 0.000 15.216 0.000 compiler.py:523(get_default_columns)
173027/173005 4.818 0.000 43.225 0.000 query.py:1122(build_filter)
186699 4.645 0.000 78.305 0.000 utils.py:77(execute)
125021 4.598 0.000 43.696 0.000 compiler.py:165(get_select)
170439 4.413 0.000 8.271 0.000 base.py:473(__init__)
1308273 4.337 0.000 7.837 0.000 __init__.py:353(get_col)
8322 4.188 0.001 80.269 0.010 rating.py:262(rate_timeline)
349379 3.985 0.000 7.710 0.000 query.py:1284(names_to_path)
1336234 3.972 0.000 14.358 0.000 expressions.py:693(as_sql)
175012 3.905 0.000 4.338 0.000 query.py:128(__init__)
124954 3.698 0.000 18.109 0.000 compiler.py:812(get_converters)
125021/124955 3.487 0.000 85.670 0.001 compiler.py:413(as_sql)
193674/129098 3.486 0.000 59.677 0.000 query.py:1255(_add_q)
265136 3.428 0.000 197.598 0.001 query.py:47(__iter__)
156383 3.403 0.000 3.404 0.000 {method 'sub' of '_sre.SRE_Pattern' objects}
247487 3.176 0.000 8.180 0.000 dateparse.py:85(parse_datetime)
2440240 3.124 0.000 4.241 0.000 __init__.py:471(__eq__)
155826 2.981 0.000 2.981 0.000 {method 'execute' of 'sqlite3.Cursor' objects}
133288 2.888 0.000 171.005 0.001 compiler.py:855(execute_sql)
1539350 2.870 0.000 4.268 0.000 operations.py:147(quote_name)
1131921 2.811 0.000 8.557 0.000 expressions.py:703(get_db_converters)
199980 2.748 0.000 6.912 0.000 {method 'fetchmany' of 'sqlite3.Cursor' objects}
200977 2.562 0.000 12.755 0.000 lookups.py:158(process_lhs)
513049 2.560 0.000 3.892 0.000 related.py:651(foreign_related_fields)
365765 2.280 0.000 2.280 0.000 {method 'replace' of 'datetime.datetime' objects}
214624 2.261 0.000 200.017 0.001 query.py:1116(_fetch_all)
45086 2.256 0.000 2.256 0.000 encoder.py:204(iterencode)
522577 2.193 0.000 3.852 0.000 abc.py:178(__instancecheck__)
8729938 2.179 0.000 2.179 0.000 {method 'append' of 'list' objects}
60022 2.170 0.000 3.349 0.000 __init__.py:145(__init__)
349455 2.046 0.000 5.804 0.000 query.py:161(__init__)
322815 2.032 0.000 2.784 0.000 query_utils.py:63(__init__)
346977 1.943 0.000 1.943 0.000 {method 'match' of '_sre.SRE_Pattern' objects}
1077582 1.873 0.000 1.873 0.000 tree.py:21(__init__)
205326 1.808 0.000 13.385 0.000 query.py:1102(_clone)
988849 1.799 0.000 135.076 0.000 related_descriptors.py:161(__get__)
8322 1.793 0.000 267.225 0.032 rating.py:167(create_timeline)
"""
@app.task
......@@ -60,9 +115,9 @@ def rerate_urls(urls: List[Url]=None):
if not urls:
urls = list(Url.objects.all().filter(is_dead=False).order_by('url'))
UrlRating.objects.all().filter(url__in=urls).delete()
# to not have all ratings empty, do it per url
for url in urls:
UrlRating.objects.all().filter(url=url).delete()
rate_timeline(create_timeline(url), url)
......@@ -70,9 +125,12 @@ def rerate_organizations(organizations: List[Organization]=None):
if not organizations:
organizations = list(Organization.objects.all().order_by('name'))
OrganizationRating.objects.all().filter(organization__in=organizations).delete()
default_ratings()
add_organization_rating(organizations, build_history=True)
# to not clear the whole map at once, do this per organization.
# could be more efficient, but since the process is so slow, you'll end up with people looking at empty maps.
for organization in organizations:
OrganizationRating.objects.all().filter(organization=organization).delete()
default_ratings()
add_organization_rating([organization], build_history=True)
def rerate_urls_of_organizations(organizations: List[Organization]):
......
......@@ -9,7 +9,7 @@ from jet.admin import CompactInline
from failmap_admin.map.rating import OrganizationRating, UrlRating, rate_organization_on_moment
from failmap_admin.scanners.admin import UrlIpInline
from failmap_admin.scanners.models import Endpoint
from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency
from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency_scan
from failmap_admin.scanners.scanner_http import scan_urls_on_standard_ports
from failmap_admin.scanners.scanner_plain_http import scan_urls as plain_http_scan_urls
from failmap_admin.scanners.scanner_screenshot import screenshot_urls
......@@ -154,7 +154,7 @@ class UrlAdmin(admin.ModelAdmin):
for url in queryset:
if url.is_top_level():
brute_known_subdomains([url])
certificate_transparency([url])
certificate_transparency_scan([url])
scan_urls_on_standard_ports([url]) # discover endpoints
plain_http_scan_urls([url]) # see if there is missing https
security_headers_scan_urls([url])
......@@ -168,7 +168,7 @@ class UrlAdmin(admin.ModelAdmin):
onboard.short_description = "🔮 Onboard (dns, endpoints, scans, screenshot)"
def dns_certificate_transparency(self, request, queryset):
certificate_transparency([url for url in queryset])
certificate_transparency_scan([url for url in queryset])
self.message_user(request, "URL(s) have been scanned on known subdomains: Done")
actions.append('dns_certificate_transparency')
dns_certificate_transparency.short_description = "🗺 Discover subdomains (using certificate transparency)"
......
# coding=UTF-8
# from __future__ import unicode_literals
import logging
from datetime import datetime, timedelta
import pytz
......@@ -9,6 +10,10 @@ from django.db import models
from django_countries.fields import CountryField
from jsonfield import JSONField
from failmap_admin.scanners.scanner_http import resolves
logger = logging.getLogger(__package__)
class OrganizationType(models.Model):
name = models.CharField(max_length=255)
......@@ -158,6 +163,30 @@ class Url(models.Model):
return True
return False
def add_subdomain(self, subdomain):
    """Create and persist a new Url for ``subdomain`` under this url.

    The new hostname is ``subdomain + "." + self.url`` (lower-cased). The
    method is a no-op (returns None) when that hostname already exists for
    any of this url's organizations, or when it fails the ``resolves`` check.

    :param subdomain: bare subdomain label (e.g. "www"), prepended to self.url
    :return: the saved Url instance, or None when the subdomain was skipped
    """
    new_url = (subdomain + "." + self.url).lower()
    # Duplicate check is scoped to this url's organizations: the same hostname
    # may legitimately exist under a different organization.
    if Url.objects.all().filter(url=new_url, organization__in=self.organization.all()).exists():
        logger.debug("Subdomain already in the database: %s" % new_url)
        return
    # NOTE(review): per the log message, resolves() is False when the name
    # resolves on neither ipv4 nor ipv6 — confirm against scanner_http.resolves.
    if not resolves(new_url):
        logger.debug("New subdomain did not resolve on both ipv4 and ipv6: %s" % new_url)
        return
    u = Url()
    # A Url needs to have a value for field "id" before a many-to-many relationship can be used.
    u.save()
    # Copy every organization of the parent url onto the new subdomain url.
    u.organization = self.organization.all()
    u.url = new_url
    u.save()
    logger.info("Added domain to database: %s" % new_url)
    # run standard checks, so you know the
    # discover_wildcards([u])
    return u
# are open ports based on IP addresses.
# addresses might change (and thus an endpoint changes).
# for the list of endpoints, you want to know what endpoints don't exist
......
......@@ -12,6 +12,11 @@ logger = logging.getLogger(__package__)
# todo: add command line arguments: port and protocol.
# Verify that all endpoints we currently have still exist:
# failmap-admin discover-endpoints-http-https --method=verify
# try to find open ports
# failmap-admin discover-endpoints-http-https --method=discover
class Command(BaseCommand):
help = 'Discover http(s) endpoints on well known ports.'
......@@ -30,7 +35,7 @@ class Command(BaseCommand):
functionlist[func]()
return
if options['organization'][0] == "_ALL_":
if options['organization'][0] == "*":
functionlist[func]()
return
......
......@@ -6,7 +6,7 @@ import pytz
from django.core.management.base import BaseCommand
from failmap_admin.organizations.models import Url
from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency
from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency_scan
from failmap_admin.scanners.scanner_http import scan_urls_on_standard_ports
from failmap_admin.scanners.scanner_plain_http import scan_url
from failmap_admin.scanners.scanner_screenshot import screenshot_urls
......@@ -42,7 +42,7 @@ def onboard():
if url.is_top_level():
# some DNS scans, to find more urls to onboard.
brute_known_subdomains([url])
certificate_transparency([url])
certificate_transparency_scan([url])
scan_urls_on_standard_ports([url])
scan_url(url)
screenshot_urls([url])
......
import argparse
import logging
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
from failmap_admin.organizations.models import Organization
from failmap_admin.scanners.scanner_dns import ScannerDns
from failmap_admin.scanners.scanner_http import ScannerHttp
from failmap_admin.scanners.scanner_dns import (brute_dutch, brute_known_subdomains,
brute_three_letters, certificate_transparency, nsec,
search_engines, standard)
from failmap_admin.scanners.state_manager import StateManager
from .support.arguments import add_organization_argument
logger = logging.getLogger(__package__)
# https://docs.python.org/3/library/argparse.html#required
class Command(BaseCommand):
help = 'Development command'
example_text = """example:
failmap-admin scan_dns --organization '*' --scan_type nsec
failmap-admin scan_dns --organization '*' --scan_type certificate_transparency
"""
def add_arguments(self, parser):
parser.add_argument(
'--organization', '-o',
help="Name of an organization, for example Arnhem. Prefix spaces with a backslash (\\)",
nargs=1,
required=True,
type=self.valid_organization
)
# options:
# brute_known_subdomains,
# brute_three_letters,
# brute_dutch_basic,
# standard
add_organization_argument(parser)
parser.add_argument(
'--scan_type', '-st',
nargs='?',
help="Specify a scan type with --scan_type type. Types available are:",
choices=['brute_known_subdomains', 'brute_three_letters',
'brute_dutch_basic', 'standard', 'search_engines', 'certificate_transparency'],
choices=['brute_known_subdomains',
'brute_three_letters',
'brute_dutch',
'standard',
'search_engines',
'certificate_transparency',
'nsec'],
required=True,
default="brute_known_subdomains"
)
def handle(self, *args, **options):
scan_type = options['scan_type']
desired_organization = options['organization'][0]
logger.debug("Scan type: %s" % scan_type)
logger.debug("Desired organization: %s" % desired_organization)
logger.debug("Targetted organization: %s" % desired_organization)
if '_ALL_' in desired_organization:
if '*' in desired_organization:
organizations = StateManager.create_resumed_organizationlist(scanner="DNS_" + scan_type)
for organization in organizations:
StateManager.set_state("DNS_" + scan_type, organization.name)
......@@ -58,37 +57,29 @@ class Command(BaseCommand):
organization = Organization.objects.get(name=desired_organization)
self.scan_organization(organization, scan_type)
@staticmethod
def valid_organization(name):
    """argparse ``type=`` validator for the --organization option.

    Accepts the literal "_ALL_" (meaning: run against every organization) or
    the exact name of an existing Organization. Any other value raises
    argparse.ArgumentTypeError so argparse prints a usable error message.

    :param name: raw command-line value
    :return: "_ALL_" or the validated organization name
    :raises argparse.ArgumentTypeError: when no Organization matches ``name``
    """
    if "_ALL_" in name:
        return "_ALL_"
    try:
        o = Organization.objects.get(name=name)
        return o.name
    except ObjectDoesNotExist:
        msg = "%s is not a valid organization or _ALL_" % name
        raise argparse.ArgumentTypeError(msg)
def scan_organization(self, organization, scan_type):
s = ScannerDns()
scanfunction = ""
if "brute_known_subdomains" in scan_type:
scanfunction = "organization_brute_knownsubdomains"
if "brute_three_letters" in scan_type:
scanfunction = "organization_brute_threeletters"
if "brute_dutch_basic" in scan_type:
scanfunction = "organization_brute_dutch"
if "standard" in scan_type:
scanfunction = "organization_standard_scan"
if "search_engines" in scan_type:
scanfunction = "organization_search_engines_scan"
if "certificate_transparency" in scan_type:
scanfunction = "organization_certificate_transparency"
logger.debug("Calling %s scan on: %s" % (scanfunction, organization))
added = getattr(s, scanfunction)(organization) # dynamically call function
logger.debug("Added: %s" % added)
if added:
logger.debug("Scanning urls on standard ports")
ScannerHttp.scan_url_list_standard_ports(added)
logger.debug("Calling %s scan on: %s" % (scan_type, organization))
# explicitly written so the imported functions are used, don't use strings as dynamic function names.
if scan_type == "brute_known_subdomains":
brute_known_subdomains(organizations=[organization])
if scan_type == "brute_three_letters":
brute_three_letters(organizations=[organization])
if scan_type == "brute_dutch":
brute_dutch(organizations=[organization])
if scan_type == "standard":
standard(organizations=[organization])
if scan_type == "search_engines":
search_engines(organizations=[organization])
if scan_type == "certificate_transparency":
certificate_transparency(organizations=[organization])
if scan_type == "nsec":
nsec(organizations=[organization])
# we don't do anything with added subdomains, that should be handled at the "added url event" or whatever
......@@ -27,8 +27,8 @@ def add_discover_verify(parser):
def valid_organization(name):
if "_ALL_" in name:
return "_ALL_"
if name in ["_ALL_", "*"]:
return "*"
try:
o = Organization.objects.get(name=name)
return o.name
......
# coding=UTF-8
from django.db import models
from failmap_admin.organizations.models import Url
# from failmap_admin.organizations.models import Url
class Endpoint(models.Model):
......@@ -29,8 +29,10 @@ class Endpoint(models.Model):
ports, protocols, ip-addresses and more of that.
"""
# imported using a string, to avoid circular imports, which happens in complexer models
# https://stackoverflow.com/questions/4379042/django-circular-model-import-issue
url = models.ForeignKey(
Url,
'organizations.Url',
null=True,
blank=True
)
......@@ -115,7 +117,10 @@ class UrlIp(models.Model):
done in the next version as it increases complexity slightly.
"""
url = models.ForeignKey(Url, blank=True, null=True)
url = models.ForeignKey(
'organizations.Url',
blank=True,
null=True)
ip = models.CharField(
max_length=255,
......
This diff is collapsed.
......@@ -34,8 +34,8 @@ from requests import ConnectTimeout, HTTPError, ReadTimeout, Timeout
from requests.exceptions import ConnectionError
from failmap_admin.celery import app
from failmap_admin.scanners.models import Endpoint, UrlIp
from .models import Endpoint, UrlIp
from .timeout import timeout
logger = logging.getLogger(__package__)
......@@ -84,6 +84,7 @@ def scan_url(protocol, url, port):
@app.task
def resolve_and_scan(protocol, url, port):
# vhosts
ips = get_ips(url.url)
......@@ -99,6 +100,19 @@ def resolve_and_scan(protocol, url, port):
url_revive_task = revive_url.s(url)
url_revive_task.apply_async()
"""
import requests; requests.get('https://[2a01:7c8:aac0:56b:5054:ff:fe1f:cce8]', headers={'Host':'faalkaart.nl'})
>>> import requests; requests.get('https://[2a01:7c8:aac0:56b:5054:ff:fe1f:cce8]',
headers={'Host':'faalkaart.nl'}, verify=False)
/usr/lib/python2.7/dist-packages/urllib3/connectionpool.py:732:
InsecureRequestWarning: Unverified HTTPS request is being made.
Adding certificate verification is strongly advised.
See: https://urllib3.readthedocs.org/en/latest/security.html (This warning will only appear once by default.)
InsecureRequestWarning)
<Response [200]>
todo: just change the host headers
"""
# todo: switch between ipv4 and ipv6, make tasks for different workers.
(ipv4, ipv6) = ips
if ipv4:
......@@ -110,7 +124,7 @@ def resolve_and_scan(protocol, url, port):
# v6 is not yet supported, as we don't have v6 workers yet.
def get_ips(url):
def get_ips(url: str):
ip4 = ""
ip6 = ""
......@@ -121,6 +135,8 @@ def get_ips(url):
logger.debug("Get IPv4 error: %s" % ex)
try:
# dig AAAA faalkaart.nl +short
# 2a01:7c8:aac0:56b:5054:ff:fe1f:cce8
x = socket.getaddrinfo(url, None, socket.AF_INET6)
ip6 = x[0][4][0]
except Exception as ex:
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment