much more efficient ratings

parent 052262e1
......@@ -2,7 +2,7 @@ import logging
from django.core.management.base import BaseCommand
from failmap_admin.map.determineratings import default_ratings
from failmap_admin.map.rating import default_ratings
logger = logging.getLogger(__package__)
......
from django.core.management.base import BaseCommand
from failmap_admin.map.determineratings import DetermineRatings
from failmap_admin.map.rating import DetermineRatings
class Command(BaseCommand):
......
......@@ -2,7 +2,7 @@ import logging
from failmap_admin.app.management.commands._private import TaskCommand
from ...determineratings import rebuild_ratings
from ...rating import rebuild_ratings
log = logging.getLogger(__name__)
......
......@@ -118,13 +118,15 @@ def significant_moments(organizations: List[Organization]=None, urls: List[Url]=
dead_endpoints = Endpoint.objects.all().filter(url__in=urls, is_dead=True)
dead_scan_dates = [x.is_dead_since for x in dead_endpoints]
# is this relevant? I think we can do without.
non_resolvable_urls = Url.objects.filter(not_resolvable=True, url__in=urls)
non_resolvable_dates = [x.not_resolvable_since for x in non_resolvable_urls]
dead_urls = Url.objects.filter(is_dead=True, url__in=urls)
dead_url_dates = [x.is_dead_since for x in dead_urls]
# reduce this to one moment per day only, otherwise there will be a report for every change
# which is highly inefficient. The latest possible time of the day is used.
moments = tls_qualys_scan_dates + generic_scan_dates + non_resolvable_dates + dead_scan_dates
moments = tls_qualys_scan_dates + generic_scan_dates + non_resolvable_dates + dead_scan_dates + dead_url_dates
moments = [latest_moment_of_datetime(x) for x in moments]
moments = sorted(set(moments))
......@@ -134,7 +136,8 @@ def significant_moments(organizations: List[Organization]=None, urls: List[Url]=
'tls_qualys_scans': [],
'generic_scans': [],
'dead_endpoints': [],
'non_resolvable_urls': []
'non_resolvable_urls': [],
'dead_urls': []
}
# make sure you don't save the scan for today at the end of the day (which would make it visible only at the end
......@@ -150,7 +153,8 @@ def significant_moments(organizations: List[Organization]=None, urls: List[Url]=
'tls_qualys_scans': tls_qualys_scans,
'generic_scans': generic_scans,
'dead_endpoints': dead_endpoints,
'non_resolvable_urls': non_resolvable_urls
'non_resolvable_urls': non_resolvable_urls,
'dead_urls': dead_urls
}
return moments, happenings
......@@ -220,10 +224,15 @@ def create_timeline(url: Url):
timeline[moment]["endpoints"].append(endpoint)
timeline[moment]['scans'] += list(scans)
# seems to be incorrect? What exactly is not resolvable here?
# Any endpoint from this point on should be removed. If the url becomes alive again, add it again, so you can
# see there are gaps in using the url over time, which is more truthful.
for moment in [not_resolvable_url.not_resolvable_since for not_resolvable_url in happenings['non_resolvable_urls']]:
moment = moment.date()
timeline[moment]["not_resolvable"] = True
timeline[moment]["url_not_resolvable"] = True
for moment in [dead_url.is_dead_since for dead_url in happenings['dead_urls']]:
moment = moment.date()
timeline[moment]["url_is_dead"] = True
for moment in [dead_endpoint.is_dead_since for dead_endpoint in happenings['dead_endpoints']]:
moment = moment.date()
......@@ -234,7 +243,7 @@ def create_timeline(url: Url):
if ep not in timeline[moment]["dead_endpoints"]:
timeline[moment]["dead_endpoints"].append(ep)
# unique endpoints only, always in the same order
# unique endpoints only
for moment in moments:
timeline[moment.date()]["endpoints"] = list(set(timeline[moment.date()]["endpoints"]))
......@@ -257,8 +266,8 @@ def rate_timeline(timeline, url: Url):
scores = []
given_ratings = {}
if 'not_resolvable' in timeline[moment].keys():
logger.debug('Url became non-resolvable. Adding an empty rating to lower the score of'
if 'url_not_resolvable' in timeline[moment].keys() or 'url_is_dead' in timeline[moment].keys():
logger.debug('Url became non-resolvable or dead. Adding an empty rating to lower the score of'
'this domain if it had a score. It has been cleaned up. (hooray)')
# this is the end for the domain.
default_calculation = {
......@@ -457,9 +466,12 @@ def show_timeline_console(timeline, url: Url):
for endpoint in timeline[moment]['dead_endpoints']:
message += "| | |- %s" % endpoint + newline
if 'not_resolvable' in timeline[moment].keys():
if 'url_not_resolvable' in timeline[moment].keys():
message += "| |- url became not resolvable" + newline
if 'url_is_dead' in timeline[moment].keys():
message += "| |- url died" + newline
message += "" + newline
# support this on command line
print(message)
......
This diff is collapsed.
......@@ -6,8 +6,8 @@ from django.contrib import admin
from django.urls import reverse
from jet.admin import CompactInline
from failmap_admin.map.determineratings import (OrganizationRating, UrlRating, add_url_rating,
rate_organization_on_moment)
from failmap_admin.map.rating import (OrganizationRating, UrlRating, add_url_rating,
rate_organization_on_moment)
from failmap_admin.scanners.admin import UrlIpInline
from failmap_admin.scanners.models import Endpoint
from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency
......@@ -250,7 +250,7 @@ class UrlAdmin(admin.ModelAdmin):
actions.append('declare_dead')
def timeline_debug(self, request, queryset):
from failmap_admin.map.determineratings import create_timeline, show_timeline_console
from failmap_admin.map.rating import create_timeline, show_timeline_console
from django.http import HttpResponse
content = "<pre>"
......
from django.contrib import admin
from jet.admin import CompactInline
from failmap_admin.map.determineratings import rate_url
from failmap_admin.map.rating import rate_url
from failmap_admin.scanners.scanner_tls_qualys import scan
from .models import (Endpoint, EndpointGenericScan, EndpointGenericScanScratchpad, Screenshot,
......
......@@ -2,8 +2,8 @@ import logging
from django.core.management.base import BaseCommand
from failmap_admin.map.determineratings import (add_organization_rating, create_timeline,
rebuild_ratings, rerate_urls, show_timeline_console)
from failmap_admin.map.rating import (add_organization_rating, create_timeline, rebuild_ratings,
rerate_urls, show_timeline_console)
from failmap_admin.organizations.models import Organization, Url
from failmap_admin.scanners.models import Endpoint
from failmap_admin.scanners.scanner_security_headers import scan as scan_headers
......
from django.core.management.base import BaseCommand
from failmap_admin.map.determineratings import DetermineRatings
from failmap_admin.map.rating import DetermineRatings
from failmap_admin.scanners.scanner_tls_qualys import ScannerTlsQualys
......
......@@ -2,7 +2,7 @@ import logging
from django.core.management.base import BaseCommand
from failmap_admin.map.determineratings import add_organization_rating, rerate_urls
from failmap_admin.map.rating import add_organization_rating, rerate_urls
from failmap_admin.scanners.models import Url
from failmap_admin.scanners.scanner_tls_qualys import scan, scan_task
......
......@@ -35,7 +35,7 @@ import requests
from celery import group
from django.core.exceptions import ObjectDoesNotExist
from failmap_admin.map.determineratings import add_organization_rating, rerate_urls
from failmap_admin.map.rating import add_organization_rating, rerate_urls
from failmap_admin.organizations.models import Organization, Url
from failmap_admin.scanners.models import (Endpoint, EndpointGenericScan, TlsQualysScan,
TlsQualysScratchpad)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment