Verified Commit 91fe2089 authored by Elger Jonker

Add report after onboarding, improve reporting speed, don't rebuild all reports every day

parent 20722d26
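Most of this diff renames the rating helpers in failmap.map.rating; as a quick reference for the hunks below (all names taken from this commit):

# Renamed helpers (old name -> new name):
#   rerate_urls           -> rebuild_url_ratings
#   rerate_organizations  -> rebuild_organization_ratings
#   show_timeline_console -> inspect_timeline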
import logging

from failmap.app.management.commands._private import ScannerTaskCommand

from ... import rebuild_report

log = logging.getLogger(__name__)


class Command(ScannerTaskCommand):
    """Remove all organization and url ratings, then rebuild them from scratch."""

    help = __doc__

    def handle(self, *args, **options):
        try:
            self.scanner_module = rebuild_report
            return super().handle(*args, **options)
        except KeyboardInterrupt:
            log.info("Received keyboard interrupt. Stopped.")
@@ -2,7 +2,7 @@ import logging
 from failmap.app.management.commands._private import TaskCommand
-from ...rating import rerate_organizations
+from ...rating import rebuild_organization_ratings
 log = logging.getLogger(__name__)
@@ -12,4 +12,4 @@ class Command(TaskCommand):
     help = __doc__
-    task = rerate_organizations
+    task = rebuild_organization_ratings
@@ -2,7 +2,7 @@ import logging
 from django.core.management.base import BaseCommand
-from failmap.map.rating import create_timeline, show_timeline_console
+from failmap.map.rating import create_timeline, inspect_timeline
 from failmap.organizations.models import Url
 log = logging.getLogger(__package__)
@@ -27,4 +27,4 @@ class Command(BaseCommand):
         urls = Url.objects.all().filter(url__iregex=regex, is_dead=False)
         for url in urls:
-            print(show_timeline_console(create_timeline(url), url))
+            print(inspect_timeline(create_timeline(url), url))
(This diff is collapsed and not shown.)
import logging

from celery import group

from failmap.map.rating import (calculate_map_data, calculate_vulnerability_statistics,
                                rebuild_organization_ratings, rebuild_url_ratings)
from failmap.organizations.models import Organization, Url
from failmap.scanners.scanner.scanner import q_configurations_to_report

from ..celery import Task

log = logging.getLogger(__package__)
def compose_task(
    organizations_filter: dict = dict(),
    urls_filter: dict = dict(),
    endpoints_filter: dict = dict(),
) -> Task:
    """Compose taskset to rebuild specified organizations/urls."""

    if endpoints_filter:
        raise NotImplementedError('This scanner does not work on an endpoint level.')

    log.info("Organization filter: %s" % organizations_filter)
    log.info("Url filter: %s" % urls_filter)

    # Only displayed configurations are reported. Because why have reports on things you don't display?
    # Apply the filter to organizations (or, if there is no filter, all organizations).
    organizations = Organization.objects.filter(q_configurations_to_report('organization'), **organizations_filter)
    log.debug("Organizations: %s" % len(organizations))
    # Create tasks for rebuilding ratings for the selected organizations and urls.
    # Whenever a url has been (re)rated, the organization for that url needs to
    # be (re)rated as well to propagate the result of the url rating. Tasks are
    # created per organization: first rebuild all of this organization's urls
    # (depending on url filters), after which the organization rating is rebuilt.
    tasks = []
    for organization in organizations:
        urls = Url.objects.filter(q_configurations_to_report(), organization=organization, **urls_filter)
        if not urls:
            continue

        tasks.append(rebuild_url_ratings.si(urls)
                     | rebuild_organization_ratings.si([organization]))
    if not tasks:
        log.error("Could not rebuild reports: the filters resulted in no tasks being created.")
        log.debug("Organization filter: %s" % organizations_filter)
        log.debug("Url filter: %s" % urls_filter)
        log.debug("Urls to display: %s" % q_configurations_to_report())
        log.debug("Organizations to display: %s" % q_configurations_to_report('organization'))
        return group()

    log.debug("Number of tasks: %s" % len(tasks))
    # Given this is a complete rebuild, also rebuild the statistics for the past year
    # (a year is the maximum that is shown).
    tasks.append(calculate_vulnerability_statistics.si(366))
    tasks.append(calculate_map_data.si(366))

    task = group(tasks)
    return task
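A sketch of how the composed group might be dispatched, using standard Celery calls (the 'Arnhem' filter is purely illustrative):

# Compose the rebuild tasks for one organization and hand them to the workers.
task = compose_task(organizations_filter={'name__iexact': 'Arnhem'})
result = task.apply_async()
result.join()  # block until all url and organization ratings are rebuilt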
@@ -528,12 +528,12 @@ class UrlAdmin(ActionMixin, ImportExportModelAdmin, nested_admin.NestedModelAdmin
         actions.append('declare_dead')
     def timeline_debug(self, request, queryset):
-        from failmap.map.rating import create_timeline, show_timeline_console
+        from failmap.map.rating import create_timeline, inspect_timeline
         from django.http import HttpResponse
         content = "<pre>"
         for url in queryset:
-            content += show_timeline_console(create_timeline(url), url)
+            content += inspect_timeline(create_timeline(url), url)
         content += "</pre>"
...
@@ -2,8 +2,8 @@ import logging
 from django.core.management.base import BaseCommand
-from failmap.map.rating import (add_organization_rating, create_timeline, rerate_urls,
-                                show_timeline_console)
+from failmap.map.rating import (add_organization_rating, create_timeline, inspect_timeline,
+                                rebuild_url_ratings)
 from failmap.organizations.models import Organization, Url
 from failmap.scanners.models import Endpoint
@@ -62,11 +62,11 @@ def test_osaft():
 def rebuild_ratings():
-    from failmap.map.rating import rerate_organizations
+    from failmap.map.rating import rebuild_organization_ratings
     organization = Organization.objects.filter(name="Arnhem").get()
-    rerate_urls(list(Url.objects.all().filter(organization=organization)))
-    rerate_organizations(organizations=[organization])
+    rebuild_url_ratings(list(Url.objects.all().filter(organization=organization)))
+    rebuild_organization_ratings(organizations=[organization])
 def tasking():
@@ -99,14 +99,14 @@ def develop_timeline():
     urls = Url.objects.all().filter(organization=organization)
     for url in urls:
         data = create_timeline(url=url)
-        show_timeline_console(data, url)
-        rerate_urls([url])
+        inspect_timeline(data, url)
+        rebuild_url_ratings([url])
     add_organization_rating(organizations=[organization], create_history=True)
     if False:
         organizations = Organization.objects.all().order_by('name')
         for organization in organizations:
-            rerate_urls(Url.objects.all().filter(organization=organization))
+            rebuild_url_ratings(Url.objects.all().filter(organization=organization))
     if False:
         # url = Url.objects.all().filter(url='www.amersfoort.nl').get()
@@ -123,8 +123,8 @@ def develop_timeline():
         url = Url.objects.all().filter(url='geo.aaenhunze.nl').get()
         url = Url.objects.all().filter(url='webserver03.bloemendaal.nl').get()
         data = create_timeline(url=url)
-        show_timeline_console(data, url)
-        rerate_urls([url])
+        inspect_timeline(data, url)
+        rebuild_url_ratings([url])
         # OrganizationRating.objects.all().delete()
         # for organization in url.organization.all():
@@ -180,8 +180,8 @@ def develop_determineratings():
     # pyflakes when = datetime(2016, 12, 31, 0, 0, tzinfo=pytz.utc)
     # when = datetime.now(pytz.utc)
     # organization = Organization.objects.filter(name="Zederik").get()
-    # rerate_urls(Url.objects.all().filter(organization=organization))
-    # rerate_organizations(organizations=[organization])
+    # rebuild_url_ratings(Url.objects.all().filter(organization=organization))
+    # rebuild_organization_ratings(organizations=[organization])
     # ratings are always different since we now also save the last scan date.
     # only creates things near midnight. Should check if today, and then save for now.
     # add_organization_rating(organization, create_history=True)
...
@@ -6,6 +6,7 @@ from celery import group
 from django.utils import timezone
 from failmap.celery import Task, app
+from failmap.map.rating import update_report_tasks
 from failmap.organizations.models import Url
 from failmap.scanners.scanner.scanner import q_configurations_to_scan, url_filters
 from failmap.scanners.tasks import crawl_tasks, explore_tasks, scan_tasks
@@ -76,6 +77,7 @@ def compose_task(
             log.info("Scanning on: %s", url)
             update_stage(url, "scans_running")
             tasks.append(scan_tasks(url)
+                         | update_report_tasks(url)
                          | update_stage.si(url, "scans_finished"))
         elif url.onboarding_stage == "scans_finished":
...
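This hunk is the "report after onboarding" part of the commit message: updating the report is chained between scanning and the stage transition. Since the pipe operator builds a Celery chain, the appended task is roughly equivalent to this explicit form (a sketch using the names from the hunk):

from celery import chain

# update_report_tasks(url) runs only after scan_tasks(url) completes,
# and the onboarding stage advances only after the report is updated.
chain(scan_tasks(url), update_report_tasks(url), update_stage.si(url, "scans_finished"))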