Commit 7c0031a9 authored by Elger Jonker

end of refactoring


Former-commit-id: ff4ccbdf
parent 79acd800
import logging
from datetime import datetime
import pytz
from constance import config
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
from failmap.organizations.models import Organization, OrganizationType, Url
from failmap.map.models import Configuration
import logging
from django.conf import settings
from constance import config
from failmap.organizations.models import Organization, OrganizationType, Url
log = logging.getLogger(__package__)
@@ -65,9 +67,6 @@ def test_map_views(organization_type, organization):
Todo: how is this done in other tests? -> via a docker IP. So these tests will have to be rewritten.
"""
address = "http://localhost:8000/"
from django.test.client import RequestFactory
rf = RequestFactory()
request = rf.get('/')
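For what it's worth, Django's test client resolves paths through the URLconf without a running server, so neither localhost:8000 nor a docker IP is strictly required; a minimal sketch, assuming the map views are mounted on the root URLconf:
# Minimal sketch: exercise a view via the URLconf instead of a live server.
from django.test import Client

client = Client()
response = client.get('/')                   # resolved through ROOT_URLCONF, no HTTP socket
assert response.status_code in (200, 302)    # allow a redirect to a localized landing page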
@@ -138,6 +137,7 @@ def test_hypersh_commands():
def test_map_commands(organization, url):
# todo: also test game in next game iteration
# this is going to take a while
call_debug_command('rebuild_reports', '-o', organization.name)
call_debug_command('report', '-o', organization.name)
......
import pytest
from django.contrib.auth.models import User
from failmap.celery import app
from failmap.app.models import Job
from failmap.celery import app
@pytest.fixture
......
@@ -5,7 +5,8 @@ from django.utils.html import format_html
from django_fsm_log.admin import StateLogInline
from import_export.admin import ImportExportModelAdmin
from failmap.hypersh.models import ContainerConfiguration, ContainerEnvironment, ContainerGroup, Credential
from failmap.hypersh.models import (ContainerConfiguration, ContainerEnvironment, ContainerGroup,
Credential)
log = logging.getLogger(__name__)
......
@@ -8,8 +8,8 @@ from import_export.admin import ImportExportModelAdmin
from failmap.app.models import Job
from failmap.celery import PRIO_HIGH, app
from failmap.map.geojson import import_from_scratch, update_coordinates
from failmap.map.models import (AdministrativeRegion, Configuration, MapDataCache, OrganizationRating,
UrlRating, VulnerabilityStatistic)
from failmap.map.models import (AdministrativeRegion, Configuration, MapDataCache,
OrganizationRating, UrlRating, VulnerabilityStatistic)
@admin.register(OrganizationRating)
......
@@ -21,9 +21,8 @@ from rdp import rdp
from wikidata.client import Client
from failmap.celery import app
from failmap.organizations.models import Coordinate, Organization, OrganizationType, Url
from failmap.map.models import AdministrativeRegion
from failmap.organizations.models import Coordinate, Organization, OrganizationType, Url
log = logging.getLogger(__package__)
......
import logging
from failmap.app.management.commands._private import ScannerTaskCommand
from failmap.map import rebuild_report
log = logging.getLogger(__name__)
......
import logging
from failmap.app.management.commands._private import ScannerTaskCommand
from failmap.map import report
log = logging.getLogger(__name__)
......
@@ -2,13 +2,11 @@ import logging
from celery import group
from failmap.map.report import (calculate_map_data, calculate_vulnerability_statistics,
rebuild_organization_ratings, rebuild_url_ratings)
from failmap.celery import Task
from failmap.map.report import rebuild_organization_ratings, rebuild_url_ratings
from failmap.organizations.models import Organization, Url
from failmap.scanners.scanner.scanner import q_configurations_to_report
from failmap.celery import Task
log = logging.getLogger(__package__)
......
@@ -8,18 +8,18 @@ from constance import config
from deepdiff import DeepDiff
from django.db.models import Q
from failmap.celery import Task, app
from failmap.map.calculate import get_calculation
from failmap.map.models import (Configuration, MapDataCache, OrganizationRating, UrlRating,
VulnerabilityStatistic)
from failmap.map.views import get_map_data
from failmap.organizations.models import Organization, OrganizationType, Url
from failmap.scanners.models import Endpoint, EndpointGenericScan, UrlGenericScan
from failmap.scanners.scanner.scanner import q_configurations_to_report
from failmap.celery import Task, app
from failmap.map.calculate import get_calculation
from failmap.map.models import Configuration, MapDataCache, OrganizationRating, UrlRating, VulnerabilityStatistic
from failmap.scanners.types import ALL_SCAN_TYPES, ENDPOINT_SCAN_TYPES, URL_SCAN_TYPES
log = logging.getLogger(__package__)
from failmap.scanners.types import ENDPOINT_SCAN_TYPES, URL_SCAN_TYPES, ALL_SCAN_TYPES
FAILMAP_STARTED = datetime(year=2016, month=1, day=1, hour=13, minute=37, second=42, tzinfo=pytz.utc)
......
@@ -5,8 +5,8 @@ from django.conf.urls import url
from django.urls import path, register_converter
from django.views.i18n import JavaScriptCatalog
from failmap.map import views
from failmap import converters
from failmap.map import views
# todo: organization type converter doesn't work yet... using slug as an alternative.
register_converter(converters.OrganizationTypeConverter, 'ot')
......
@@ -4,6 +4,7 @@ import re
from datetime import datetime, timedelta
from math import ceil
import iso3166
import pytz
import simplejson as json
from constance import config
@@ -25,17 +26,14 @@ from django.views.decorators.cache import cache_page
from django_celery_beat.models import PeriodicTask
from import_export.resources import modelresource_factory
from failmap import __version__
from failmap.app.common import JSEncoder
from failmap.map.calculate import get_calculation
from failmap.map.models import (Configuration, MapDataCache, OrganizationRating, UrlRating,
VulnerabilityStatistic)
from failmap.organizations.models import Coordinate, Organization, OrganizationType, Promise, Url
from failmap.scanners.models import EndpointGenericScan, UrlGenericScan
from failmap import __version__
from failmap.app.common import JSEncoder
from failmap.map.calculate import get_calculation
from failmap.scanners.types import ENDPOINT_SCAN_TYPES, URL_SCAN_TYPES, ALL_SCAN_TYPES
import iso3166
from failmap.scanners.types import ALL_SCAN_TYPES, ENDPOINT_SCAN_TYPES, URL_SCAN_TYPES
log = logging.getLogger(__package__)
@@ -1249,8 +1247,6 @@ def ticker(request, country: str = "NL", organization_type: str = "municipality"
""" % {"when": when, "OrganizationTypeId": get_organization_type(organization_type),
"country": get_country(country)}
print(sql)
newest_urlratings = list(OrganizationRating.objects.raw(sql))
# this of course doesn't work on the first day, as nothing had been measured yet
@@ -1887,13 +1883,13 @@ class LatestScanFeed(Feed):
# print("args: %s" % kwargs['scan_type'])
return kwargs.get('scan_type', '')
def title(self, scan_type: str=""):
def title(self, scan_type: str = ""):
if scan_type:
return "%s Scan Updates" % scan_type
else:
return "Vulnerabilities Feed"
def link(self, scan_type: str=""):
def link(self, scan_type: str = ""):
if scan_type:
return "/data/feed/%s" % scan_type
else:
......
@@ -17,15 +17,14 @@ from leaflet.admin import LeafletGeoAdminMixin
import failmap.scanners.scanner.http as scanner_http
from failmap import types
from failmap.app.models import Job
from failmap.celery import PRIO_HIGH
from failmap.map.report import OrganizationRating, UrlRating
from failmap.organizations.models import Coordinate, Organization, OrganizationType, Promise, Url
from failmap.scanners.admin import UrlIp
from failmap.scanners.models import Endpoint, EndpointGenericScan, TlsQualysScan, UrlGenericScan
from failmap.scanners.scanner import dns, dnssec, onboard, plain_http, security_headers, tls_qualys
from failmap.app.models import Job
from failmap.celery import PRIO_HIGH
from failmap.organizations.models import Coordinate, Organization, OrganizationType, Promise, Url
log = logging.getLogger(__name__)
......
import logging
from datetime import datetime
from typing import List
import pytz
from django.core.management.base import BaseCommand
from failmap.organizations.models import Organization, Url
from failmap.scanners.scanner.http import resolves
from failmap.scanners.scanner.dns import discover_wildcard
from failmap.scanners.scanner.http import resolves
log = logging.getLogger(__package__)
from typing import List
class Command(BaseCommand):
......
@@ -2,10 +2,10 @@ import datetime
import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from failmap.organizations.models import Url
from failmap.scanners.models import Endpoint, EndpointGenericScan, Screenshot
from django.db import transaction
log = logging.getLogger(__package__)
......
@@ -7,8 +7,8 @@ from failmap.game.models import Contest, OrganizationSubmission, Team, UrlSubmis
from failmap.map.models import OrganizationRating, UrlRating
from failmap.organizations.models import Coordinate, Organization, OrganizationType, Promise, Url
from failmap.scanners.models import (Endpoint, EndpointGenericScan, EndpointGenericScanScratchpad,
Screenshot, TlsQualysScratchpad, TlsScan,
UrlGenericScan, UrlIp)
Screenshot, TlsQualysScratchpad, TlsScan, UrlGenericScan,
UrlIp)
log = logging.getLogger(__package__)
......
@@ -4,7 +4,8 @@ from datetime import datetime
import pytz
from django.core.management.commands.dumpdata import Command as DumpDataCommand
from failmap.organizations.management.commands.support.datasethelpers import check_referential_integrity
from failmap.organizations.management.commands.support.datasethelpers import \
check_referential_integrity
log = logging.getLogger(__package__)
......
@@ -4,8 +4,9 @@ from import_export.admin import ImportExportModelAdmin
from jet.admin import CompactInline
from jet.filters import RelatedFieldAjaxListFilter
from failmap.scanners.models import (Endpoint, EndpointGenericScan, EndpointGenericScanScratchpad, InternetNLScan,
Screenshot, TlsQualysScan, TlsQualysScratchpad, TlsScan, UrlGenericScan, UrlIp)
from failmap.scanners.models import (Endpoint, EndpointGenericScan, EndpointGenericScanScratchpad,
InternetNLScan, Screenshot, TlsQualysScan, TlsQualysScratchpad,
TlsScan, UrlGenericScan, UrlIp)
class TlsQualysScanAdminInline(CompactInline):
......
import logging
from failmap.app.management.commands._private import VerifyTaskCommand
from failmap.scanners.scanner import dns, ftp, http
log = logging.getLogger(__name__)
......
import logging
from failmap.app.management.commands._private import ScannerTaskCommand
from failmap.scanners.scanner import debug, dnssec, dummy, ftp, mail, onboard, plain_http, security_headers, tls_qualys
from failmap.scanners.scanner import (debug, dnssec, dummy, ftp, mail, onboard, plain_http,
security_headers, tls_qualys)
log = logging.getLogger(__name__)
......
import logging
from failmap.app.management.commands._private import ScannerTaskCommand
from failmap.scanners.scanner import (screenshot, tls_osaft)
from failmap.scanners.scanner import screenshot, tls_osaft
log = logging.getLogger(__name__)
......
@@ -562,14 +562,14 @@ class InternetNLScan(models.Model):
--
-- Create model InternetNLScan
--
CREATE TABLE `scanners_internetnlscan` (
    `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
    `success` bool NOT NULL,
    `started` bool NOT NULL,
    `started_on` datetime(6) NULL,
    `finished` bool NOT NULL,
    `finished_on` datetime(6) NULL,
    `url` varchar(500) NULL,
    `message` varchar(500) NULL
);
COMMIT;
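The SQL above looks like `manage.py sqlmigrate` output; a minimal sketch for regenerating it for inspection (the migration name is a placeholder, not taken from this repository):
# Hedged sketch: print the SQL Django would run for the InternetNLScan migration.
# '0057_internetnlscan' is a placeholder name; look it up in scanners/migrations/.
from django.core.management import call_command

call_command('sqlmigrate', 'scanners', '0057_internetnlscan')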
@@ -577,7 +577,7 @@ class InternetNLScan(models.Model):
It crashes on the (6) NULL at `started_on`. So what's up?
This? https://django-mysql.readthedocs.io/en/latest/management_commands/fix_datetime_columns.html
Server version: 5.5.62-0+deb8u1 (Debian)
That version doesn't support datetime(6) yet...?
I guess they dropped 5.5 support in this version of Django?
Python version: 3.6.6
@@ -585,15 +585,15 @@ class InternetNLScan(models.Model):
Failmap version: 1.1+b9ef27fe
https://dev.mysql.com/doc/refman/5.6/en/fractional-seconds.html
MySQL 5.6.4 and up expands fractional seconds support for TIME, DATETIME, and TIMESTAMP values, with up to
microseconds (6 digits) precision:
With MySQL 5.5, Django uses datetime for DateTimeField; from 5.6 onwards it uses datetime(6) with microseconds.
super_stitch@faalserver:~# mysql --version
mysql Ver 14.14 Distrib 5.5.62, for debian-linux-gnu (x86_64) using readline 6.3
So this seems to be a bug. I guess we have to upgrade to 5.6?
I think the Django version check sees "Distrib 5.5.62", which contains "5.6" :) Which would mean we haven't created a
datetime column since July this year, or the bug was freshly introduced.
So the issue is probably somewhere in Django.
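A quick way to test that hypothesis is to compare the raw server version string with the tuple Django parsed from it; a minimal sketch, assuming the 'default' database alias points at the MySQL server above:
# Minimal sketch: compare MySQL's own version string with what Django parsed.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute("SELECT VERSION()")
    print(cursor.fetchone()[0])       # e.g. '5.5.62-0+deb8u1'

# The MySQL backend caches the parsed version on the connection wrapper:
print(connection.mysql_version)       # e.g. (5, 5, 62) -> no datetime(6) support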
@@ -603,7 +603,7 @@ class InternetNLScan(models.Model):
https://code.djangoproject.com/ticket/28552
And there you also see the microsecond_precision flag dropped.
'The end of upstream support for MySQL 5.5 is December 2018. Therefore, Django 2.1 (released a couple months
earlier in August 2018) may set MySQL 5.6 as the minimum version that it supports.'
Which is also in the release notes: https://docs.djangoproject.com/en/2.1/releases/2.1/
......
@@ -4,7 +4,7 @@ from datetime import datetime
import pytz
from django.core.exceptions import ObjectDoesNotExist
from failmap.scanners.models import Endpoint, Url, EndpointGenericScan, UrlGenericScan
from failmap.scanners.models import Endpoint, EndpointGenericScan, Url, UrlGenericScan
log = logging.getLogger(__package__)
......
@@ -16,8 +16,8 @@ from tenacity import before_log, retry, wait_fixed
from failmap.celery import app
from failmap.organizations.models import Organization, Url
from failmap.scanners.scanner.http import get_ips
from failmap.scanners.scanner.scanner import (allowed_to_discover, q_configurations_to_scan, url_filters)
from failmap.scanners.scanner.scanner import (allowed_to_discover, q_configurations_to_scan,
url_filters)
# Include DNSRecon code from an external dependency. This is cloned recursively and placed outside the django app.
sys.path.append(settings.VENDOR_DIR + '/dnsrecon/')
@@ -159,7 +159,6 @@ def handle_resolves(resolves, url):
return
def toplevel_urls(organizations):
return Url.objects.all().filter(organization__in=organizations,
computed_subdomain="")
......
@@ -13,7 +13,7 @@ from celery import Task, group
from failmap.celery import app
from failmap.organizations.models import Organization, Url
from failmap.scanners.models import Endpoint
from failmap.scanners.scanmanager import store_endpoint_scan_result, endpoint_has_scans
from failmap.scanners.scanmanager import endpoint_has_scans, store_endpoint_scan_result
from failmap.scanners.scanner.http import (can_connect, connect_result, redirects_to_safety,
resolves_on_v4, resolves_on_v6)
from failmap.scanners.scanner.scanner import allowed_to_scan, q_configurations_to_scan
......
@@ -254,11 +254,12 @@ def determine_grade(report, url):
log.error('No report given: %s' % report)
return
raise NotImplementedError
try:
# log.debug('untangle.parse("%s")' % report)
# obj = untangle.parse(report)
# removed untangle, as this was the only file using it, and this is here for legacy purposes.
raise NotImplementedError
obj = {}
except Exception:
log.error('Something wrong with report file: %s' % report)
return
......
@@ -9,7 +9,8 @@ import tempfile
from celery.signals import celeryd_init, worker_shutdown
from django.conf import settings
from failmap.celery.worker import tls_client_certificate, worker_configuration, worker_verify_role_capabilities
from failmap.celery.worker import (tls_client_certificate, worker_configuration,
worker_verify_role_capabilities)
log = logging.getLogger(__name__)
......