Verified Commit 4e8360a8 authored by Elger Jonker

autoscaling, scaling bugfixes, fips, small bugfixes

parent d3dee71b
......@@ -175,12 +175,13 @@ texinfo_documents = [
celery_task_prefix = '(task)' # < default
import sys, os
sys.path.insert(0, os.path.join(os.path.abspath('.'), '../../../failmap'))
os.environ['DJANGO_SETTINGS_MODULE'] = 'failmap.settings'
# from django.conf import settings
# settings.configure() # missing things... such as TOOLS. This is a default config file which is useless.
import django
django.setup()
# Doesn't work on readthedocs yet...
# import sys, os
#
# sys.path.insert(0, os.path.join(os.path.abspath('.'), '../../../failmap'))
# os.environ['DJANGO_SETTINGS_MODULE'] = 'failmap.settings'
# # from django.conf import settings
# # settings.configure() # missing things... such as TOOLS. This is a default config file which is useless.
#
# import django
# django.setup()
# Data Model
This is an autogenerated page about the FailMap data model.
It has been generated with 'failmap docs'.
## organizations
![Data Model](data_model/organizations_models.png)
......
import logging
import re
import subprocess
from django.core.management.base import BaseCommand
log = logging.getLogger(__name__)
# simulating quotas, since their API doesn't support that query (i mean wtf!)
HYPER_MAX_CONTAINERS = 20
HYPER_MAX_IMAGES = 20
HYPER_MAX_VOLUMES = 40
HYPER_MAX_FIPS = 10
"""
Hyper.sh manual control command.
Requires the hyper.sh client to be installed, see hyper.sh for installation instructions (pip install hyper).
Requires a client.p12 file in the failmap directory. Will only install things in the Frankfurt region for now.
Usage:
failmap hype -a status - Shows the amount of containers vs the maximum, plus images, volumes and floating IPs.
failmap hype -a clear - Removes all containers, images and volumes.
failmap hype -a up qualys (number)
failmap hype -a down qualys (number)
"""
class Command(BaseCommand):
"""Run a Failmap production server."""
def add_arguments(self, parser):
parser.add_argument('-a', '--action',
help='The specific action you want to perform')
super().add_arguments(parser)
def handle(self, *args, **options):
if options['action'] == "status":
self.run("Hyper Version", ["hyper", "version"])
self.run("Current Images", ["hyper", "images"])
self.run("Current containers", ["hyper", "ps"])
self.run("Current Volumes", ["hyper", "volume", "ls"])
self.run("Current Floating IP's", ["hyper", "fip", "ls"])
if options['action'] == "clear":
# -v = also remove the associated volume
# -f = sigkill the contents of the container
#
# hyper rm -f -v failmap-worker-scanner-$id
# todo: what about dead containers? get those too!
containers = self.run("List all containers", ["hyper", "ps", "-a", "-q"])
if containers:
self.run("Kill all containers", ["hyper", "rm", "-v", "-f"] +
list(filter(None, containers.split('\n'))))
volumes = self.run("List all volumes", "hyper volume ls -q".split(" "))
if volumes:
self.run("Removing any remaining volumes", ["hyper", "volume", "rm"] +
list(filter(None, volumes.split('\n'))))
images = self.run("List all containers", ["hyper", "images", "-q"])
if images:
self.run("Removing any images", ["hyper", "rmi", ] + list(filter(None, images.split('\n'))))
log.info("All Clear!")
if options['action'] == "scantls":
# this will use 1 volume, 1 FIP and 1 container. It will re-use a single image, which might already be available.
# we're not going to check if there already is a scanner: we're using a single image for all
# commands, so there are more than enough images.
# get the number of containers with similar names, and add 1 for this one.
container_running, container_max, container_available = self.check_container_quota()
fips_running, fips_max, fips_available = self.check_fips_quota()
image_running, image_max, images_available = self.check_image_quota()
volume_running, volume_max, volume_available = self.check_volume_quota()
if not fips_available:
raise ValueError("No FIPS available. Get more or try again later.")
if not container_available:
raise ValueError("No containers available. Get more or try again later.")
if not images_available:
raise ValueError("No images available. Get more or try again later.")
if not volume_available:
raise ValueError("No volumes available. Get more or try again later.")
exit()  # todo: the code below is not reached yet; starting the scanner is still under construction.
# todo: this scanner requires a FIP, so map one and check beforehand if there is one available.
# MAX is also an option.
id = container_running + 1
from django.conf import settings
from constance import config
cmd = """
hyper run --size=s3 -d --name 'failmap-worker-scanner-qualys-%(id)s'
-e WORKER_ROLE="scanner_qualys" -e BROKER=redis://%(url)s:1337/0
-e PASSPHRASE=geheim -e HOST_HOSTNAME='hyperdotsh_qualys_%(id)s'
-e SENTRY_DSN='' -e C_FORCE_ROOT='true'
-v '%(path)s/../client.p12:/client.p12'
registry.gitlab.com/failmap/failmap:latest
celery worker --loglevel info --without-gossip --without-mingle --pool eventlet --concurrency='1'
""" % ({'id': id, 'path': settings.BASE_DIR, 'url': config.SITE_BASE_ADDRESS})
cmd = self.text_to_command(cmd)
self.run("Starting tls worker %s." % id, cmd)
# attach a pre-allocated FIP (named qualys-ip-<id>) to this scanner
cmd = "hyper fip attach qualys-ip-%(id)s failmap-worker-scanner-qualys-%(id)s" % {'id': id}
self.run("Assigning FIP to scanner.", cmd.split(' '))
def text_to_command(self, cmd: str):
# split on any whitespace (spaces and newlines) into an argument list, dropping empty fragments;
# splitting on single spaces only would leave newlines glued to tokens and break the hyper call
return [token for token in re.split(r'\s+', cmd) if token]
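# For example (illustrative input, not part of the command):
#   text_to_command("hyper run\n  --size=s3")  ->  ['hyper', 'run', '--size=s3']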
def check_image_quota(self):
"""Return (running, maximum, available) for images."""
lines = self.run("Current images", ["hyper", "images"])
running = lines.count('\n') - 1  # minus 1 for the column header line
available = HYPER_MAX_IMAGES - running
log.debug("Images: Running: %s, Max: %s, Free: %s" % (running, HYPER_MAX_IMAGES, available))
return running, HYPER_MAX_IMAGES, available
# there is no quota function in the API, like the one they use on their own site, which is annoying as shit.
def check_container_quota(self):
"""Return the number of creatable containers"""
lines = self.run("Current containers", ["hyper", "ps"])
running = lines.count('\n') - 1
available = HYPER_MAX_CONTAINERS - running
log.debug("Containers: Running: %s, Max: %s, Free: %s" % (running, HYPER_MAX_CONTAINERS, available))
return running, HYPER_MAX_CONTAINERS, available
def check_fips_quota(self):
"""Return (running, maximum, available) for floating IPs."""
lines = self.run("Current Floating IPs", ["hyper", "fip", "ls"])
running = lines.count('\n') - 1
available = HYPER_MAX_FIPS - running
log.debug("Fips: Running: %s, Max: %s, Free: %s" % (running, HYPER_MAX_FIPS, available))
return running, HYPER_MAX_FIPS, available
def check_volume_quota(self):
"""Return (running, maximum, available) for volumes."""
lines = self.run("Current volumes", ["hyper", "volume", "ls"])
running = lines.count('\n') - 1
available = HYPER_MAX_VOLUMES - running
log.debug("Volumes: Running: %s, Max: %s, Free: %s" % (running, HYPER_MAX_VOLUMES, available))
return running, HYPER_MAX_VOLUMES, available
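# The four checks above share one pattern; a possible refactor (a sketch only,
# this helper does not exist in the codebase):
#
# def check_quota(self, label, cmd, maximum):
#     lines = self.run(label, cmd)
#     running = lines.count('\n') - 1  # first line of output is the column header
#     return running, maximum, maximum - running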
def debug_quota(self):
self.check_container_quota()
self.check_image_quota()
def run(self, label, cmd):
log.info(label)
log.debug(cmd)
out = subprocess.check_output(cmd)
pretty = out.decode('utf-8').replace('\\n', '\n')
log.info(pretty)
return pretty
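# Note: subprocess.check_output raises CalledProcessError when hyper exits non-zero,
# so any failed hyper invocation aborts the management command with a traceback.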
# start a regular scanner, ipv4 only:
"""
hyper run --size=s3 -d \
--name "failmap-worker-scanner-v4-"$id \
-e WORKER_ROLE="scanner_v4" \
-e BROKER=redis://failmap.co.uk:1337/0 \
-e PASSPHRASE=geheim \
-e HOST_HOSTNAME="hyperdotsh_"$id \
-e SENTRY_DSN="" \
-e C_FORCE_ROOT="true" \
-v "/Applications/XAMPP/xamppfiles/htdocs/failmap/hyper/co.uk/client.p12:/client.p12" \
registry.gitlab.com/failmap/failmap:latest \
celery worker --loglevel info --without-gossip --without-mingle --pool eventlet --concurrency="4"
"""
......@@ -3,6 +3,7 @@ import logging
from django.contrib import admin
from django.utils.html import format_html
from django_fsm_log.admin import StateLogInline
from import_export.admin import ImportExportModelAdmin
from .models import ContainerConfiguration, ContainerEnvironment, ContainerGroup, Credential
......@@ -14,11 +15,11 @@ def environment_strings(obj):
@admin.register(Credential)
class CredentialAdmin(admin.ModelAdmin):
class CredentialAdmin(ImportExportModelAdmin, admin.ModelAdmin):
list_display = ('name', 'enabled', 'valid', 'last_validated', 'used_by_group')
readonly_fields = ('valid',)
actions = ('validate',)
actions = ('validate', 'nuke', 'status')
def used_by_group(self, obj):
return ",".join("{0.__class__.__name__}({0.name})".format(x) for x in obj.containergroup_set.all())
......@@ -29,9 +30,31 @@ class CredentialAdmin(admin.ModelAdmin):
m.validate()
validate.short_description = "Validate against Hyper.sh API."
def nuke(self, request, queryset):
"""Remove all containers, images and volumes using these credentials."""
for m in queryset:
m.nuke()
nuke.short_description = "💥 Remove all containers, volumes and images."
def status(self, request, queryset):
"""Get a status report on all containers etc"""
from django.http import HttpResponse
content = "<pre>"
for m in queryset:
content += "Status for credentials set %s:" % m.pk
content += m.hyper_status()
content += "</pre>"
return HttpResponse(content)
status.short_description = "Get status report"
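# Note (suggestion, not in the original): hyper_status() output is inserted into the
# HTML response unescaped; wrapping it in django.utils.html.escape() would be safer
# if that output can ever contain markup.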
@admin.register(ContainerEnvironment)
class ContainerEnvironmentAdmin(admin.ModelAdmin):
class ContainerEnvironmentAdmin(ImportExportModelAdmin, admin.ModelAdmin):
list_display = ('name', 'value', 'used_by_group', 'used_by_configuration')
readonly_fields = ('configuration', 'group')
......@@ -43,7 +66,7 @@ class ContainerEnvironmentAdmin(admin.ModelAdmin):
@admin.register(ContainerGroup)
class ContainerGroupAdmin(admin.ModelAdmin):
class ContainerGroupAdmin(ImportExportModelAdmin, admin.ModelAdmin):
list_display = (
'name',
'credential',
......@@ -109,8 +132,8 @@ class ContainerGroupAdmin(admin.ModelAdmin):
@admin.register(ContainerConfiguration)
class ContainerConfigurationAdmin(admin.ModelAdmin):
list_display = ('name', 'image', 'command', environment_strings, 'used_by_group')
class ContainerConfigurationAdmin(ImportExportModelAdmin, admin.ModelAdmin):
list_display = ('name', 'image', 'command', environment_strings, 'used_by_group', 'requires_unique_ip')
def used_by_group(self, obj):
return ",".join("{0.__class__.__name__}({0.name})".format(x) for x in obj.containergroup_set.all())
import logging
import flower.utils.broker
from failmap.celery import app
from failmap.celery.worker import WORKER_QUEUE_CONFIGURATION
from failmap.hypersh.models import ContainerGroup
log = logging.getLogger(__name__)
@app.task
def autoscale():
"""Calculates the number of needed scanners based on the number of tasks in the queue.
Currently this only handles Qualys tasks, to see how / if it works. The ContainerGroup that is
autoscaled has to be named "Qualys scanners".
While the calculation is fast, scaling is pretty slow. Run this once every hour and see if it can be sped up.
"""
cg = ContainerGroup.objects.all().get(name="Qualys scanners")
if 'redis://' in app.conf.broker_url:
queue_names = [q.name for q in WORKER_QUEUE_CONFIGURATION['default']]
# use flower to not reinvent the wheel on querying queue statistics
broker = flower.utils.broker.Broker(app.conf.broker_url, broker_options=app.conf.broker_transport_options)
queue_stats = broker.queues(queue_names).result()
for stat in queue_stats:
if stat['name'] == "scanners.qualys" and stat['messages'] > 0:
cg.desired = cg.maximum
cg.save(update_fields=['desired'])
log.info("Qualys scanners are scaling up to maximum.")
elif stat['name'] == "scanners.qualys" and stat['messages'] == 0:
cg.desired = 0
cg.save(update_fields=['desired'])
log.info("Qualys scanners are scaling back to 0.")
# Generated by Django 2.0.8 on 2018-09-25 12:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hypersh', '0007_auto_20180405_1402'),
]
operations = [
migrations.AddField(
model_name='credential',
name='communication_certificate',
field=models.TextField(blank=True, help_text="A Base64 representation of a valid failmap .p12 certificate. You can create these yourself and use these certificates to enter the admin interface. This certificate encrypts traffic between failmap and its workers. Do not share this certificate with anyone else, as they might get access to the admin interface. (This feature will hopefully be implemented better / more easily someday.)", max_length=8000, null=True),
),
migrations.AlterField(
model_name='credential',
name='region',
field=models.CharField(
help_text='Currently choose between: eu-central-1 and us-west-1. See https://docs.hyper.sh/hyper/Introduction/region.html', max_length=64),
),
]
# Generated by Django 2.0.8 on 2018-09-25 15:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hypersh', '0008_auto_20180925_1259'),
]
operations = [
migrations.AlterField(
model_name='credential',
name='communication_certificate',
field=models.TextField(blank=True, help_text="A Base64 representation of a valid failmap .p12 certificate. You can create these yourself and use these certificates to enter the admin interface. This certificate encrypts traffic between failmap and its workers. Do not share this certificate with anyone else, as they might get access to the admin interface. (This feature will hopefully be implemented better / more easily someday.) You can create this value by running: base64 filename. Do not use an online base64 service, as that would leak your certificate :).", max_length=8000, null=True),
),
]
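# The help_text above suggests `base64 filename`; an equivalent offline Python snippet
# (illustrative, not part of this migration):
#
#   import base64
#   with open('client.p12', 'rb') as f:
#       print(base64.b64encode(f.read()).decode('ascii'))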
# Generated by Django 2.0.8 on 2018-09-26 10:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hypersh', '0009_auto_20180925_1502'),
]
operations = [
migrations.AddField(
model_name='containerconfiguration',
name='requires_unique_ip',
field=models.BooleanField(
default=False, help_text='When set to true, a FIP is connected to this container. Make sure those are available.'),
),
migrations.AlterField(
model_name='containerconfiguration',
name='instance_type',
field=models.CharField(
default='S1', help_text='Container sizes are described here: https://hyper.sh/hyper/pricing.html - In most cases S3 will suffice. The smaller, the cheaper.', max_length=2),
),
]
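# Note: requires_unique_ip corresponds to attaching a hyper.sh floating IP (FIP) to the
# container, as done via `hyper fip attach` in the management command earlier in this commit.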
......@@ -36,7 +36,7 @@ https://stackoverflow.com/questions/41723087/get-administrative-borders-with-ove
"""
def get_resampling_resolution(country: str="NL", organization_type: str="municipality"):
def get_resampling_resolution(country: str = "NL", organization_type: str = "municipality"):
resolution = AdministrativeRegion.objects.all().filter(
country=country,
organization_type__name=organization_type).values_list('resampling_resolution', flat=True).first()
......@@ -47,7 +47,7 @@ def get_resampling_resolution(country: str="NL", organization_type: str="municip
return resolution
def get_region(country: str="NL", organization_type: str="municipality"):
def get_region(country: str = "NL", organization_type: str = "municipality"):
return AdministrativeRegion.objects.all().filter(
country=country,
organization_type__name=organization_type).values_list('admin_level', flat=True).first()
......@@ -59,7 +59,7 @@ def get_region(country: str="NL", organization_type: str="municipality"):
# better to validate that the region doesn't exist and then add it...
# @transaction.atomic
@app.task(queue="scanners")
def import_from_scratch(countries: List[str]=None, organization_types: List[str]=None, when=None):
def import_from_scratch(countries: List[str] = None, organization_types: List[str] = None, when=None):
"""
Run this when you have nothing on the organization type in that country. It will help bootstrap a
certain region.
......@@ -108,7 +108,7 @@ def import_from_scratch(countries: List[str]=None, organization_types: List[str]
# @transaction.atomic
@app.task(queue="scanners")
def update_coordinates(country: str = "NL", organization_type: str="municipality", when=None):
def update_coordinates(country: str = "NL", organization_type: str = "municipality", when=None):
if not osmtogeojson_available():
raise FileNotFoundError("osmtogeojson was not found. Please install it and make sure python can access it. "
......@@ -139,7 +139,7 @@ def update_coordinates(country: str = "NL", organization_type: str="municipality
log.info("Resampling and update tasks have been created.")
def resample(feature: Dict, resampling_resolution: float=0.001):
def resample(feature: Dict, resampling_resolution: float = 0.001):
# downsample the coordinates using the rdp algorithm, mainly to reduce 50 megabytes to about 150 kilobytes.
# The code is a little bit dirty, using these counters. If you can refactor, please do :)
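# Sketch of that downsampling step, assuming the `rdp` PyPI package (the import is
# outside this diff, so this is illustrative; coordinates are made up):
#
#   from rdp import rdp
#   simplified = rdp([[4.89, 52.37], [4.90, 52.37], [4.95, 52.40]], epsilon=resampling_resolution)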
......@@ -166,7 +166,7 @@ def resample(feature: Dict, resampling_resolution: float=0.001):
return feature
def store_new(feature: Dict, country: str="NL", organization_type: str="municipality", when=None):
def store_new(feature: Dict, country: str = "NL", organization_type: str = "municipality", when=None):
properties = feature["properties"]
coordinates = feature["geometry"]
......@@ -297,7 +297,7 @@ def store_new(feature: Dict, country: str="NL", organization_type: str="municipa
log.info("Also found a top level website for this organization: %s" % website)
def store_updates(feature: Dict, country: str="NL", organization_type: str="municipality", when=None):
def store_updates(feature: Dict, country: str = "NL", organization_type: str = "municipality", when=None):
properties = feature["properties"]
coordinates = feature["geometry"]
......@@ -407,7 +407,7 @@ def store_updates(feature: Dict, country: str="NL", organization_type: str="muni
log.info("Stored new coordinates!")
def get_osm_data_wambachers(country: str= "NL", organization_type: str= "municipality"):
def get_osm_data_wambachers(country: str = "NL", organization_type: str = "municipality"):
# uses https://wambachers-osm.website/boundaries/ to download map data. Might not be the most up to date, but it has
# a more complete and better set of queries. For example: it DOES get Northern Ireland and it clips out the sea,
# which makes it very nice to look at.
......@@ -479,7 +479,7 @@ def get_osm_data_wambachers(country: str= "NL", organization_type: str= "municip
return data
def get_osm_data(country: str= "NL", organization_type: str= "municipality"):
def get_osm_data(country: str = "NL", organization_type: str = "municipality"):
"""
Runs an overpass query that results in a set with administrative borders and points with metadata.
......
......@@ -94,7 +94,7 @@ def rerate_organizations(organizations: List):
@app.task(queue='storage')
def add_organization_rating(organizations: List[Organization], build_history: bool=False, when: datetime=None):
def add_organization_rating(organizations: List[Organization], build_history: bool = False, when: datetime = None):
"""
:param organizations: List of organizations
:param build_history: Optional. Find all relevant moments of this organization and create a rating
......@@ -116,7 +116,7 @@ def add_organization_rating(organizations: List[Organization], build_history: bo
rate_organization_on_moment(organization, when)
def add_url_rating(urls: List[Url], build_history: bool=False, when: datetime=None):
def add_url_rating(urls: List[Url], build_history: bool = False, when: datetime = None):
if when:
assert isinstance(when, datetime)
......@@ -136,7 +136,7 @@ def delete_organization_ratings(organization: Organization):
OrganizationRating.objects.all().filter(organization=organization).delete()
def significant_moments(organizations: List[Organization]=None, urls: List[Url]=None):
def significant_moments(organizations: List[Organization] = None, urls: List[Url] = None):
"""
Searches for all significant points in time where something changed. The goal is to save
unneeded queries when rebuilding ratings. When you know when things changed, you know
......@@ -729,13 +729,13 @@ def rate_timeline(timeline, url: Url):
def save_url_rating(url: Url, date: datetime, high: int, medium: int, low: int, calculation,
total_issues: int=0, total_endpoints: int=0,
high_endpoints: int=0, medium_endpoints: int=0, low_endpoints: int=0,
total_url_issues: int=0, total_endpoint_issues: int=0,
url_issues_high: int=0, url_issues_medium: int=0, url_issues_low: int=0,
endpoint_issues_high: int=0, endpoint_issues_medium: int=0, endpoint_issues_low: int=0,
total_issues: int = 0, total_endpoints: int = 0,
high_endpoints: int = 0, medium_endpoints: int = 0, low_endpoints: int = 0,
total_url_issues: int = 0, total_endpoint_issues: int = 0,
url_issues_high: int = 0, url_issues_medium: int = 0, url_issues_low: int = 0,
endpoint_issues_high: int = 0, endpoint_issues_medium: int = 0, endpoint_issues_low: int = 0,
explained_high: int=0, explained_medium: int=0, explained_low: int=0,
explained_high: int = 0, explained_medium: int = 0, explained_low: int = 0,
explained_total_issues: int = 0, explained_high_endpoints: int = 0,
explained_medium_endpoints: int = 0, explained_low_endpoints: int = 0,
explained_total_url_issues: int = 0, explained_total_endpoint_issues: int = 0,
......@@ -861,7 +861,7 @@ def show_timeline_console(timeline, url: Url):
# also callable as admin action
# this is 100% based on url ratings, just an aggregate of the last status.
# make sure the URL ratings are up to date, they will check endpoints and such.
def rate_organization_on_moment(organization: Organization, when: datetime=None):
def rate_organization_on_moment(organization: Organization, when: datetime = None):
# If there is no time slicing, then it's today.
if not when:
when = datetime.now(pytz.utc)
......@@ -1068,7 +1068,7 @@ def get_latest_urlratings_fast(urls: List[Url], when):
# but this will give the correct score, possibly on the wrong endpoints (why?)
def rate_url(url: Url, when: datetime=None):
def rate_url(url: Url, when: datetime = None):
if not when:
when = datetime.now(pytz.utc)
......@@ -1100,7 +1100,7 @@ def rate_url(url: Url, when: datetime=None):
log.warning("The calculation is still the same, not creating a new UrlRating")
def get_url_score_modular(url: Url, when: datetime=None):
def get_url_score_modular(url: Url, when: datetime = None):
if not when:
when = datetime.now(pytz.utc)
......
......@@ -127,7 +127,7 @@ def get_default_category(request, ):
return JsonResponse([organization_type], safe=False, encoder=JSEncoder)
def get_default_category_for_country(request, country: str="NL"):
def get_default_category_for_country(request, country: str = "NL"):
organization_type = Configuration.objects.all().filter(
is_displayed=True,
......@@ -156,7 +156,7 @@ def get_countries(request,):
return JsonResponse(list, safe=False, encoder=JSEncoder)
def get_categories(request, country: str="NL"):
def get_categories(request, country: str = "NL"):
categories = Configuration.objects.all().filter(
country=get_country(country),
......@@ -166,7 +166,7 @@ def get_categories(request, country: str="NL"):
return JsonResponse(list(categories), safe=False, encoder=JSEncoder)
def generic_export(query, set, country: str="NL", organization_type="municipality"):
def generic_export(query, set, country: str = "NL", organization_type="municipality"):
"""
This dataset can be imported blindly into another instance using the admin interface.
......@@ -195,7 +195,7 @@ def generic_export(query, set, country: str="NL", organization_type="municipalit
@cache_page(one_day)
def export_urls_only(request, country: str="NL", organization_type="municipality",):
def export_urls_only(request, country: str = "NL", organization_type="municipality",):
query = Url.objects.all().filter(
is_dead=False,
not_resolvable=False,
......@@ -208,7 +208,7 @@ def export_urls_only(request, country: str="NL", organization_type="municipality
@cache_page(one_day)
def export_organizations(request, country: str="NL", organization_type="municipality",):
def export_organizations(request, country: str = "NL", organization_type="municipality",):
query = Organization.objects.all().filter(
country=get_country(country),
type=get_organization_type(organization_type),
......@@ -219,13 +219,13 @@ def export_organizations(request, country: str="NL", organization_type="municipa
@cache_page(one_day)
def export_organization_types(request, country: str="NL", organization_type="municipality"):
def export_organization_types(request, country: str = "NL", organization_type="municipality"):
query = OrganizationType.objects.all().values('name')
return generic_export(query, 'organization_types', country, organization_type)
@cache_page(one_day)
def export_coordinates(request, country: str="NL", organization_type="municipality",):
def export_coordinates(request, country: str = "NL", organization_type="municipality",):
organizations = Organization.objects.all().filter(
country=get_country(country),
type=get_organization_type(organization_type))
......@@ -238,7 +238,7 @@ def export_coordinates(request, country: str="NL", organization_type="municipali
@cache_page(one_day)
def export_urls(request, country: str="NL", organization_type="municipality"):
def export_urls(request, country: str = "NL", organization_type="municipality"):
query = Url.objects.all().filter(
organization__in=Organization.objects.all().filter(
country=get_country(country),
......@@ -348,7 +348,7 @@ def organizationtype_exists(request, organization_type_name):
@cache_page(ten_minutes)
def organization_report(request, country: str="NL", organization_type="municipality",
def organization_report(request, country: str = "NL", organization_type="municipality",
organization_id=None, organization_name=None, weeks_back=0):
# urls with /data/report// (two slashes)
if not organization_id and not organization_name:
......@@ -414,7 +414,7 @@ def string_to_delta(string_delta):
@cache_page(one_day)
def terrible_urls(request, country: str="NL", organization_type="municipality", weeks_back=0):
def terrible_urls(request, country: str = "NL", organization_type="municipality", weeks_back=0):
# this would only work if the latest endpoint is actually correct.
# currently this goes wrong when the endpoints are dead but the url still resolves.
# then there should be a url rating of 0 (as there are no endpoints). But we don't save that yet.
......@@ -502,7 +502,7 @@ def terrible_urls(request, country: str="NL", organization_type="municipality",
@cache_page(one_hour)
def top_fail(request, country: str="NL", organization_type="municipality", weeks_back=0):
def top_fail(request, country: str = "NL", organization_type="municipality", weeks_back=0):
if not weeks_back:
when = datetime.now(pytz.utc)
......@@ -585,7 +585,7 @@ def top_fail(request, country: str="NL", organization_type="municipality", weeks
# @cache_page(cache_time)
def top_win(request, country: str="NL", organization_type="municipality", weeks_back=0):
def top_win(request, country: str = "NL", organization_type="municipality", weeks_back=0):
if not weeks_back:
when = datetime.now(pytz.utc)
......@@ -685,7 +685,7 @@ def stats_determine_when(stat, weeks_back=0):