lint

parent 14a68b12
@@ -3,9 +3,9 @@ import proxy.views
 from django.conf import settings
 from django.conf.urls import url
-from failmap_admin.map.views import (index, latest_scans, manifest_json, map_data,
+from failmap_admin.map.views import (LatestScanFeed, index, latest_scans, manifest_json, map_data,
                                      organization_report, robots_txt, security_txt, stats,
-                                     terrible_urls, topfail, topwin, vulnstats, wanted_urls, LatestScanFeed)
+                                     terrible_urls, topfail, topwin, vulnstats, wanted_urls)
 
 urlpatterns = [
     url(r'^security.txt$', security_txt),
......
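The hunk above only moves LatestScanFeed to its alphabetical position in the import; the feed is mounted elsewhere in urlpatterns. For orientation, a minimal sketch of how a syndication feed is typically mounted in this Django era. The route and the scan_type parameter are illustrative assumptions, not taken from this commit:

# Sketch: Feed subclasses are class-based views, so the route holds an
# *instance* of the class. The URL pattern below is hypothetical.
from django.conf.urls import url
from failmap_admin.map.views import LatestScanFeed

urlpatterns = [
    url(r'^data/feed/(?P<scan_type>[a-z_]+)$', LatestScanFeed()),
]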
@@ -5,6 +5,7 @@ from datetime import datetime, timedelta
 import pytz
 from dateutil.relativedelta import relativedelta
 from django.conf import settings
+from django.contrib.syndication.views import Feed
 from django.db import connection
 from django.db.models import Count
 from django.http import JsonResponse
@@ -12,9 +13,6 @@ from django.shortcuts import render
 from django.utils.translation import ugettext as _
 from django.views.decorators.cache import cache_page
-from django.contrib.syndication.views import Feed
-from django.urls import reverse
-
 from failmap_admin.map.models import OrganizationRating, UrlRating
 from failmap_admin.organizations.models import Organization, Promise, Url
 from failmap_admin.scanners.models import EndpointGenericScan, TlsQualysScan
......
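This hunk moves the Feed import into the sorted django group and drops the now-unused reverse import. For context, a minimal sketch of the kind of Feed subclass this import supports; the titles, queryset, and field names are assumptions, not the actual LatestScanFeed:

from django.contrib.syndication.views import Feed

class LatestScanFeed(Feed):
    # All strings and the queryset below are illustrative.
    title = "Latest scans"
    link = "/data/"
    description = "Most recently performed scans."

    def items(self):
        # Hypothetical ordering field on the scan model.
        return TlsQualysScan.objects.order_by('-last_scan_moment')[:30]

    def item_title(self, item):
        return str(item)

    def item_description(self, item):
        return str(item)

    def item_link(self, item):
        # Placeholder; a real feed would link to a report page per item.
        return "/"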
@@ -264,8 +264,8 @@ def can_connect(protocol: str, url: Url, port: int, ip: str):
     This can lead to interesting behavior: the browser times out.
 
     https://stackoverflow.com/questions/43156023/what-is-http-host-header#43156094
-    # Certificate did not match expected hostname: 85.119.104.84.
+    # Certificate did not match expected hostname: 85.119.104.84.
     Certificate: {'subject': ((('commonName', 'webdiensten.drechtsteden.nl'),),)
     """
     r = requests.get(uri, timeout=(30, 30),
......
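For context on the docstring above: requests verifies the certificate against the host in the URI, not against the Host header, so connecting to an IP while sending the original hostname in the Host header fails verification exactly as quoted. A minimal sketch of that situation, with the address and hostname taken from the docstring and the rest illustrative:

import requests

uri = "https://85.119.104.84/"
try:
    # The Host header carries the site name, but TLS verification still uses
    # the IP from the URI, so a certificate issued for
    # webdiensten.drechtsteden.nl does not match.
    r = requests.get(uri, timeout=(30, 30),
                     headers={'Host': 'webdiensten.drechtsteden.nl'})
except requests.exceptions.SSLError:
    # e.g. "Certificate did not match expected hostname: 85.119.104.84"
    pass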
@@ -70,12 +70,12 @@ def scan_urls(urls: List[Url], execute: bool=True, priority: int=PRIO_NORMAL):
 def scan_multithreaded(urls: List[Url]):
-    from multiprocessing import Pool
-    pool = Pool(processes=4)
+    from multiprocessing import Pool
+    pool = Pool(processes=4)
 
-    for url in urls:
-        pool.apply_async(scan_task, [url])
-        sleep(60)
+    for url in urls:
+        pool.apply_async(scan_task, [url])
+        sleep(60)
 
 def compose(organizations: List[Organization]=None, urls: List[Url]=None):
......
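One observation on scan_multithreaded as shown above: apply_async returns immediately and the pool is never closed or joined, so outstanding tasks can be cut off when the pool is garbage-collected. A sketch with an explicit shutdown, assuming the sleep is a per-submission rate limit as in the hunk; the close()/join() calls are the standard Pool shutdown sequence, not part of this commit:

from multiprocessing import Pool
from time import sleep

def scan_multithreaded(urls):
    pool = Pool(processes=4)
    for url in urls:
        pool.apply_async(scan_task, [url])  # scan_task as in the hunk above
        sleep(60)  # rate limit between submissions, as in the original
    pool.close()   # no more work will be submitted
    pool.join()    # block until queued scan tasks finish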