added new logo, deduped onboarding

parent fd97a480
<svg xmlns="http://www.w3.org/2000/svg" width="567" height="567"><path fill="red" d="M56 56v454h454V56H56zM356 206h-99v63h57v41h-57V404h-47V165H356"/></svg>
\ No newline at end of file
# Concept
Color: red is widely read as "no" or "stop", as in traffic lights and signs.
The "F", the first letter of Fail Map, is the grade for failure in the American grading system.
The square is an abstraction of a map.
# Design
- Dimensions: 20x20cm
- Square shape: 16x16cm (2cm padding on all sides)
- Text:
  - Font: News Gothic MT Bold
  - Font size: 320pt
  - Value: capital letter F
  - Placement: optically centered (by eye, not mathematically)
- Color: red
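For reference, and assuming the artwork was exported at 72 points per inch (a common Illustrator default; the design notes don't state it), these centimetre values are exactly where the numbers in the SVG below come from:

```python
# Assumed export resolution: 72 pt per inch; 2.54 cm per inch.
CM_TO_UNITS = 72 / 2.54

print(20 * CM_TO_UNITS)  # 566.93... -> the original 566.93 width/height/viewBox
print(2 * CM_TO_UNITS)   #  56.69... -> the ~56 unit offset of the square in the path
print(16 * CM_TO_UNITS)  # 453.54... -> the 454 unit square ("v454h454") after rounding
```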
# Optimize
Round off the path coordinates with http://www.scriptcompress.com/SVG-minifier.htm
(settings: Extreme, customized to 0 decimal places).
Then strip the leftover export attributes (a scripted sketch of these removals follows the list):
- Remove xmlns:xlink="http://www.w3.org/1999/xlink" (the file contains no xlink references).
- Remove version="1.1" (browsers ignore it).
- Remove id="Layer_1" (there are no layers).
- Remove x="0px" y="0px" (these are the defaults).
- Remove viewBox="0 0 566.93 566.93" (redundant here: width and height already describe the same coordinate system).
- Remove enable-background="new 0 0 566.93 566.93" (an Illustrator export attribute with no effect in browsers).
- Remove xml:space="preserve" (it only affects whitespace in text content, of which there is none).
- Remove the trailing Z from the path: open subpaths are closed automatically when filled.
- Remove the V206 at the end of the path for the same reason: the fill closes the subpath with exactly that line.
- Remove whitespace and newlines.
- Remove the decimals from width and height (rounding 566.93 to 567).
- Replace the color "#ED1E24" with the shorter keyword "red".
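A rough sketch of these attribute removals as a script, in case you want to reproduce them. The attribute values are the ones an Illustrator export typically emits (an assumption), and the path-level edits (coordinate rounding, dropping the final Z and V206) are left to the minifier and manual editing:

```python
import re

# Attributes the steps above remove from the <svg> element (regexes assume the
# double-quoted, single-element structure of this particular export).
DROPPED_ATTRIBUTES = [
    r'\s+xmlns:xlink="[^"]*"',
    r'\s+version="[^"]*"',
    r'\s+id="[^"]*"',
    r'\s+x="0px"\s+y="0px"',
    r'\s+viewBox="[^"]*"',
    r'\s+enable-background="[^"]*"',
    r'\s+xml:space="preserve"',
]


def strip_svg_bloat(svg: str) -> str:
    for pattern in DROPPED_ATTRIBUTES:
        svg = re.sub(pattern, "", svg)
    svg = svg.replace("#ED1E24", "red")  # shorter color keyword
    svg = re.sub(r">\s+<", "><", svg)    # whitespace and newlines between tags
    return svg.strip()
```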
# Result
The result is a 156-byte logo: a single path plus the minimal remaining SVG boilerplate.
Whitespace is added here for readability:
<svg xmlns="http://www.w3.org/2000/svg" width="567" height="567">
<path fill="red" d="M56 56v454h454V56H56zM356 206h-99v63h57v41h-57V404h-47V165H356"/>
</svg>
@@ -9,12 +9,13 @@ from jet.admin import CompactInline
 from failmap_admin.map.rating import OrganizationRating, UrlRating, rate_organization_on_moment
 from failmap_admin.scanners.admin import UrlIpInline
 from failmap_admin.scanners.models import Endpoint
-from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency_scan
-from failmap_admin.scanners.scanner_http import scan_urls_on_standard_ports
+from failmap_admin.scanners.scanner_dns import brute_known_subdomains, certificate_transparency, nsec
+import failmap_admin.scanners.scanner_http as scanner_http
 from failmap_admin.scanners.scanner_plain_http import scan_urls as plain_http_scan_urls
 from failmap_admin.scanners.scanner_screenshot import screenshot_urls
 from failmap_admin.scanners.scanner_security_headers import scan_urls as security_headers_scan_urls
 from failmap_admin.scanners.scanner_tls_qualys import scan_urls as tls_qualys_scan_urls
+from failmap_admin.scanners.onboard import onboard_urls
 from ..app.models import Job
 from ..celery import PRIO_HIGH
@@ -149,38 +150,31 @@ class UrlAdmin(admin.ModelAdmin):
     actions = []
     def onboard(self, request, queryset):
-        # todo, sequentially doesn't matter if you only use tasks :)
-        # currently it might crash given there are no endpoints yet to process...
-        for url in queryset:
-            if url.is_top_level():
-                brute_known_subdomains([url])
-                certificate_transparency_scan([url])
-            scan_urls_on_standard_ports([url])  # discover endpoints
-            plain_http_scan_urls([url])  # see if there is missing https
-            security_headers_scan_urls([url])
-            screenshot_urls([url])
-            url.onboarded = True
-            url.onboarded_on = datetime.now(pytz.utc)
-            url.save()
-        self.message_user(request, "Onboard: Done")
+        onboard_urls(urls=list(queryset))
+        self.message_user(request, "URL(s) have been scanned on known subdomains: Done")
     actions.append('onboard')
-    onboard.short_description = "🔮 Onboard (dns, endpoints, scans, screenshot)"
+    onboard.short_description = "🔮 Onboard (discover subdomains and endpoints, http scans, screenshot)"
     def dns_certificate_transparency(self, request, queryset):
-        certificate_transparency_scan([url for url in queryset])
+        certificate_transparency(urls=list(queryset))
         self.message_user(request, "URL(s) have been scanned on known subdomains: Done")
     actions.append('dns_certificate_transparency')
-    dns_certificate_transparency.short_description = "🗺 Discover subdomains (using certificate transparency)"
+    dns_certificate_transparency.short_description = "🗺 +subdomains (certificate transparency)"
     def dns_known_subdomains(self, request, queryset):
-        brute_known_subdomains([url for url in queryset])
+        brute_known_subdomains(urls=list(queryset))
         self.message_user(request, "Discover subdomains (using known subdomains): Done")
-    dns_known_subdomains.short_description = "🗺 Discover subdomains (using known subdomains)"
+    dns_known_subdomains.short_description = "🗺 +subdomains (known subdomains)"
     actions.append('dns_known_subdomains')
+    def dns_nsec(self, request, queryset):
+        nsec(urls=list(queryset))
+        self.message_user(request, "Discover subdomains (using nsec): Done")
+    dns_nsec.short_description = "🗺 +subdomains (nsec)"
+    actions.append('dns_nsec')
     def discover_http_endpoints(self, request, queryset):
-        scan_urls_on_standard_ports([url for url in queryset])
+        scanner_http.discover_endpoints(urls=list(queryset))
         self.message_user(request, "Discover http(s) endpoints: Done")
     discover_http_endpoints.short_description = "🗺 Discover http(s) endpoints"
     actions.append('discover_http_endpoints')
@@ -189,7 +183,7 @@ class UrlAdmin(admin.ModelAdmin):
         # create a celery task and use Job object to keep track of the status
         urls = list(queryset)
         task = tls_qualys_scan_urls(urls=urls, execute=False)
-        name = "Scan TLS Qualys (%s) " % str(urls)
+        name = "Scan TLS Qualys (%s) " % str(urls)
         job = Job.create(task, name, request, priority=PRIO_HIGH)
         link = reverse('admin:app_job_change', args=(job.id,))
         self.message_user(request, '%s: job created, id: <a href="%s">%s</a>' % (name, link, str(job)))
......
 import logging
-from datetime import datetime
 from time import sleep
-import pytz
 from django.core.management.base import BaseCommand
-import failmap_admin.scanners.scanner_http as scanner_http
-import failmap_admin.scanners.scanner_plain_http as scanner_plain_http
-from failmap_admin.organizations.models import Url
-from failmap_admin.scanners.scanner_dns import (brute_known_subdomains,
-                                                certificate_transparency_scan, nsec_scan)
-from failmap_admin.scanners.scanner_screenshot import screenshot_urls
+from failmap_admin.scanners.onboard import onboard_new_urls
 logger = logging.getLogger(__package__)
@@ -26,7 +19,7 @@ def runservice():
     try:
         logger.info("Started onboarding.")
         while True:
-            onboard()
+            onboard_new_urls()
             logger.info("Waiting for urls to be onboarded. Sleeping for 60 seconds.")
             sleep(60)
     except KeyboardInterrupt:
@@ -36,50 +29,3 @@ def runservice():
             runservice()
         else:
             logger.info("Stopped onboarding.")
-def onboard():
-    urls = gather()
-    for url in urls:
-        # scan for http/https endpoints
-        if url.is_top_level():
-            # some DNS scans, to find more urls to onboard.
-            brute_known_subdomains([url])
-            certificate_transparency_scan([url])
-            nsec_scan([url])
-        scanner_http.discover_endpoints(urls=[url])
-        scanner_plain_http.scan_urls([url])
-        screenshot_urls([url])
-        # tls scans are picked up by scanner_tls_qualys and may take a while.
-        # other scans the same. They will do the ratings.
-        url.onboarded = True
-        url.onboarded_on = datetime.now(pytz.utc)
-        url.save()
-def gather():
-    never_onboarded = Url.objects.all().filter(onboarded=False)
-    if never_onboarded.count() > 0:
-        cyber = """
-................................................................................
-.......-:////:.....:-.......::...-///////:-......://////////:..../////////:.....
-...../mMMMMMMMN...NMM+.....hMMy..+MMMMMMMMMNy-...dMMMMMMMMMMMN..-MMMMMMMMMMNy...
-....+MMMhsssss/...MMMd-.../NMMd..+MMMyssssmMMN-..dMMNssssssss/..-MMMdsssssNMMy..
-...+MMMy........../mMMNo-yMMMh-..+MMM:....:MMM+..dMMm...........-MMMy+++++NMMh..
-../MMMy.............sMMMMMMm/....+MMMo+++odMMM:..dMMm+++/.......-MMMMMMMMMMMd-..
-..hMMN...............:dMMMy......+MMMMMMMMMMMo...dMMMMMMM/......-MMMhhMMMd+-....
-../MMMy...............oMMM-......+MMMo++++dMMM:..dMMm+++/.......-MMMo.sMMMs.....
-...+MMMy..............oMMM-......+MMM:....:MMM+..dMMm...........-MMMo..+MMMh....
-....+MMMdsssss/.......oMMM-......+MMMysssymMMN-..dMMNssssssss/..-MMMo.../NMMm-..
-...../dMMMMMMMN......./MMN.......+MMMMMMMMMNy-...dMMMMMMMMMMMN...NMM+....-mMMs..
-.......-::::::.........-:........-::::::::-......::::::::::::.....:-.......::...
-................................................................................
-        """
-        logger.info("There are %s new urls to onboard! %s" % (never_onboarded.count(), cyber))
-    return never_onboarded
import logging
from datetime import datetime
import pytz
from typing import List
import failmap_admin.scanners.scanner_http as scanner_http
import failmap_admin.scanners.scanner_plain_http as scanner_plain_http
from failmap_admin.organizations.models import Url
from failmap_admin.scanners.scanner_dns import (brute_known_subdomains, certificate_transparency, nsec_scan)
from failmap_admin.scanners.scanner_screenshot import screenshot_urls
from ..celery import app

logger = logging.getLogger(__package__)


def onboard_new_urls():
    never_onboarded = Url.objects.all().filter(onboarded=False)
    if never_onboarded.count() > 0:
        cyber = """
................................................................................
.......-:////:.....:-.......::...-///////:-......://////////:..../////////:.....
...../mMMMMMMMN...NMM+.....hMMy..+MMMMMMMMMNy-...dMMMMMMMMMMMN..-MMMMMMMMMMNy...
....+MMMhsssss/...MMMd-.../NMMd..+MMMyssssmMMN-..dMMNssssssss/..-MMMdsssssNMMy..
...+MMMy........../mMMNo-yMMMh-..+MMM:....:MMM+..dMMm...........-MMMy+++++NMMh..
../MMMy.............sMMMMMMm/....+MMMo+++odMMM:..dMMm+++/.......-MMMMMMMMMMMd-..
..hMMN...............:dMMMy......+MMMMMMMMMMMo...dMMMMMMM/......-MMMhhMMMd+-....
../MMMy...............oMMM-......+MMMo++++dMMM:..dMMm+++/.......-MMMo.sMMMs.....
...+MMMy..............oMMM-......+MMM:....:MMM+..dMMm...........-MMMo..+MMMh....
....+MMMdsssss/.......oMMM-......+MMMysssymMMN-..dMMNssssssss/..-MMMo.../NMMm-..
...../dMMMMMMMN......./MMN.......+MMMMMMMMMNy-...dMMMMMMMMMMMN...NMM+....-mMMs..
.......-::::::.........-:........-::::::::-......::::::::::::.....:-.......::...
................................................................................
"""
logger.info("There are %s new urls to onboard! %s" % (never_onboarded.count(), cyber))
onboard_urls(never_onboarded)
@app.task
def onboard_urls(urls: List[Url]):
for url in urls:
if url.is_top_level():
brute_known_subdomains(urls=[url])
certificate_transparency(urls=[url])
nsec_scan(urls=[url])
scanner_http.discover_endpoints(urls=[url])
scanner_plain_http.scan_urls(urls=[url])
screenshot_urls(urls=[url])
# todo: add qualys tasks.
url.onboarded = True
url.onboarded_on = datetime.now(pytz.utc)
url.save()
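A side note on the @app.task decorator used above: calling the task object directly, as the admin onboard action does, runs it synchronously in the calling process; handing it to a worker requires delay()/apply_async(). A minimal sketch of both call styles, assuming a Celery serializer that can handle Url model instances (not shown in this commit):

```python
# Hypothetical call site, not part of the commit.
from failmap_admin.organizations.models import Url
from failmap_admin.scanners.onboard import onboard_urls

urls = list(Url.objects.filter(onboarded=False)[:10])

onboard_urls(urls=urls)        # executes the task body right here, synchronously
onboard_urls.delay(urls=urls)  # queues it for a Celery worker instead
```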