Commit fe044f5a authored by Elger Jonker

CLI documentation, autogen now available, added --list to import coordinates, added debug scanner


Former-commit-id: 9b167766
parent b0763971
......@@ -39,7 +39,7 @@ source_parsers = {
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc']
extensions = ['sphinx.ext.autodoc', 'celery.contrib.sphinx', 'sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_documentation_templates']
......@@ -173,4 +173,14 @@ texinfo_documents = [
]
celery_task_prefix = '(task)' # < default
import sys, os
sys.path.insert(0, os.path.join(os.path.abspath('.'), '../../../failmap'))
os.environ['DJANGO_SETTINGS_MODULE'] = 'failmap.settings'
# from django.conf import settings
# settings.configure() # missing things... such as TOOLS. This is a default config file which is useless.
import django
django.setup()
......@@ -10,10 +10,12 @@ End User Documentation
:maxdepth: 2
:caption: End User Documentation:
topics/usage/installation
topics/usage/scanning_policy
topics/usage/faq_end_users
topics/usage/user_guide
topics/usage/thanks
topics/usage/command_line
topics/thanks
Volunteer documentation
......@@ -54,6 +56,16 @@ Advanced Topics
topics/development/task_processing_system
Museum
========================
.. toctree::
:maxdepth: 2
:caption: Museum:
topics/usage/history
Indices and tables
==================
......
# Data Model
This is an autogenerated page about the FailMap data model.
# organizations
## organizations
![Data Model](data_model/organizations_models.png)
# scanners
## scanners
![Data Model](data_model/scanners_models.png)
# map
## map
![Data Model](data_model/map_models.png)
# game
## game
![Data Model](data_model/game_models.png)
# app
## app
![Data Model](data_model/app_models.png)
# hypersh
## hypersh
![Data Model](data_model/hypersh_models.png)
# All in one
## All in one
![Data Model](data_model/failmap_models.png)
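The commit message notes that autogeneration is now available. The images above are presumably produced by a model-graphing tool; a minimal sketch, assuming django-extensions' `graph_models` command is used (the actual generation script is not part of this diff):

```bash
# Assumption: the diagrams are rendered with django-extensions' graph_models;
# the real generation script is not shown in this diff.
failmap graph_models organizations -o organizations_models.png
failmap graph_models scanners -o scanners_models.png
failmap graph_models -a -o failmap_models.png
```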
## Manual scans
### Command line
The scan command can help you:
```bash
failmap scan 'scanner name'
```
The returned message will tell you which scanners you can run manually. All scanners share the same set of options.
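For example, using a few of the scanner names that are registered in the scan command elsewhere in this commit:

```bash
# Run a single scanner by name. The names below are taken from the scanner
# mapping used by the scan command in this commit.
failmap scan dnssec
failmap scan ftp
failmap scan debug
```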
### Admin interface
It's possible to run manual scans from the admin interface, via the actions at the bottom of a selection.
Note that this is beta functionality; please don't use it too much, as the "priority" scanning queue is not functioning.
You can try out a scan or two, but keep in mind that some take a lot of time.
![admin_actions](scanners_scanning_and_ratings/admin_actions.png)
# History (Dutch)
These texts are stored for archival purposes. They have been removed from the website to spare people from
translating content that is already in the manual.
# Welke grote veranderingen zijn er geweest?
## Welke grote veranderingen zijn er geweest?
December 2017
......@@ -49,7 +51,7 @@ Scores tussen 0 tot en met 1000.
# Wat is de historie van Faalkaart?
## Wat is de historie van Faalkaart?
**28 augustus 2017**: Er wordt op een nieuwe manier beoordeeld. Per beveiligingsfout worden punten uitgedeeld. Heeft een organisatie geen punten, dan hebben we geen fouten kunnen vinden: perfect! Er is nu dus ook een top win!
......
# Installation
## Development
If you just want to run failmap for development, read [development/getting_started.html](../development/getting_started.html).
## Self-hosted
If you want to host failmap with all of its dependencies on a dedicated server, visit:
[https://gitlab.com/failmap/server/blob/master/documentation/hosting.md](https://gitlab.com/failmap/server/blob/master/documentation/hosting.md)
### Operations
Operating instructions for the failmap server are documented here:
[https://gitlab.com/failmap/server/blob/master/documentation/operations.md](https://gitlab.com/failmap/server/blob/master/documentation/operations.md)
### Server architecture
Read more about the failmap server architecture here:
[https://gitlab.com/failmap/server](https://gitlab.com/failmap/server)
......@@ -17,8 +17,10 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "failmap.settings")
app = Celery(__name__)
app.config_from_object('django.conf:settings', namespace='CELERY')
# use same result backend as we use for broker url
# If broker_url is None, you might not have started failmap correctly. See docs/source/conf.py for a correct example.
app.conf.result_backend = app.conf.broker_url.replace('amqp://', 'rpc://')
# autodiscover all celery tasks in tasks.py files inside failmap modules
appname = __name__.split('.', 1)[0]
app.autodiscover_tasks([app for app in settings.INSTALLED_APPS if app.startswith(appname)])
......
......@@ -5,6 +5,7 @@ from datetime import datetime
from django.core.management.base import BaseCommand
from failmap.map.geojson import import_from_scratch
from failmap.map.models import AdministrativeRegion
log = logging.getLogger(__package__)
......@@ -32,12 +33,29 @@ class Command(BaseCommand):
required=False,
type=valid_date)
parser.add_argument("--list",
help="Lists the currently available regions and countries.",
required=False,
action='store_true')
# https://nl.wikipedia.org/wiki/Gemeentelijke_herindelingen_in_Nederland#Komende_herindelingen
def handle(self, *app_labels, **options):
import_from_scratch(
countries=[options["country"]],
organization_types=[options["region"]],
when=options["date"])
if options['list']:
log.info("Currently available administrative regions:")
log.info("Hint: add the via the admin interface.")
x = AdministrativeRegion.objects.all()
if not x:
log.info("-- None found. Add them via the admin interface.")
for z in x:
log.info("%-3s %-72s, %-5s" % (z.country, z.organization_type, z.admin_level))
else:
import_from_scratch(
countries=[options["country"]],
organization_types=[options["region"]],
when=options["date"])
def valid_date(s):
......
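A hedged usage sketch for the new flag; the management command name is assumed to be `import_coordinates` based on the commit message, as the file path is not visible in this diff:

```bash
# Assumed command name (not visible in this diff): import_coordinates.
# --list only prints the configured administrative regions; nothing is imported.
failmap import_coordinates --list
```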
......@@ -76,6 +76,9 @@ class OrganizationRating(models.Model):
index_together = [
["when", "id"],
]
app_label = 'map' # added for sphinx autodoc
verbose_name = _('Organization Rating')
verbose_name_plural = _('Organization Ratings')
def __str__(self):
return '🔴%s 🔶%s 🍋%s | %s' % (self.high, self.medium, self.low, self.when.date(),)
......@@ -127,6 +130,9 @@ class UrlRating(models.Model):
class Meta:
managed = True
app_label = 'map' # added for sphinx autodoc
verbose_name = _('Url Rating')
verbose_name_plural = _('Url Ratings')
def __str__(self):
return '%s,%s,%s - %s' % (self.high, self.medium, self.low, self.when.date(),)
......@@ -178,6 +184,7 @@ class AdministrativeRegion(models.Model):
class Meta:
verbose_name = _('administrative_region')
verbose_name_plural = _('administrative_regions')
app_label = 'map' # added for sphinx autodoc
def __str__(self):
return '%s/%s' % (self.country, self.organization_type,)
......@@ -215,3 +222,4 @@ class Configuration(models.Model):
verbose_name = _('configuration')
verbose_name_plural = _('configurations')
ordering = ('display_order', )
app_label = 'map' # added for sphinx autodoc
......@@ -23,6 +23,7 @@ class OrganizationType(models.Model):
managed = True
verbose_name = _('organization_type')
verbose_name_plural = _('organization_types')
app_label = 'organizations' # added for sphinx autodoc
def __str__(self):
return self.name
......@@ -96,6 +97,7 @@ class Organization(models.Model):
db_table = 'organization'
verbose_name = _('organization')
verbose_name_plural = _('organizations')
app_label = 'organizations' # added for sphinx autodoc
# todo: find a smarter way to get the organization type name, instead of a related query... cached enums?
def __str__(self):
......@@ -171,6 +173,7 @@ class Coordinate(models.Model):
db_table = 'coordinate'
verbose_name = _('coordinate')
verbose_name_plural = _('coordinates')
app_label = 'organizations' # added for sphinx autodoc
class Url(models.Model):
......@@ -289,6 +292,7 @@ class Url(models.Model):
managed = True
db_table = 'url'
unique_together = (('organization_old', 'url'),)
app_label = 'organizations' # added for sphinx autodoc
def __str__(self):
if self.is_dead:
......@@ -414,3 +418,4 @@ class Promise(models.Model):
class Meta:
verbose_name = _('promise')
verbose_name_plural = _('promises')
app_label = 'organizations' # added for sphinx autodoc
......@@ -2,13 +2,13 @@ import logging
from failmap.app.management.commands._private import ScannerTaskCommand
from failmap.scanners.scanner import (dnssec, dummy, ftp, http, onboard, plain_http, screenshot,
security_headers, tls_osaft, tls_qualys)
security_headers, tls_osaft, tls_qualys, debug)
log = logging.getLogger(__name__)
class Command(ScannerTaskCommand):
"""Can perform a host of scans. Run like: failmap scan [scanner_name] and then options."""
""" Can perform a host of scans. Run like: failmap scan [scanner_name] and then options."""
help = __doc__
......@@ -22,7 +22,8 @@ class Command(ScannerTaskCommand):
'ftp': ftp,
'screenshot': screenshot,
'onboard': onboard,
'dummy': dummy
'dummy': dummy,
'debug': debug
}
def add_arguments(self, parser):
......
......@@ -4,6 +4,7 @@ from datetime import datetime
import pytz
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils.translation import gettext_lazy as _
from failmap.organizations.models import Url
......@@ -140,6 +141,11 @@ class Endpoint(models.Model):
def autocomplete_search_fields():
return 'url__url',
class Meta:
verbose_name = _('endpoint')
verbose_name_plural = _('endpoint')
app_label = 'scanners' # added for sphinx autodoc
class UrlIp(models.Model):
"""
......@@ -194,6 +200,11 @@ class UrlIp(models.Model):
def __str__(self):
return "%s %s" % (self.ip, self.discovered_on.date())
class Meta:
verbose_name = _('urlip')
verbose_name_plural = _('urlip')
app_label = 'scanners' # added for sphinx autodoc
class TlsQualysScan(models.Model):
"""
......@@ -225,6 +236,7 @@ class TlsQualysScan(models.Model):
class Meta:
managed = True
db_table = 'scanner_tls_qualys'
app_label = 'scanners' # added for sphinx autodoc
def __str__(self):
return "%s - %s" % (self.scan_date, self.qualys_rating)
......@@ -267,6 +279,11 @@ class TlsScan(models.Model):
def __str__(self):
return "%s - %s" % (self.scan_date, self.rating)
class Meta:
verbose_name = _('tlsscan')
verbose_name_plural = _('tlsscan')
app_label = 'scanners' # added for sphinx autodoc
# https://docs.djangoproject.com/en/dev/topics/db/models/#id6
class GenericScanMixin(models.Model):
......@@ -321,6 +338,7 @@ class GenericScanMixin(models.Model):
another abstract base class. You just need to remember to explicitly set abstract=True each time.
"""
abstract = True
app_label = 'scanners' # added for sphinx autodoc
class EndpointGenericScan(GenericScanMixin):
......@@ -376,6 +394,9 @@ class EndpointGenericScanScratchpad(models.Model):
help_text="Whatever data to dump for debugging purposes."
)
class Meta:
app_label = 'scanners' # added for sphinx autodoc
class Screenshot(models.Model):
endpoint = models.ForeignKey(
......@@ -387,6 +408,9 @@ class Screenshot(models.Model):
height_pixels = models.IntegerField(default=0)
created_on = models.DateTimeField(auto_now_add=True, db_index=True)
class Meta:
app_label = 'scanners' # added for sphinx autodoc
# A debugging table to help with API interactions.
# This can be auto truncated after a few days.
......@@ -400,3 +424,6 @@ class TlsQualysScratchpad(models.Model):
domain = models.CharField(max_length=255)
when = models.DateTimeField(auto_now_add=True)
data = models.TextField()
class Meta:
app_label = 'scanners' # added for sphinx autodoc
import logging
from ftplib import FTP, FTP_TLS, error_perm, error_proto, error_reply, error_temp
from celery import Task, group
from django.utils import timezone
from failmap.celery import ParentFailed, app
from failmap.organizations.models import Url
from failmap.scanners.models import Endpoint
from failmap.scanners.scanmanager.endpoint_scan_manager import EndpointScanManager
from failmap.scanners.scanner.scanner import (allowed_to_scan, endpoint_filters,
q_configurations_to_scan, url_filters)
from constance import config
log = logging.getLogger(__name__)
@app.task(queue='storage')
def compose_task(
organizations_filter: dict = dict(),
urls_filter: dict = dict(),
endpoints_filter: dict = dict(),
) -> Task:
"""
Helps with identifying issues with scanners. It shows the relevant permissions, configurations and lists the
organizations, urls and endpoints in a convenient way. This can only run in direct mode and will not result in tasks.
All messages are returned as log messages.
:param organizations_filter:
:param urls_filter:
:param endpoints_filter:
:return:
"""
log.info("Debug info for scanners:")
# done: list allowed_to_scan
vars = dir(config)
log.info("")
log.info("Scan permissions:")
log.info("Can be adjusted in the admin interface at Configuration")
for var in vars:
if var[0:5] == "SCAN_":
log.info("%-30s: %-5s" % (var, getattr(config, var)))
# done: list q_configurations_to_scan on all levels.
log.info("")
log.info("Scan configurations (regions set allowed to be scanned)")
log.info("Can be adjusted in the admin interface at __MAP__ Configuration")
log.info("Empty means nothing will be scanned (basically exceptions)")
log.info("Organizations: %s" % q_configurations_to_scan(level='organization'))
log.info("Urls: %s" % q_configurations_to_scan(level='url'))
log.info("Endpoints: %s" % q_configurations_to_scan(level='endpoint'))
# todo: show list of selected urls, endpoints and organizations.
log.info("")
log.info("Endpoints that are selected based on parameters:")
log.info("Other filters may apply depending on selected scanner. For example: scan ftp only selects ftp endpoints")
endpoints = Endpoint.objects.all().filter(q_configurations_to_scan(level='endpoint'), **endpoints_filter)
endpoints = endpoint_filters(endpoints, organizations_filter, urls_filter, endpoints_filter)
for endpoint in endpoints:
log.info("%-3s %-20s %-30s: IPv%-1s %s/%s" % (endpoint.url.organization.first().country,
endpoint.url.organization.first().name,
endpoint.url.url,
endpoint.ip_version, endpoint.protocol, endpoint.port))
log.info("")
log.info("End of scan debug")
log.info("")
# return nothing.
return group()
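Since the new debug scanner is registered under the name `debug` in the scan command, it can be invoked like any other scanner; per its docstring it only logs diagnostics and returns an empty group, so nothing is queued:

```bash
# Run the debug scanner; it logs permissions, scan configurations and the
# selected endpoints, and returns an empty task group (nothing is queued).
failmap scan debug
```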
......@@ -81,7 +81,7 @@ INSTALLED_APPS = [
'django.contrib.staticfiles',
'import_export',
'failmap.fail',
'failmap.organizations.apps.OrganizationsConfig',
'failmap.organizations.apps.OrganizationsConfig', # because some signals need this.
'failmap.scanners',
'failmap.map',
'failmap.game',
......
"""Generic Types for type hinting."""
from .celery import Task
import datetime
def compose_task(
......@@ -14,8 +15,8 @@ def compose_task(
:param urls_filter: dict: limit urls to these filters, see below
:param endpoints_filter: dict: limit endpoints to these filters, see below
*This is an abstract of the `compose_discover_task` function which is used throughout this codebase, search for
`compose_discover_task` to find implementations which can be used as example.*
*This is an abstract of the `compose_task` function which is used throughout this codebase, search for
`compose_task` to find implementations which can be used as example.*
Composition of a task is building a task from primitives (task, group, chain) and other composed tasks in order
to create a 'collection' of work that as a whole can be scheduled for execution in the task processing system.
......@@ -90,7 +91,7 @@ def compose_task(
For example, to scan all urls/endpoints for one organization named 'example' run:
>>> task = compose_discover_task(organizations={'name__iexact': 'example'})
>>> task = compose_task(organizations={'name__iexact': 'example'})
>>> result = task.apply_async()
>>> print(result.get())
......@@ -98,10 +99,26 @@ def compose_task(
Multiple filters can be applied, to scan only port 80 for organizations added today run:
>>> task = compose_discover_task(
>>> task = compose_task(
... organizations={'date_added__day': datetime.datetime.today().day},
... endpoints={'port': 80}
... )
"""
raise NotImplementedError()
def compose_discover_task(
organizations_filter: dict = dict(),
urls_filter: dict = dict(),
endpoints_filter: dict = dict(),
) -> Task:
"""
See Compose Task
:param organizations_filter:
:param urls_filter:
:param endpoints_filter:
:return:
"""
raise NotImplementedError()
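For reference, a minimal sketch of a concrete `compose_task` that satisfies this abstract, modeled loosely on the debug scanner added in this commit; `example_scan_task` and the simplified filter handling are illustrative, not part of the codebase:

```python
# Illustrative only: example_scan_task and the simplified filtering are not
# part of the codebase; a real implementation also applies the scanner's
# permission checks and configuration filters.
from celery import Task, group

from failmap.celery import app
from failmap.scanners.models import Endpoint


@app.task(queue='storage')
def example_scan_task(endpoint_id: int):
    """Placeholder task body; a real scanner would perform actual work here."""
    return endpoint_id


def compose_task(
    organizations_filter: dict = dict(),
    urls_filter: dict = dict(),
    endpoints_filter: dict = dict(),
) -> Task:
    # Select the work based on the supplied filters and wrap it in a group,
    # so the whole composition can be scheduled as a single unit.
    endpoints = Endpoint.objects.all().filter(**endpoints_filter)
    return group(example_scan_task.si(endpoint.id) for endpoint in endpoints)
```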