[cli] Add audit log to all relevant cli commands

parent 25481808
......@@ -4,5 +4,4 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export PYTHONPATH="${DIR}/src:${PYTHONPATH:-}"
python "${DIR}/src/cli/main.py" $@
python "${DIR}/../src/cli/main.py" $@
......@@ -3,9 +3,9 @@ import logging
import json
from vardb.datamodel import DB, sample
from api.v1 import resources
from api.util.delete_analysis import delete_analysis
from vardb.deposit.deposit_analysis import import_filterconfigs
from cli.decorators import cli_logger, session
@click.group(help="Analyses actions")
......@@ -15,7 +15,9 @@ def analyses():
@analyses.command("delete")
@click.argument("analysis_id", type=int)
@session
@cli_logger
def cmd_analysis_delete(logger, session, analysis_id):
    """
    Deletes an analysis, removing its samples and genotypes
    in the process. Any alleles that were imported as part of the
    analysis are evaluated to determine which alleles belong only
    to the analysis and which alleles were also imported by other
    means; only the former are removed along with the analysis.
    """
    # Resolve the analysis name up front so the confirmation prompt
    # (and later log messages) can show it; .one() raises if the id is unknown.
    aname = session.query(sample.Analysis.name).filter(sample.Analysis.id == analysis_id).one()[0]
    answer = input(
        "Are you sure you want to delete analysis {}?\nType 'y' to confirm.\n".format(aname)
    )
    if answer == "y":
        try:
            delete_analysis(session, analysis_id)
            session.commit()
            logger.echo("Analysis {} ({}) deleted successfully".format(analysis_id, aname))
        except Exception:
            # Recorded in the audit log; traceback goes to the console log.
            logger.exception("Something went wrong while deleting analysis {}".format(analysis_id))
    else:
        logger.echo("Lacking confirmation, aborting...")
@analyses.command("update_filterconfig")
@click.argument("filterconfig", type=click.File("r"))
def cmd_analysis_updatefilterconfig(filterconfig):
@session
@cli_logger
def cmd_analysis_updatefilterconfig(logger, session, filterconfig):
"""
Updates filterconfigs from the input JSON file.
"""
logging.basicConfig(level=logging.INFO)
db = DB()
db.connect()
filterconfigs = json.load(filterconfig)
result = import_filterconfigs(db.session, filterconfigs)
result = import_filterconfigs(session, filterconfigs)
db.session.commit()
logging.info(
session.commit()
logger.echo(
"Created {} and updated {} filter configurations".format(
result["created"], result["updated"]
)
......
import click
import logging
import datetime
from vardb.datamodel import DB, broadcast as broadcast_model
from cli.decorators import cli_logger, session
@click.group(help="Broadcast")
......@@ -14,19 +11,15 @@ def broadcast():
@broadcast.command("list", help="List all active messages")
@click.option("--all", is_flag=True, default=False, help="List all messages")
@click.option("--tail", is_flag=True, default=False, help="List last 10 messages")
def cmd_list_active(all, tail):
@session
def cmd_list_active(session, all, tail):
"""
Print all active broadcast messages to console
"""
if tail:
all = True
logging.basicConfig(level=logging.INFO)
db = DB()
db.connect()
session = db.session
filters = []
if not all:
filters.append(broadcast_model.Broadcast.active.is_(True))
......@@ -67,32 +60,27 @@ def cmd_list_active(all, tail):
@broadcast.command("new", help="Create new message. Activated immediately.")
@click.argument("message", nargs=-1, type=click.UNPROCESSED)
@session
@cli_logger
def cmd_new_message(logger, session, message):
    """
    Create a new broadcast message and activate it immediately.

    The message is taken as the remaining command-line words,
    joined by single spaces.
    """
    message = " ".join(message)
    if not message:
        logger.echo("Message empty")
        return
    new_message = broadcast_model.Broadcast(message=message, active=True)
    session.add(new_message)
    session.commit()
    logger.echo("Message with id {} added".format(new_message.id))
@broadcast.command("deactivate", help="Deactivate a message.")
@click.argument("message_id", type=click.INT)
def cmd_deactivate_message(message_id):
logging.basicConfig(level=logging.INFO)
db = DB()
db.connect()
session = db.session
@session
@cli_logger
def cmd_deactivate_message(logger, session, message_id):
message = (
session.query(broadcast_model.Broadcast)
......@@ -101,9 +89,9 @@ def cmd_deactivate_message(message_id):
)
if not message:
click.echo("Found no message with id {}".format(message_id))
logger.echo("Found no message with id {}".format(message_id))
return
message.active = False
session.commit()
click.echo("Message with id {} set as inactive".format(message.id))
logger.echo("Message with id {} set as inactive".format(message.id))
......@@ -23,6 +23,7 @@ from .migration_db import (
migration_history,
migration_compare,
)
from cli.decorators import cli_logger, session
DEFAULT_WARNING = "THIS WILL WIPE OUT {} COMPLETELY! \nAre you sure you want to proceed? Type 'CONFIRM' to confirm.\n".format(
......@@ -31,17 +32,18 @@ DEFAULT_WARNING = "THIS WILL WIPE OUT {} COMPLETELY! \nAre you sure you want to
@contextmanager
def confirm(echo_func, success_msg, force=False, warning=DEFAULT_WARNING):
    """
    Context manager asking the user for confirmation before running its body.

    :param echo_func: callable used for output (e.g. click.echo or CliLogger.echo)
    :param success_msg: message echoed after the body completes
    :param force: skip the confirmation prompt when True
    :param warning: prompt text shown to the user
    :raises RuntimeError: if the user does not type 'CONFIRM'
    """
    if not force:
        confirmation = input(warning)
        if confirmation == "CONFIRM":
            yield
            echo_func(success_msg)
        else:
            echo_func("Lacking confirmation, aborting...")
            raise RuntimeError("Lacking confirmation, aborting...")
    else:
        yield
        echo_func(success_msg)
@click.group(help="Database management (create/drop/migrate/etc)")
......@@ -52,7 +54,7 @@ def database():
@database.command("drop", help="Drops all data in database.")
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_drop_db(f=None):
with confirm("Database dropped!", force=f):
with confirm(click.echo, "Database dropped!", force=f):
db = DB()
db.connect()
drop_db(db)
......@@ -61,7 +63,7 @@ def cmd_drop_db(f=None):
@database.command("make", help="Creates all tables in database.")
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_make_db(f=None):
with confirm("Tables should now have been created.", force=f):
with confirm(click.echo, "Tables should now have been created.", force=f):
db = DB()
db.connect()
make_db(db)
......@@ -70,14 +72,15 @@ def cmd_make_db(f=None):
@database.command("refresh", help="Refresh shadow tables in database.")
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_refresh(f=None):
@cli_logger
def cmd_refresh(logger, f=None):
warning = "This will refresh all shadow tables in the database. Do not run this command while app is running.\nType 'CONFIRM' to confirm.\n"
with confirm("Tables should now have been refreshed.", force=f, warning=warning):
with confirm(logger.echo, "Tables should now have been refreshed.", force=f, warning=warning):
db = DB()
db.connect()
click.echo("Refreshing tables...")
logger.echo("Refreshing tables...")
refresh(db)
click.echo("Done!")
logger.echo("Done!")
db.session.commit()
ast_count = list(db.session.execute("SELECT COUNT(*) FROM annotationshadowtranscript"))[0][
0
......@@ -89,8 +92,8 @@ def cmd_refresh(f=None):
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
conn.cursor().execute("VACUUM(ANALYZE)")
click.echo("AnnotationShadowTranscript count: {}".format(ast_count))
click.echo("AnnotationShadowFrequency count: {}".format(asf_count))
logger.echo("AnnotationShadowTranscript count: {}".format(ast_count))
logger.echo("AnnotationShadowFrequency count: {}".format(asf_count))
@database.command(
......@@ -100,7 +103,7 @@ def cmd_refresh(f=None):
)
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_make_migration_base(f=None):
with confirm("Tables should now have been created.", force=f):
with confirm(click.echo, "Tables should now have been created.", force=f):
make_migration_base_db()
......@@ -111,7 +114,7 @@ def cmd_make_migration_base(f=None):
)
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_ci_migration(f=None):
with confirm("Migrations completed successfully", force=f):
with confirm(click.echo, "Migrations completed successfully", force=f):
ci_migration_upgrade_downgrade()
......@@ -122,14 +125,14 @@ def cmd_ci_migration(f=None):
)
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_ci_migration_head(f=None):
with confirm("Migrations to newest revision was successful.", force=f):
with confirm(click.echo, "Migrations to newest revision was successful.", force=f):
ci_migration_head()
@database.command("ci-migration-base", help="Creates base database tables")
@click.option("-f", is_flag=True, help="Do not ask for confirmation.")
def cmd_ci_migration_base(f=None):
    """Recreate the base (pre-migration) database tables, used by CI."""
    with confirm(click.echo, "Base database tables created successfully.", force=f):
        ci_migration_db_remake()
......@@ -139,7 +142,8 @@ def cmd_ci_migration_base(f=None):
short_help="Upgrade to version",
)
@click.argument("revision")
def cmd_upgrade(revision):
@cli_logger
def cmd_upgrade(logger, revision):
migration_upgrade(revision)
......@@ -149,7 +153,8 @@ def cmd_upgrade(revision):
short_help="Downgrade to version",
)
@click.argument("revision")
def cmd_downgrade(revision):
@cli_logger
def cmd_downgrade(logger, revision):
migration_downgrade(revision)
......
......@@ -13,6 +13,7 @@ from vardb.deposit.deposit_alleles import DepositAlleles
from vardb.deposit.deposit_genepanel import DepositGenepanel
from vardb.datamodel.analysis_config import AnalysisConfigData
from cli.decorators import cli_logger, session
VCF_FIELDS_RE = re.compile(
"(?P<analysis_name>.+[.-](?P<genepanel_name>.+)[-_](?P<genepanel_version>.+))\.vcf"
......@@ -34,18 +35,16 @@ def deposit():
@click.option("--report", type=click.Path(exists=True))
@click.option("--warnings", type=click.Path(exists=True))
@click.option("--priority", type=click.INT, default=1)
def cmd_deposit_analysis(vcf, ped=None, report=None, warnings=None, priority=None):
@session
@cli_logger
def cmd_deposit_analysis(logger, session, vcf, ped=None, report=None, warnings=None, priority=None):
"""
Deposit an analysis given input vcf.
File should be in format of {analysis_name}.{genepanel_name}-{genepanel_version}.vcf
"""
logging.basicConfig(level=logging.DEBUG)
matches = re.match(VCF_FIELDS_RE, os.path.basename(vcf))
db = DB()
db.connect()
da = DepositAnalysis(db.session)
da = DepositAnalysis(session)
report_data = warnings_data = None
if report:
......@@ -66,8 +65,9 @@ def cmd_deposit_analysis(vcf, ped=None, report=None, warnings=None, priority=Non
report=report_data,
warnings=warnings_data,
)
da.import_vcf(analysis_config_data)
db.session.commit()
analysis = da.import_vcf(analysis_config_data)
session.commit()
logger.echo("Analysis {} deposited successfully".format(analysis.name))
@deposit.command("exists")
......@@ -80,79 +80,74 @@ def all_exists(fs):
@click.argument("vcf", nargs=-1, type=click.Path(exists=True))
@click.option("--genepanel_name")
@click.option("--genepanel_version")
def cmd_deposit_alleles(vcf, genepanel_name, genepanel_version):
@session
@cli_logger
def cmd_deposit_alleles(logger, session, vcf, genepanel_name, genepanel_version):
"""
Deposit alleles given input vcf.
If genepanel not given by options, get it from the filename assuming
format of {something}.{genepanel_name}_{genepanel_version}.vcf
"""
logging.basicConfig(level=logging.DEBUG)
db = DB()
db.connect()
da = DepositAlleles(db.session)
da = DepositAlleles(session)
for f in vcf:
if not genepanel_name:
matches = re.match(VCF_FIELDS_RE, os.path.basename(f))
genepanel_name = matches.group("genepanel_name")
genepanel_version = matches.group("genepanel_version")
da.import_vcf(f, genepanel_name, genepanel_version)
db.session.commit()
click.echo("Deposited " + str(len(vcf)) + " files.")
session.commit()
logger.echo("Deposited " + str(len(vcf)) + " files.")
@deposit.command("annotation")
@click.argument("vcf")
@session
@cli_logger
def cmd_deposit_annotation(logger, session, vcf):
    """
    Update/deposit alleles with annotation only given input vcf.
    No analysis/variant interpretation is created.
    File should be in format of {something}.{genepanel_name}_{genepanel_version}.vcf
    """
    # Genepanel name/version are parsed from the filename.
    matches = re.match(VCF_FIELDS_RE, os.path.basename(vcf))
    da = DepositAlleles(session)
    da.import_vcf(
        vcf,
        matches.group("genepanel_name"),
        matches.group("genepanel_version"),
        annotation_only=True,
    )
    session.commit()
    logger.echo("Annotation imported successfully")
@deposit.command("references")
@click.argument("references_json")
@session
@cli_logger
def cmd_deposit_references(logger, session, references_json):
    """
    Deposit/update a set of references into database given by DB_URL.
    Input is a line separated JSON file, with one reference object per line.
    """
    import_references(session, references_json)
    logger.echo("References imported successfully")
@deposit.command("custom_annotation")
@click.argument("custom_annotation_json")
@session
@cli_logger
def cmd_deposit_custom_annotations(logger, session, custom_annotation_json):
    """
    Deposit/update a set of custom annotations into database given by DB_URL.
    Input is a line separated JSON file, with one custom annotation object per line.
    """
    import_custom_annotations(session, custom_annotation_json)
    logger.echo("Custom annotation imported successfully")
@deposit.command("genepanel")
......@@ -162,14 +157,21 @@ def cmd_deposit_custom_annotations(custom_annotation_json):
@click.option("--phenotypes_path")
@click.option("--replace", is_flag=True)
@click.option("--folder", help="Folder to look for files assuming standard filenames")
@session
@cli_logger
def cmd_deposit_genepanel(
genepanel_name, genepanel_version, transcripts_path, phenotypes_path, replace, folder
logger,
session,
genepanel_name,
genepanel_version,
transcripts_path,
phenotypes_path,
replace,
folder,
):
"""
Create or replace genepanel. If replacing genepanel, use --replace flag.
"""
logging.basicConfig(level=logging.DEBUG)
if folder:
prefix = folder.split("/")[-1]
transcripts_path = folder + "/" + prefix + ".transcripts.csv"
......@@ -177,19 +179,22 @@ def cmd_deposit_genepanel(
genepanel_name, genepanel_version = prefix.split("_", 1)
assert genepanel_version.startswith("v")
db = DB()
db.connect()
dg = DepositGenepanel(db.session)
dg = DepositGenepanel(session)
dg.add_genepanel(
transcripts_path, phenotypes_path, genepanel_name, genepanel_version, replace=replace
)
logger.echo("Genepanel {}_{} imported successfully".format(genepanel_name, genepanel_version))
@deposit.command("append_genepanel_to_usergroup")
@click.argument("genepanel_name", required=True)
@click.argument("genepanel_version", required=True)
@click.argument("user_group_name", required=True)
def cmd_append_genepanel_to_usergroup(genepanel_name, genepanel_version, user_group_name):
@session
@cli_logger
def cmd_append_genepanel_to_usergroup(
logger, session, genepanel_name, genepanel_version, user_group_name
):
"""
Append a genepanel to the given user group.
:param genepanel_name:
......@@ -197,21 +202,16 @@ def cmd_append_genepanel_to_usergroup(genepanel_name, genepanel_version, user_gr
:param user_group_name:
:return:
"""
db = DB()
db.connect()
user_group = (
db.session.query(user.UserGroup).filter(user.UserGroup.name == user_group_name).one()
)
user_group = session.query(user.UserGroup).filter(user.UserGroup.name == user_group_name).one()
gp = (
db.session.query(gene.Genepanel)
session.query(gene.Genepanel)
.filter(gene.Genepanel.name == genepanel_name, gene.Genepanel.version == genepanel_version)
.one()
)
if gp in user_group.genepanels:
click.echo(
logger.echo(
"Genepanel ({gp_name},{gp_version}) already exists in user group {user_group}".format(
gp_name=genepanel_name, gp_version=genepanel_version, user_group=user_group_name
)
......@@ -220,9 +220,9 @@ def cmd_append_genepanel_to_usergroup(genepanel_name, genepanel_version, user_gr
user_group.genepanels.append(gp)
db.session.commit()
session.commit()
click.echo(
logger.echo(
"Appended genepanel ({gp_name},{gp_version}) to user group {user_group}".format(
gp_name=genepanel_name, gp_version=genepanel_version, user_group=user_group_name
)
......
......@@ -6,6 +6,8 @@ import datetime
from vardb.datamodel import DB, user, sample
from vardb.export import export_sanger_variants, dump_classification
from cli.decorators import cli_logger, session
FILENAME_REPORT_DEFAULT = "non-started-analyses-variants-{timestamp}"
FILENAME_TIMESTAMP_FORMAT = "%Y-%m-%d_%H%M" # 2017-11-10_1337
......@@ -27,21 +29,19 @@ def export():
is_flag=True,
help="Include name(s) of analysis where a variant is found",
)
def cmd_export_classifications(filename, with_analysis_names):
@session
@cli_logger
def cmd_export_classifications(logger, session, filename, with_analysis_names):
"""
Exports all current classifications into an excel file.
"""
logging.basicConfig(level=logging.INFO)
today = datetime.datetime.now()
timestamp = today.strftime(FILENAME_TIMESTAMP_FORMAT)
output_name = (
filename if filename else "variant-classifications-{timestamp}".format(timestamp=timestamp)
)
db = DB()
db.connect()
dump_classification.dump_alleleassessments(db.session, output_name, with_analysis_names)
click.echo("Exported variants to " + output_name + ".xlsx/csv")
dump_classification.dump_alleleassessments(session, output_name, with_analysis_names)
logger.echo("Exported variants to " + output_name + ".xlsx/csv")
@export.command("sanger", help="Export variants that needs to be Sanger verified")
......@@ -51,7 +51,9 @@ def cmd_export_classifications(filename, with_analysis_names):
help="The name of the file to create. Suffix .xlsx and .csv will be automatically added.\n"
"Default: '" + FILENAME_REPORT_DEFAULT.format(timestamp="YYYY-MM-DD_hhmm") + ".xlsx/csv'",
)
def cmd_export_sanger(user_group, filename):
@session
@cli_logger
def cmd_export_sanger(logger, session, user_group, filename):
"""
Export alleles from non-started analysis to file
"""
......@@ -59,12 +61,9 @@ def cmd_export_sanger(user_group, filename):
today = datetime.datetime.now()
timestamp = today.strftime(FILENAME_TIMESTAMP_FORMAT)
output_name = filename if filename else (FILENAME_REPORT_DEFAULT).format(timestamp=timestamp)
click.echo("Exporting variants to " + output_name + ".xlsx/csv")
db = DB()
db.connect()
logger.echo("Exporting variants to " + output_name + ".xlsx/csv")
usergroup = db.session.query(user.UserGroup).filter(user.UserGroup.name == user_group).one()
usergroup = session.query(user.UserGroup).filter(user.UserGroup.name == user_group).one()
genepanels = [(g.name, g.version) for g in usergroup.genepanels]
......@@ -75,17 +74,17 @@ def cmd_export_sanger(user_group, filename):
start = datetime.datetime.now()
filter_config_id = (
db.session.query(sample.FilterConfig.id)
session.query(sample.FilterConfig.id)
.filter(sample.FilterConfig.usergroup_id == usergroup.id)
.scalar()
)
has_content = export_sanger_variants.export_variants(
db.session, genepanels, filter_config_id, excel_file_obj, csv_file_obj=csv_file_obj
session, genepanels, filter_config_id, excel_file_obj, csv_file_obj=csv_file_obj
)
if has_content:
end = datetime.datetime.now()
click.echo(
logger.echo(
"Exported variants to " + output_name + ".xlsx/csv" + " in {}".format(str(end - start))
)
else:
......
This diff is collapsed.
import getpass
import sys
import logging
from functools import update_wrapper
import click
from vardb.util import DB
from vardb.datamodel import log
logging.basicConfig(level=logging.INFO)
class CliLogger(object):
    """
    Collects output from a CLI command and stores it in the database
    as an audit log entry (log.CliLog) when commit() is called.
    """

    def __init__(self, ctx):
        # Click context of the invoked command; used in commit() to
        # resolve the group/command names for the log entry.
        self.ctx = ctx
        self.echoed = []
        self.log = log.CliLog()
        user = getpass.getuser()
        if not user:
            raise RuntimeError(
                "Couldn't find your system username. A username is required for auditing purposes."
            )
        self.log.user = user

    def echo(self, message, db_only=False):
        """Record message for the audit log; also print it unless db_only is True."""
        self.echoed.append(message)
        if not db_only:
            click.echo(message)

    def exception(self, message):
        """Log message with traceback, and record it for the audit log."""
        # Also record the message so failures show up in the stored audit
        # log output, not only on the console.
        self.echoed.append(message)
        logging.exception(message)

    def commit(self):
        """Write the collected audit log entry to the database."""
        group = ""
        if self.ctx.parent and self.ctx.parent.command:
            group = self.ctx.parent.command.name
        self.log.group = group
        self.log.groupcommand = self.ctx.command.name
        self.log.output = "\n".join(self.echoed)
        self.log.command = " ".join(sys.argv[1:])
        # Use a dedicated connection so the audit entry is written
        # independently of the command's own session state.
        db = DB()
        db.connect()
        session = db.session
        session.add(self.log)
        session.commit()
        db.disconnect()
def cli_logger(f):
    """Decorator for logging cli commands to database.

    Injects a CliLogger as the wrapped function's first argument and
    commits the collected audit log when the command finishes,
    whether it returned normally or raised.
    """

    def wrapper(*args, **kwargs):
        ctx = click.get_current_context()
        if not getattr(ctx, "clilogger", None):
            ctx.clilogger = CliLogger(ctx)
        try:
            return f(ctx.clilogger, *args, **kwargs)
        finally:
            ctx.clilogger.commit()

    return update_wrapper(wrapper, f)
def session(f):
    """Decorator providing a database session.

    Injects a database session as the wrapped function's first
    argument; the connection is created once per click context and
    disconnected when the command is done.
    """

    def wrapper(*args, **kwargs):
        ctx = click.get_current_context()
        if not getattr(ctx, "session", None):
            database = DB()
            database.connect()
            ctx.db = database
            ctx.session = database.session
        try:
            return f(ctx.session, *args, **kwargs)
        finally:
            ctx.db.disconnect()

    return update_wrapper(wrapper, f)
......@@ -11,6 +11,7 @@ from cli.commands.analyses.analyses import analyses
from cli.commands.export.export import export
from cli.commands.users.users import users
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
......@@ -34,4 +35,4 @@ cli_group.add_command(export)
cli_group.add_command(users)
if __name__ == "__main__":
cli_group()
cli_group(prog_name="ella-cli")
......@@ -78,36 +78,3 @@ def test_user_modify(session, run_command):
user.User.last_name == last_name,
user.User.email == email,