...
 
Commits (3)
#!/bin/sh
# Hat log parser launcher. This is just a tiny
# wrapper around hat-parser.py: it locates python3
# and execs the parser, forwarding all arguments as-is.

# Sourcing env_base (provides BASE_DIR and print_msg)
. /usr/lib/hatd/env_base.sh

PYTHON_BIN="$(command -v python3)"
[ -z "${PYTHON_BIN}" ] && print_msg "hatd requires python3" >&2 \
    && exit 1

CLIENT="${BASE_DIR}/hat/hat-parser.py"

# FIX: the argument forwarder had been mangled into the literal string
# "[email protected]" (email-obfuscation artifact); "$@" preserves each
# original argument as a separate word.
exec "${PYTHON_BIN}" "${CLIENT}" "$@"
......@@ -39,35 +39,36 @@ def create_user_files():
with open(file_, 'wt') as f:
f.write('')
class ManualHelpFormatter(argparse.RawTextHelpFormatter):
    '''Help formatter that shows a single metavar for `nargs='+'`
    arguments (instead of argparse's default `X [X ...]`).
    '''

    def _format_args(self, action, default_metavar):
        '''Return the usage fragment for *action*'s arguments.'''
        get_metavar = self._metavar_formatter(action, default_metavar)
        nargs = action.nargs
        if nargs is None:
            return '%s' % get_metavar(1)
        if nargs == argparse.OPTIONAL:
            return '[%s]' % get_metavar(1)
        if nargs == argparse.ZERO_OR_MORE:
            return '[%s [%s ...]]' % get_metavar(2)
        if nargs == argparse.ONE_OR_MORE:
            # The whole point of this subclass: show one metavar only
            return '%s' % get_metavar(1)
        if nargs == argparse.REMAINDER:
            return '...'
        if nargs == argparse.PARSER:
            return '%s ...' % get_metavar(1)
        # Fixed integer nargs: one metavar per expected argument
        return ' '.join(['%s'] * nargs) % get_metavar(nargs)
def parse_arguments():
'''Parse arguments (for client) and
return appropriate response back.
'''
# Manual formatter class to make `nargs='+'` show one arg
class ManualFormatter(argparse.RawTextHelpFormatter):
def _format_args(self, action, default_metavar):
get_metavar = self._metavar_formatter(action, default_metavar)
if action.nargs is None:
result = '%s' % get_metavar(1)
elif action.nargs == argparse.OPTIONAL:
result = '[%s]' % get_metavar(1)
elif action.nargs == argparse.ZERO_OR_MORE:
result = '[%s [%s ...]]' % get_metavar(2)
# Here...
elif action.nargs == argparse.ONE_OR_MORE:
result = '%s' % get_metavar(1)
elif action.nargs == argparse.REMAINDER:
result = '...'
elif action.nargs == argparse.PARSER:
result = '%s ...' % get_metavar(1)
else:
formats = ['%s' for _ in range(action.nargs)]
result = ' '.join(formats) % get_metavar(action.nargs)
return result
parser = argparse.ArgumentParser(prog='hatc', description='HAT client – a client for HAT (Hyper-AT), the one-time scheduler for GNU/Linux.',
formatter_class=ManualFormatter)
formatter_class=ManualHelpFormatter)
parser.add_argument('-l', '--list', dest='joblist',
required=False, action='store_true',
help='Show the list of queued jobs.\n')
......@@ -308,9 +309,10 @@ def main():
data_seq = ('stop_daemon', True)
data = SendReceiveData(data_seq)
data.check_get_send()
exit(0)
if sys.argv[1] in {'-V', '--version'}:
print_msg(__version__, file=sys.stderr)
exit(0)
exit(0)
if not check_daemon_process(DAEMON_PID_FILE):
print_msg('Daemon (hatd) is not running')
exit(127)
......
'''Parser of hat logs. Comes with `hatd`.'''
import argparse
import datetime
import glob
import gzip
import os
import re
import sys
from lib.utils import print_msg
# Constants
USER_LOG_LOCATION = os.path.expanduser('~/.hatd/logs/')
def split_format(line):
    '''Split one hat log line into its typed fields.

    Returns a 7-tuple: (run_time, euid, job_id, scheduled_time,
    command, return_code, output) -- the two times as datetimes,
    everything else as stripped strings.
    '''
    dt_format = '%Y-%m-%d %H:%M:%S'
    fields = line.split(' :')

    def _value(field):
        # Field looks like 'label> value'; take what follows the label
        return field.split('>')[1].strip()

    run_time = datetime.datetime.strptime(fields[0].strip(), dt_format)
    scheduled_time = datetime.datetime.strptime(_value(fields[3]), dt_format)
    # The output may itself contain '>' characters, so split only once
    output = fields[-1].split('>', maxsplit=1)[1].strip()
    return (
        run_time,
        _value(fields[1]),   # euid
        _value(fields[2]),   # job id
        scheduled_time,
        _value(fields[4]),   # command
        _value(fields[5]),   # return code
        output,
    )
def parse_arguments():
    '''Parse arguments (for logs) and
    return appropriate response back.

    Returns:
        dict: option destinations mapped to parsed values
              (command, stderr, scheduled, from_time, to_time).
    '''
    parser = argparse.ArgumentParser(prog='hat-parser',
                                     description='Log parser for hatc – a client for HAT (Hyper-AT), the one-time scheduler for GNU/Linux. Without any option, it shows all STDOUT logs.')
    parser.add_argument('-c', '--command', dest='command',
                        required=False, help='Part of the command used (Regex supported).\n')
    parser.add_argument('-e', '--error', dest='stderr', action='store_true',
                        required=False, help='Show error messages.')
    parser.add_argument('-s', '--scheduled', dest='scheduled', action='store_true',
                        required=False, help='Compare against scheduled time of jobs instead of run time (default).')
    parser.add_argument('-f', '--from', dest='from_time',
                        required=False, help='Show logs starting from this time (including). Must be in format YYYY-mm-ddTHH:MM:SS (e.g. 2018-02-04T14:34:00, 2017-12-23T02:23:45).\n')
    # Typo fix in user-facing help text: 'upto' -> 'up to'
    parser.add_argument('-t', '--to', dest='to_time',
                        required=False, help='Show logs up to this time (including). Must be in format YYYY-mm-ddTHH:MM:SS (e.g. 2018-04-14T23:31:04, 2017-12-31T09:12:45).\n')
    args_ns = parser.parse_args()
    # A plain dict lets callers use .get() with defaults
    args_dict = vars(args_ns)
    return args_dict
def search_params_formatter(args_dict):
    '''Takes the argument dict as input and returns
    the appropriate search parameters as a tuple:
    (command_regex, logtype, compare_sched, start_dt, end_dt).
    Sets appropriate defaults if needed.

    Exits with status 1 if a supplied time fails to parse.
    '''
    # '.' matches any (non-empty) command when no pattern is given
    command_re = args_dict.get('command') or '.'
    # BUG FIX: 'stderr' is a store_true flag, so the old
    # `args_dict.get('stderr') or 'stdout'` yielded the boolean True,
    # which produced a never-matching 'True.log*' glob in main().
    # Map the flag to the actual log basename instead.
    logtype = 'stderr' if args_dict.get('stderr') else 'stdout'
    compare_sched = args_dict.get('scheduled', False)
    # I think it's safe to take the start of 2018
    # as the starting time if nothing given
    start_time = args_dict.get('from_time') or '2018-01-01T00:00:00'
    # We'll take end of today as the end time if nothing given
    end_time = args_dict.get('to_time') or datetime.datetime.strftime(
        datetime.datetime.now(),
        '%Y-%m-%dT23:59:59')
    try:
        start_dt = datetime.datetime.strptime(start_time,
                                              '%Y-%m-%dT%H:%M:%S'
                                              )
        end_dt = datetime.datetime.strptime(end_time,
                                            '%Y-%m-%dT%H:%M:%S'
                                            )
    except ValueError:
        print_msg(
            'Datetime must be in format `YYYY-mm-ddTHH:MM:SS`. See `hat-parser --help`.',
            file=sys.stderr
        )
        exit(1)
    return (command_re, logtype, compare_sched, start_dt, end_dt)
def main():
    '''Main function that calls others to get data and
    then iterates over the log files line by line, printing
    entries inside the requested time window that match the
    requested command pattern.
    '''
    command_re, logtype, compare_sched, start_dt, end_dt = (
        search_params_formatter(parse_arguments()))
    # BUG FIX: the -c/--command regex was parsed and defaulted but
    # never applied anywhere; compile it once and filter with it below.
    command_pattern = re.compile(command_re)
    logfile_glob = os.path.join(USER_LOG_LOCATION, '{}.log*'.format(logtype))
    logfiles = glob.iglob(logfile_glob)
    # Log lines start with appropriately formatted datetime
    dt_pattern = re.compile(r'^\d{4}-\d{2}-\d{2}\s+(?:\d{2}:){2}\d{2}')
    for file_ in logfiles:
        # Setting appropriate open function
        # expecting .gz extension (rotated logs) or as-is
        open_ = gzip.open if file_.endswith('.gz') else open
        with open_(file_, mode='rt') as f:
            for line in f:
                line = line.rstrip()
                if not line:
                    continue
                # If the line does not start with datetime, it is a
                # multiline log so printing it and continuing the
                # loop without the datetime comparison
                if not dt_pattern.search(line):
                    print_msg(line, end='')  # log lines already contain blank lines
                    continue
                (run_time_dt, euid, job_id, scheduled_time_dt,
                 command, return_code, output) = split_format(line)
                # Comparing dt field
                compare_dt = scheduled_time_dt if compare_sched else run_time_dt
                if start_dt <= compare_dt <= end_dt:
                    # Show only entries whose command matches the -c
                    # pattern (default '.' matches everything)
                    if command_pattern.search(command):
                        print_msg(line, end='')
                # Break out of this file if we're already passed
                # the end dt and we're comparing run time
                # (run-time-ordered logs can have nothing later that fits)
                else:
                    if not compare_sched and compare_dt > end_dt:
                        break
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
......@@ -132,7 +132,7 @@ class Job(metaclass=JobMeta):
self.date_time_epoch = self.get_run_at_epoch()
# Saving the job, with the user's EUID as keys, and increasing
# IDs as subdict keys with command, time, use_shell as values
self.job_id = self._get_job_id()
self.job_id = self._get_job_id(self.euid)
if not self.date_time_epoch:
return
......@@ -145,12 +145,14 @@ class Job(metaclass=JobMeta):
}
})
def _get_job_id(self):
def _get_job_id(self, euid):
'''Get job ID, to be used as the Job dict key.'''
# We'll wrap around at 40000
max_id = 40000
current_keys = set(i for v in enqueued_jobs.values()
for i in v.keys())
# We'll wrap around at 10000 for each user
max_id = 10000
current_keys = sum([list(v.keys()) for u, v in enqueued_jobs.items()
if int(u) == euid],
[]
)
current_id = max(current_keys) if current_keys else 0
next_id = current_id + 1
if next_id <= max_id and next_id not in current_keys:
......
......@@ -35,9 +35,9 @@ class FLock:
self.lockf.close()
def print_msg(msg, end='\n', flush_stream=True, file=sys.stdout):
    '''Wrapper for formatting-printing.

    Surrounds *msg* with newlines; *end*, *flush_stream* and *file*
    are forwarded to print(). The `end` parameter (from the newer of
    the two conflicting diff signatures) is kept, since callers pass
    `print_msg(line, end='')`.
    '''
    print('\n{}\n'.format(msg), end=end, flush=flush_stream, file=file)
    return
......
......@@ -105,4 +105,3 @@ case "$1" in
exit 2
;;
esac
......@@ -12,15 +12,16 @@ HAT_DB_DIR='/var/lib/hatd'
mkdir -p "${HAT_DIR}"
# Copy required stuffs to `$HAT_DIR`
cp -aRt "${HAT_DIR}" ./{hat{,d,-client},env_base.sh}
cp -aRt "${HAT_DIR}" ./{hat{,d,-{client,parser}},env_base.sh}
# Put Systemd file for the daemon in relevant place.
# If this gives an error regarding wrong/absent destination directory,
# please put the file manually in your distro-advised place
cp system/hat-daemon.service /etc/systemd/system/
# Create symlink for the client
# Create symlinks for the client and the parser
ln -sf "${HAT_DIR}"/hat-client /usr/bin/hatc
ln -sf "${HAT_DIR}"/hat-parser /usr/bin/hat-parser
# Create log dir
mkdir -p /var/log/hatd/
......@@ -62,7 +63,7 @@ systemctl enable hat-daemon.service && \
# rm /etc/systemd/system/hat-daemon.service && \
# systemctl daemon-reload
# 2. Remove other files and directories:
# rm -r /var/lib/hatd/ /var/run/hatd/ /usr/lib/hatd/ /etc/logrotate.d/hat-daemon /usr/share/man/man1/hatc.1.gz
# rm -r /var/lib/hatd/ /var/run/hatd/ /usr/lib/hatd/ /etc/logrotate.d/hat-daemon /usr/share/man/man1/hatc.1.gz /usr/bin/hat{c,-parser}
#
# N.B: If you want to keep the enqueued jobs, don't remove `/var/lib/hatd/`, precisely `/var/lib/hatd/hatdb.pkl`.
#
......@@ -207,6 +207,9 @@ hatc -r 2 15 \fB# Removing 2 jobs having IDs 2, 15\fR
.br
.I /usr/lib/hatd
.SH NOTES
For viewing job logs, use \fBhat-parser\fR. See \fBhat-parser --help\fR.
.SH BUGS
There could be bugs. Please report bugs to https://github.com/heemayl/hat/
......