...
 
Commits (3)
......@@ -4,6 +4,7 @@ Benefits of `hat`:
- Second-level resolution, i.e. you can schedule a job to run at an exact second
- Use your shell of choice; you're not bound to `/bin/sh`
- Job modification is supported; you can easily modify the command and time of an enqueued job
- Flexible datetime specifications, see https://github.com/heemayl/humantime-epoch-converter
- If the computer was off at the scheduled time, the job will be run later when the computer is back on, so no job is missed
- User specific jobs, secured approach
......@@ -66,6 +67,37 @@ ID Time Shell Command
% hatc -c
1
% hatc --remove 2
{'msg': 'Queued'}
% hatc -l
Job queue is empty
% hatc -c
0
% hatc -a free 'now +30 mins'
{'msg': 'Done'}
% hatc -l
ID Time Shell Command
1 2018-02-09T03:41:57 - free
% hatc --modify 1 'free -m' _
{'msg': 'Done'}
% hatc -l
ID Time Shell Command
1 2018-02-09T03:41:57 - free -m
% hatc -m 1 _ 'today 14:30:42'
{'msg': 'Done'}
% hatc -l
ID Time Shell Command
1 2018-02-09T14:30:42 - free -m
```
---
......@@ -3,7 +3,6 @@
import argparse
import json
import os
import re
import shlex
import subprocess
import sys
......@@ -66,6 +65,17 @@ def parse_arguments():
Job's STDOUT, STDERR will be logged in `~/.hatd/logs/{stdout,stderr}.log`, respectively.
"""
)
parser.add_argument('-m', '--modify', dest='modify_job',
metavar='<job_id> <command> <datetime_spec> [<shell>]', nargs='+',
required=False, help="""Modify an enqueued job. The first argument must be the job ID (from `hatc -l`). `_` can be used as a placeholder for using an already saved value for an argument (except <job_id>). If <shell> is used, <command> must be specified explicitly. Example:
hatc --modify 2 'free -g' 'now + 30 min' # Everything is updated for Job with ID 2
hatc -m 31 _ 'tomorrow at 14:30' # The command is kept as original, only time is updated
hatc -m 4 'func() { echo Test ;}; func()' _ # Only command is updated
hatc -m 23 'echo $PATH' 'today 18:06:34' dash # Everything is updated
hatc --modify 78 _ 'tomorrow 10 - 6 hr 12 min 3 sec' # Only time is updated
"""
)
parser.add_argument('-r', '--remove', dest='remove_job',
metavar='<JOB_ID>', nargs='+',
required=False, help="""Remove queued job(s) by Job ID. Example:
......@@ -90,6 +100,8 @@ def argument_serializer(args_dict):
return ('jobcount',)
elif args_dict.get('add_job'):
return ('add_job', *args_dict.get('add_job'))
elif args_dict.get('modify_job'):
return ('modify_job', *args_dict.get('modify_job'))
elif args_dict.get('remove_job'):
return ('remove_job', *args_dict.get('remove_job'))
return
......@@ -118,10 +130,8 @@ def json_to_table_print(json_str):
'''Takes a str (e.g. from `json_dumps`)
and converts to a table for printing.
'''
# re.sub is to replace the nested double quotes, for `json.loads`
data = json.loads(re.sub(
r'("command":\s+")[^"]+"([^"]+)"(",)', r'\1\2\3',
json_str.replace('\\', '')))
# Needs raw str to keep \" as-is
data = json.loads(r'{}'.format(json_str))
if data:
# Header
to_print = '\t\t'.join(('ID', 'Time', 'Shell', 'Command'))
......@@ -151,6 +161,7 @@ class SendReceiveData:
self.content = content
self.key_format_map = {
'add_job': self.add_job_fmt,
'modify_job': self.modify_job_fmt,
'remove_job': self.remove_job_fmt,
'joblist': self.joblist_fmt,
'jobcount': self.jobcount_fmt,
......@@ -169,7 +180,7 @@ class SendReceiveData:
self.send_to_daemon()
def add_job_fmt(self, data):
if len(data) not in {2, 3}:
if not (2 <= len(data) <= 3):
raise HatClientException('Ambiguous input')
command = '{} -c "{}"'.format(data[2], data[0]) if len(data) == 3 \
else data[0]
......@@ -185,6 +196,28 @@ class SendReceiveData:
}
}
def modify_job_fmt(self, data):
    '''Build the daemon payload for modifying an already-enqueued job.

    `data` is (job_id, command, datetime_spec[, shell]). A literal `_`
    placeholder for command/time is forwarded untouched so the daemon can
    keep the stored value. The payload is stored on `self.out_dict` under
    the 'add_job' key (the daemon reuses the add path, with `job_id` set,
    for modifications). Raises HatClientException on malformed input.
    '''
    if len(data) not in (3, 4):
        raise HatClientException('Ambiguous input')
    try:
        job_id = int(data[0])
    except ValueError:
        # Non-numeric job ID
        raise HatClientException('Ambiguous input')
    has_shell = len(data) == 4
    if has_shell:
        # Explicit shell given: wrap the command for `<shell> -c "..."`
        command = '{} -c "{}"'.format(data[3], data[1])
    else:
        command = data[1]
    spec = data[2]
    if spec == '_':
        time_ = spec
    else:
        # Resolve the human datetime spec to a formatted local timestamp
        time_ = time.strftime(
            '%Y-%m-%d_%H:%M:%S',
            time.localtime(get_epoch_main(spec)))
    self.out_dict = {
        'add_job': {
            'euid': os.geteuid(),
            'command': command,
            'time_': time_,
            'use_shell': data[3] if has_shell else False,
            'job_id': job_id,
        }
    }
def remove_job_fmt(self, data):
try:
data = [int(i) for i in data]
......
......@@ -69,14 +69,15 @@ class HatDaemon(metaclass=HatDaemonMeta):
def pid(self):
return self.daemon.pid
def add_job(self, euid, command, time_, use_shell=False):
def add_job(self, euid, command, time_, use_shell=False, job_id=None):
'''Adds a new job.'''
# Sending `job` dict to fifo with required params
job = {
'euid': euid,
'command': command,
'time_': time_,
'use_shell': use_shell
'use_shell': use_shell,
'job_id': job_id
}
write_file(
self.runner_in,
......
......@@ -76,13 +76,14 @@ class BaseRunner(metaclass=BaseRunnerMeta):
except json.JSONDecodeError as e:
write_file(self.daemon_log, str(e), mode='at')
else:
if len(content) in {3, 4}:
if 3 <= len(content) <= 5:
try:
Job(
int(content['euid']),
content['command'],
content['time_'],
content.get('use_shell', False)
content.get('use_shell', False),
content.get('job_id')
)
except (KeyError, HatTimerException) as e:
write_file(
......
......@@ -98,28 +98,42 @@ class JobMeta(ABCMeta):
class Job(metaclass=JobMeta):
'''A job to be done at specified time.'''
def __init__(self, euid, command, time_, use_shell=False):
self.euid = euid
self.command = command
self.time_str = time_
self.use_shell = use_shell
def __init__(self, euid, command, time_, use_shell=False, job_id=None):
self.euid = int(euid)
# Checking Permission
# _check_perm(self.euid)
# Getting when to run in Epoch
self.date_time_epoch = self.get_run_at_epoch()
# job_id is sent by runner when updating a Job params
if job_id:
job = enqueued_jobs[self.euid][job_id]
self.job_id = job_id
self.command = job['command'] if command == '_' \
else command
if time_ == '_':
self.date_time_epoch = job['job_run_at']
else:
self.time_str = time_
self.date_time_epoch = self.get_run_at_epoch()
self.use_shell = job['use_shell'] if use_shell == '_' \
else use_shell
else:
self.command = command
self.use_shell = use_shell
self.time_str = time_
self.date_time_epoch = self.get_run_at_epoch()
# Saving the job, with the user's EUID as keys, and increasing
# IDs as subdict keys with command, time, use_shell as values
self.job_id = self._get_job_id()
if not self.date_time_epoch:
return
# Saving the job, with the user's EUID as keys, and increasing
# IDs as subdict keys with command, time, use_shell as values
self.job_id = self._get_job_id()
enqueued_jobs[int(euid)].update({
enqueued_jobs[self.euid].update({
self.job_id: {
'command': self.command,
'job_run_at': int(self.date_time_epoch), # to int
'use_shell': self.use_shell,
}
})
def _get_job_id(self):
'''Get job ID, to be used as the Job dict key.'''
# We'll wrap around at 40000
......
......@@ -2,6 +2,7 @@
import datetime
import fcntl
import logging
import json
import os
import time
......@@ -16,6 +17,10 @@ class FLock:
def __enter__(self):
self.lockf = open(self.lockfile, 'w')
try:
os.chmod(self.lockfile, 0o660)
except PermissionError:
pass
while True:
try:
fcntl.lockf(self.lockf, fcntl.LOCK_EX | fcntl.LOCK_NB)
......@@ -75,6 +80,19 @@ def username_from_euid(euid):
if str(euid) in line:
return line[0]
def get_logger(logfile='/var/log/hatd/debug.log'):
    """Return the shared 'hatd_base_logger', logging DEBUG+ to `logfile`.

    `logging.getLogger` returns the same object for the same name, so the
    logger is a process-wide singleton. Guard against attaching another
    FileHandler on every call — without the guard, each call would stack
    one more handler and every record would be written once per call.
    """
    logger = logging.getLogger('hatd_base_logger')
    if not logger.handlers:
        handler = logging.FileHandler(logfile)
        handler.setFormatter(logging.Formatter('%(asctime)s:: %(message)s'))
        handler.setLevel(logging.DEBUG)
        logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    return logger
if __name__ == '__main__':
pass
......@@ -8,6 +8,7 @@
export PATH='/bin:/usr/bin:/sbin:/usr/sbin'
HAT_DIR='/usr/lib/hatd'
HAT_DB_DIR='/var/lib/hatd'
mkdir -p "${HAT_DIR}"
# Copy everything to `$HAT_DIR`
......@@ -25,6 +26,9 @@ ln -sf "${HAT_DIR}"/hat-client /usr/bin/hatc
# Create log dir
mkdir -p /var/log/hatd/
# Create DB file
[[ -f ${HAT_DB_DIR}/hatdb.pkl ]] || { mkdir -p "${HAT_DB_DIR}" && : >"${HAT_DB_DIR}"/hatdb.pkl ;}
# Copying the logrotate file
cp "${HAT_DIR}"/system/hat-daemon /etc/logrotate.d/
......