Merge pull request #146 from sot/py3-cmds-task
Migrate update_cmds task_schedule to ska3 idiom
taldcroft authored Dec 4, 2019
2 parents 85187c3 + 3dac9ba commit 046b4e2
Showing 3 changed files with 30 additions and 8 deletions.
15 changes: 13 additions & 2 deletions kadi/update_cmds.py
@@ -3,6 +3,7 @@
 import argparse
 import difflib
 import pickle
+from pathlib import Path

 import numpy as np
 import tables
@@ -48,8 +49,8 @@ def get_opt(args=None):
     OCC_SOT_ACCOUNT = os.environ['USER'].lower() == 'sot'
     parser = argparse.ArgumentParser(description='Update HDF5 cmds table')
     parser.add_argument("--mp-dir",
-                        default='/data/mpcrit1/mplogs',
-                        help="MP load directory")
+                        help=("MP load directory (default=/data/mpcrit1/mplogs "
+                              "or $SKA/data/mpcrit1/mplogs)"))
     parser.add_argument("--start",
                         help="Start date for update (default=stop-42 days)")
     parser.add_argument("--stop",
@@ -391,6 +392,16 @@ def main(args=None):
                     .format(pars_dict_path))
         pars_dict = {}

+    if not opt.mp_dir:
+        for prefix in ('/', os.environ['SKA']):
+            pth = Path(prefix, 'data', 'mpcrit1', 'mplogs')
+            if pth.exists():
+                opt.mp_dir = str(pth)
+                break
+        else:
+            raise FileNotFoundError('no mission planning directories found (need --mp-dir)')
+    logger.info(f'Using mission planning files at {opt.mp_dir}')
+
     # Recast as dict subclass that remembers if any element was updated
     pars_dict = UpdatedDict(pars_dict)

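The new fallback in main() relies on Python's for/else: the else branch runs only when the loop finishes without hitting break, i.e. when no candidate directory exists. A minimal standalone sketch of the same pattern (the find_mp_dir helper name is illustrative, not part of the commit; it assumes the SKA environment variable is set, as in a ska3 environment):

import os
from pathlib import Path

def find_mp_dir():
    """Return the first existing mission planning directory, else raise."""
    for prefix in ('/', os.environ['SKA']):
        pth = Path(prefix, 'data', 'mpcrit1', 'mplogs')
        if pth.exists():
            mp_dir = str(pth)
            break
    else:
        # for/else: reached only if the loop completed without a break
        raise FileNotFoundError('no mission planning directories found (need --mp-dir)')
    return mp_dir
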
16 changes: 14 additions & 2 deletions setup.py
@@ -1,5 +1,7 @@
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 from distutils.core import setup
+import os
+import sys

 # from this_package.version import package_version object
 from kadi.version import package_version
@@ -23,13 +25,22 @@
 except ImportError:
     cmdclass = {}

-entry_points = {'console_scripts': 'get_chandra_states = kadi.commands.states:get_chandra_states'}
+if "--user" not in sys.argv:
+    share_path = os.path.join(sys.prefix, "share", "kadi")
+    data_files = [(share_path, ['task_schedule_cmds.cfg',
+                                'task_schedule_events.cfg'])]
+else:
+    data_files = None
+
+entry_points = {'console_scripts': [
+    'get_chandra_states = kadi.commands.states:get_chandra_states',
+    'kadi_update_cmds = kadi.update_cmds:main']}

 setup(name='kadi',
       version=package_version.version,
       description='Kadi events archive',
       author='Tom Aldcroft',
-      author_email='aldcroft@head.cfa.harvard.edu',
+      author_email='taldcroft@cfa.harvard.edu',
       url='http://cxc.harvard.edu/mta/ASPECT/tool_doc/kadi/',
       packages=['kadi', 'kadi.events', 'kadi.cmds', 'kadi.tests',
                 'kadi.commands', 'kadi.commands.tests'],
@@ -39,6 +50,7 @@
                              'static/images/*', 'static/*.css',
                              'GIT_VERSION']},
      tests_require=['pytest'],
+     data_files=data_files,
      cmdclass=cmdclass,
      entry_points=entry_points,
      )
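
After installation, the new 'kadi_update_cmds' entry point resolves to kadi.update_cmds:main. The wrapper script that setuptools generates behaves roughly like the sketch below (illustrative only; the real wrapper is auto-generated and requires kadi to be installed):

import sys
from kadi.update_cmds import main

if __name__ == '__main__':
    # Exit with whatever main() returns (None maps to exit status 0)
    sys.exit(main())
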
7 changes: 3 additions & 4 deletions task_schedule_cmds.cfg
@@ -11,9 +11,8 @@ loud 0 # Run loudly or quietly (production mode)
 # Data files and directories. The *_dir vars can have $ENV{} vars which
 # get interpolated. (Note lack of task name after TST_DATA because this is just for test).

-data_dir $ENV{SKA_DATA}/kadi # Data file directory
-log_dir $ENV{SKA_DATA}/kadi/logs # Log file directory
-bin_dir $ENV{SKA_SHARE}/kadi # Bin dir (optional, see task def'n)
+data_dir $ENV{SKA}/data/kadi # Data file directory
+log_dir $ENV{SKA}/data/kadi/logs # Log file directory
 master_log kadi_cmds.log # Composite master log (created in log_dir)
 heartbeat task_sched_heartbeat_cmds

@@ -40,5 +39,5 @@ alert aca@head.cfa.harvard.edu

 <task kadi_cmds>
       cron * * * * *
-      exec update_cmds --data-root=$ENV{SKA_DATA}/kadi
+      exec kadi_update_cmds --data-root=$ENV{SKA}/data/kadi
 </task>
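
The cron task now runs the installed console script against $ENV{SKA}/data/kadi instead of the old SKA_DATA location. In Python terms the job is roughly equivalent to the sketch below (hedged: it assumes main() accepts an argument list, as its main(args=None) signature suggests, and that SKA is set in the environment):

import os
from kadi.update_cmds import main

# Same effect as: kadi_update_cmds --data-root=$ENV{SKA}/data/kadi
data_root = os.path.join(os.environ['SKA'], 'data', 'kadi')
main(['--data-root', data_root])
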
