Mechanism to define worker(s) to operate a DataJoint pipeline.

The workers orchestrate running the populate routines of the tables assigned to them, with logging for better progress and status monitoring.
```python
import datajoint as dj

from datajoint_utilities.dj_worker import DataJointWorker, WorkerLog

db_prefix = 'my_pipeline_'

# worker1 runs for 3 hours (run_duration is in seconds),
# pausing 10 seconds between populate cycles.
# The second argument names the schema where the worker's logs are stored.
worker1 = DataJointWorker('worker1', db_prefix + 'log',
                          db_prefix=db_prefix,
                          run_duration=3600 * 3,
                          sleep_duration=10)

# worker2 runs perpetually (a negative run_duration means no time limit)
worker2 = DataJointWorker('worker2', db_prefix + 'log',
                          db_prefix=db_prefix,
                          run_duration=-1,
                          sleep_duration=10)
```
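Depending on the installed datajoint-utilities version, the constructor may accept further keyword arguments, for example a maximum number of idle cycles before the worker exits. This is a hedged sketch only; `max_idled_cycle` is an assumed keyword name, so check the `DataJointWorker` signature in your installed version before use:

```python
# Hedged sketch: 'max_idled_cycle' is an assumed keyword argument;
# verify it exists in your installed DataJointWorker before relying on it.
worker3 = DataJointWorker('worker3', db_prefix + 'log',
                          db_prefix=db_prefix,
                          run_duration=-1,
                          sleep_duration=10,
                          max_idled_cycle=100)
```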
Auto-populated tables are assigned to a worker by applying the worker as a decorator:

```python
# `schema` is assumed to be a dj.schema(...) object defined elsewhere
@schema
@worker1
class AnalysisTable(dj.Computed):
    definition = """
    ...
    """

    def make(self, key):
        # some analysis code
        pass


@schema
@worker2
class AnalysisTable2(dj.Computed):
    definition = """
    ...
    """

    def make(self, key):
        # some analysis code
        pass
```
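The same decorator pattern may also register a plain callable to run once per worker cycle, depending on the installed datajoint-utilities version. A hedged sketch; `cleanup_outdated_files` is a hypothetical function name, and function support should be confirmed against the package source:

```python
# Hedged sketch: registering a custom callable with a worker.
# Confirm that the installed datajoint-utilities version supports
# plain functions before relying on this; cleanup_outdated_files
# is a hypothetical housekeeping step, not part of the pipeline above.
@worker2
def cleanup_outdated_files():
    pass
```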
Each worker's `run()` enters a populate loop over its registered tables until the configured `run_duration` elapses, so in practice each worker is typically launched in its own process (or container):

```python
worker1.run()
worker2.run()
```

Recently run jobs can then be inspected from the worker log:

```python
WorkerLog.print_recent_jobs()
```
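Since `WorkerLog` is itself a DataJoint table stored in the worker schema (`db_prefix + 'log'` above), it can also be queried with standard DataJoint operators. A minimal sketch, assuming the log table's heading includes a `worker_name` attribute (inspect `WorkerLog.heading` to confirm):

```python
# Assumption: 'worker_name' is an attribute of WorkerLog's heading;
# check WorkerLog.heading in your installed version.
worker1_jobs = WorkerLog & {'worker_name': 'worker1'}
print(worker1_jobs)
```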