Skip to content

Commit

Permalink
Merge pull request #19 from jdkent/add_logging
Browse files Browse the repository at this point in the history
[ENH] add logging system to write results to files
  • Loading branch information
zkhan12 authored Apr 30, 2020
2 parents 0cb3653 + 4bce4fb commit 705e140
Show file tree
Hide file tree
Showing 7 changed files with 155 additions and 46 deletions.
6 changes: 3 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Your version: 0.6.0 Latest version: 0.6.0
# Your version: 0.6.0 Latest version: 0.7.0
# Generated by Neurodocker version 0.6.0
# Timestamp: 2020-04-07 02:17:31 UTC
# Timestamp: 2020-04-30 13:55:19 UTC
#
# Thank you for using Neurodocker. If you discover any issues
# or ways to improve this software, please submit an issue or
Expand Down Expand Up @@ -65,7 +65,7 @@ RUN echo '{ \
\n "instructions": [ \
\n [ \
\n "base", \
\n "hbclab/accel-bids" \
\n "hbclab/accel-bids:unstable" \
\n ], \
\n [ \
\n "user", \
Expand Down
4 changes: 4 additions & 0 deletions Dockerfile_exec
Original file line number Diff line number Diff line change
Expand Up @@ -42,5 +42,9 @@ RUN conda env create -f environment.yml

RUN bash -c 'conda init && . /home/coder/.bashrc && . activate accel && pip install -e /home/coder/projects'

USER root
RUN chown -R coder:coder /home/coder/projects

USER coder
# Run as executable
ENTRYPOINT ["/neurodocker/startup.sh", "accel_transform"]
63 changes: 57 additions & 6 deletions accel_code/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import argparse
from argparse import RawTextHelpFormatter
import os
import logging


# This function builds a parser that allows this tool to be used
Expand All @@ -30,6 +31,8 @@ def get_parser():
parser.add_argument('--replace', default='no',
choices=['yes', 'no'],
help='replace current output file')
parser.add_argument('--logging-directory',
help='directory to place logging files')

return parser

Expand All @@ -39,18 +42,66 @@ def main():
opts = get_parser().parse_args()
root = opts.project_root_directory

logging_directory = opts.logging_directory if opts.logging_directory else os.getcwd()

# assume we can get lab_id and date for the log file
lab_id = utils.get_lab_id(opts.old_file_path)
date = utils.get_date(opts.old_file_path)

ses_id, project = excel_lookup.excel_lookup(lab_id, date, opts.excel_file_path)

sub_id = redcap_query.redcap_query(lab_id, project, opts.api_key)

new_file_name = bids_transform.bids_transform(project, sub_id, ses_id)
# set up the logging configuration
logging_fname = '_'.join([str(lab_id), str(date.date())]) + '.log'
logging_path = os.path.join(logging_directory, logging_fname)

logger = logging.getLogger(__name__)
logger.level = 10
# create file handler which logs even debug messages
fh = logging.FileHandler(logging_path)
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)

# get session and project information
try:
ses_id, project = excel_lookup.excel_lookup(lab_id, date, opts.excel_file_path)
except Exception as e:
logger.exception("could not lookup {} ({}) in {}".format(
lab_id, str(date), opts.excel_file_path))
raise(e)

# find subject id from redcap
try:
sub_id = redcap_query.redcap_query(lab_id, project, opts.api_key)
except Exception as e:
msg = "could not get subject id from redcap for {} ({})"
logger.exception(msg.format(lab_id, str(date)))
raise(e)
# create a new path/filename using this information
try:
new_file_name = bids_transform.bids_transform(project, sub_id, ses_id)
except Exception as e:
msg = "could not create new filename: {} ({})"
logger.exception(msg.format(lab_id, str(date)))
raise(e)

new_file_path = os.path.join(root, new_file_name)

utils.make_directory(opts.old_file_path, new_file_path, opts.replace)
# copy the file to the new project specific location
try:
utils.make_directory(opts.old_file_path, new_file_path, opts.replace)
except Exception as e:
msg = "could not copy file: {} ({})"
logger.exception(msg.format(lab_id, str(date)))
raise(e)

logger.info("{of} -> {nf}".format(of=opts.old_file_path, nf=new_file_path))

return

Expand Down
35 changes: 35 additions & 0 deletions accel_code/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import os
import pandas as pd


class MockProject:
    """Stand-in for ``redcap.Project`` used by the test suite.

    Instead of contacting a live REDCap server, :meth:`export_records`
    serves a project-specific mock TSV file bundled under
    ``tests/data/mock_redcap``.
    """

    # Ordered mapping: project-identifying field name -> mock data file.
    # Iteration order reproduces the original if/elif precedence
    # (first matching field wins).
    _FIELD_TO_FILE = {
        "extend_id": "mock_redcap_extend.tsv",
        "better_id": "mock_redcap_better.tsv",
        "bike_id": "mock_redcap_bikeatrain.tsv",
        "ambi_id": "mock_redcap_ambi.tsv",
        "pacr_id": "mock_redcap_pacr.tsv",
        "alertid": "mock_redcap_alert.tsv",
        "normative_id": "mock_redcap_normative.tsv",
    }

    def __init__(self, url, key):
        # Signature mirrors redcap.Project(url, key); the mock needs neither.
        pass

    @staticmethod
    def export_records(fields, records=None, format='df'):
        """Return mock records as a DataFrame indexed by ``lab_id``.

        Parameters
        ----------
        fields : container of str
            Field names requested; the project-identifying field selects
            which mock TSV is loaded.
        records, format :
            Present only to match the real API; ignored — the result is
            always a DataFrame.

        Raises
        ------
        ValueError
            If no known project-identifying field is present in ``fields``.
        """
        test_path = os.path.dirname(os.path.realpath(__file__))
        for field, mock_file in MockProject._FIELD_TO_FILE.items():
            if field in fields:
                break
        else:
            raise ValueError("project not found!")

        df = pd.read_csv(os.path.join(test_path,
                                      "data",
                                      "mock_redcap",
                                      mock_file),
                         sep="\t", index_col="lab_id")
        return df
36 changes: 1 addition & 35 deletions accel_code/tests/test_redcap_query.py
Original file line number Diff line number Diff line change
@@ -1,42 +1,8 @@
import os
import pandas as pd
import redcap
import pytest

from ..redcap_query import redcap_query


class MockProject:

def __init__(self, url, key):
pass

@staticmethod
def export_records(fields, records=None, format='df'):
test_path = os.path.dirname(os.path.realpath(__file__))
if "extend_id" in fields:
mock_file = "mock_redcap_extend.tsv"
elif "better_id" in fields:
mock_file = "mock_redcap_better.tsv"
elif "bike_id" in fields:
mock_file = "mock_redcap_bikeatrain.tsv"
elif "ambi_id" in fields:
mock_file = "mock_redcap_ambi.tsv"
elif "pacr_id" in fields:
mock_file = "mock_redcap_pacr.tsv"
elif "alertid" in fields:
mock_file = "mock_redcap_alert.tsv"
elif "normative_id" in fields:
mock_file = "mock_redcap_normative.tsv"
else:
raise ValueError("project not found!")

df = pd.read_csv(os.path.join(test_path,
"data",
"mock_redcap",
mock_file),
sep="\t", index_col="lab_id")
return df
from .conftest import MockProject


@pytest.mark.parametrize("lab_id,project,expected_participant_id",
Expand Down
53 changes: 53 additions & 0 deletions accel_code/tests/test_run.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import os
import sys

import redcap

from .conftest import MockProject
from ..run import main


def test_main(monkeypatch):
    """End-to-end smoke test: ``main()`` runs to completion and returns None."""

    # Swap in the mock project so no live REDCap server is contacted.
    monkeypatch.setattr(redcap, "Project", MockProject)

    # All test fixtures live under tests/data.
    data_dir = os.path.join(os.path.dirname(__file__), 'data')

    cli_args = [
        "accel_transform",
        # positional: project_root_directory
        os.path.join(data_dir, 'vosslabhpc'),
        # positional: old_file_path
        os.path.join(data_dir, '827 (2018-11-29)RAW.csv'),
        # positional: api_key
        'fake_api_key',
        # positional: excel_file_path
        os.path.join(data_dir, 'ActiGraph_analysis_summary.xlsx'),
        "--logging-directory", data_dir,
        "--replace", "yes",
    ]

    # main() parses sys.argv itself, so patch argv wholesale.
    monkeypatch.setattr(sys, 'argv', cli_args)

    assert main() is None
4 changes: 2 additions & 2 deletions scripts/make_dockerfile.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ set -e
# Generate Dockerfile.
generate_docker() {
docker run --rm jdkent/neurodocker:dev generate docker \
--base=hbclab/accel-bids \
--base=hbclab/accel-bids:unstable \
--pkg-manager=apt \
--user=coder \
--workdir="/home/coder" \
Expand All @@ -20,4 +20,4 @@ generate_docker() {

generate_docker > Dockerfile

docker build -t hbclab/accel-dev .
docker build -t hbclab/accel-dev .

0 comments on commit 705e140

Please sign in to comment.