diff --git a/VERSION b/VERSION
index 777875491..f800ee80e 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-24.1.5
+24.1.6
diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py
index 838ad93ff..d4f7faec8 100644
--- a/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py
+++ b/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py
@@ -77,7 +77,7 @@ def __init__(self, loris_getopt_obj, script_name):
         # ---------------------------------------------------------------------------------------------
         # Create tmp dir and log file (their basename being the name of the script run)
         # ---------------------------------------------------------------------------------------------
-        self.tmp_dir = lib.utilities.create_processing_tmp_dir(script_name)
+        self.tmp_dir = self.loris_getopt_obj.tmp_dir
         self.log_obj = Log(
             self.db, self.data_dir, script_name, os.path.basename(self.tmp_dir), self.options_dict, self.verbose
         )
diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py
index 59c19aa52..ec495a2da 100644
--- a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py
+++ b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py
@@ -119,6 +119,8 @@ def _run_dicom_archive_validation_pipeline(self):
             message = f"run_dicom_archive_validation.py successfully executed for UploadID {self.upload_id} " \
                       f"and ArchiveLocation {self.tarchive_path}"
             self.log_info(message, is_error="N", is_verbose="Y")
+            # reset mri_upload to Inserting as run_dicom_archive_validation.py will set Inserting=0 after execution
+            self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',))
         else:
             message = f"run_dicom_archive_validation.py failed validation for UploadID {self.upload_id}" \
                       f"and ArchiveLocation {self.tarchive_path}. Exit code was {validation_process.returncode}."
@@ -301,6 +303,8 @@ def _run_nifti_insertion(self, nifti_file_path, json_file_path, bval_file_path=N
             message = f"run_nifti_insertion.py successfully executed for file {nifti_file_path}"
             self.log_info(message, is_error="N", is_verbose="Y")
             self.inserted_file_count += 1
+            # reset mri_upload to Inserting as run_nifti_insertion.py will set Inserting=0 after execution
+            self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',))
         else:
             message = f"run_nifti_insertion.py failed for file {nifti_file_path}.\n{stdout}"
             print(stdout)
@@ -346,6 +350,8 @@ def _add_intended_for_to_fieldmap_json_files(self):
         """

         fmap_files_dict = self.imaging_obj.determine_intended_for_field_for_fmap_json_files(self.tarchive_id)
+        if not fmap_files_dict:
+            return

         for key in fmap_files_dict.keys():
             sorted_fmap_files_list = fmap_files_dict[key]
diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py
index de5520333..1cb0c54b1 100644
--- a/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py
+++ b/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py
@@ -5,6 +5,7 @@
 import lib.exitcode
 import os
 import re
+import subprocess
 import sys

 from lib.dcm2bids_imaging_pipeline_lib.base_pipeline import BasePipeline
@@ -36,6 +37,8 @@ def __init__(self, loris_getopt_obj, script_name):
         """
         super().__init__(loris_getopt_obj, script_name)
         self.nifti_path = self.options_dict["nifti_path"]["value"]
+        self.nifti_s3_url = self.options_dict["nifti_path"]["s3_url"] \
+            if 's3_url' in self.options_dict["nifti_path"].keys() else None
         self.nifti_blake2 = blake2b(self.nifti_path.encode('utf-8')).hexdigest()
         self.nifti_md5 = hashlib.md5(self.nifti_path.encode()).hexdigest()
         self.json_path = self.options_dict["json_path"]["value"]
@@ -48,6 +51,16 @@ def __init__(self, loris_getopt_obj, script_name):
         self.loris_scan_type = self.options_dict["loris_scan_type"]["value"]
         self.bypass_extra_checks = self.options_dict["bypass_extra_checks"]["value"]

+        # ---------------------------------------------------------------------------------------------
+        # Set 'Inserting' flag to 1 in mri_upload
+        # ---------------------------------------------------------------------------------------------
+        self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',))
+
+        # ---------------------------------------------------------------------------------------------
+        # Get S3 object from loris_getopt object
+        # ---------------------------------------------------------------------------------------------
+        self.s3_obj = self.loris_getopt_obj.s3_obj
+
         # ---------------------------------------------------------------------------------------------
         # Check the mri_upload table to see if the DICOM archive has been validated
         # ---------------------------------------------------------------------------------------------
@@ -83,6 +96,8 @@ def __init__(self, loris_getopt_obj, script_name):
                 self.nifti_path, self.subject_id_dict["CandMismatchError"]
             )
+            if self.nifti_s3_url:  # push candidate errors to S3 if provided file was on S3
+                self._run_push_to_s3_pipeline()
             self.log_error_and_exit(
                 self.subject_id_dict['CandMismatchError'], lib.exitcode.CANDIDATE_MISMATCH, is_error="Y", is_verbose="N"
             )
@@ -107,6 +122,8 @@ def __init__(self, loris_getopt_obj, script_name):
             if not self.scan_type_id:
                 self._move_to_trashbin()
                 self._register_protocol_violated_scan()
+                if self.nifti_s3_url:  # push violations to S3 if provided file was on S3
+                    self._run_push_to_s3_pipeline()
                 message = f"{self.nifti_path}'s acquisition protocol is 'unknown'."
                 self.log_error_and_exit(message, lib.exitcode.UNKNOWN_PROTOCOL, is_error="Y", is_verbose="N")
         else:
@@ -116,6 +133,8 @@ def __init__(self, loris_getopt_obj, script_name):
             if not self.scan_type_id:
                 self._move_to_trashbin()
                 self._register_protocol_violated_scan()
+                if self.nifti_s3_url:  # push violations to S3 if provided file was on S3
+                    self._run_push_to_s3_pipeline()
                 message = f"{self.nifti_path}'s scan type {self.scan_type_name} provided to run_nifti_insertion.py" \
                           f" is not a valid scan type in the database."
                 self.log_error_and_exit(message, lib.exitcode.UNKNOWN_PROTOCOL, is_error="Y", is_verbose="N")
@@ -127,6 +146,8 @@ def __init__(self, loris_getopt_obj, script_name):
         if not self.bids_categories_dict:
             self._move_to_trashbin()
             self._register_protocol_violated_scan()
+            if self.nifti_s3_url:  # push violations to S3 if provided file was on S3
+                self._run_push_to_s3_pipeline()
             message = f"Scan type {self.scan_type_name} does not have BIDS tables set up."
             self.log_error_and_exit(message, lib.exitcode.UNKNOWN_PROTOCOL, is_error="Y", is_verbose="N")
@@ -151,6 +172,8 @@ def __init__(self, loris_getopt_obj, script_name):
             self._move_to_trashbin()
             self._register_violations_log(self.exclude_violations_list, self.trashbin_nifti_rel_path)
             self._register_violations_log(self.warning_violations_list, self.trashbin_nifti_rel_path)
+            if self.nifti_s3_url:  # push violations to S3 if provided file was on S3
+                self._run_push_to_s3_pipeline()
             message = f"{self.nifti_path} violates exclusionary checks listed in mri_protocol_checks. " \
                       f" List of violations are: {self.exclude_violations_list}"
             self.log_error_and_exit(message, lib.exitcode.UNKNOWN_PROTOCOL, is_error="Y", is_verbose="N")
@@ -167,9 +190,16 @@ def __init__(self, loris_getopt_obj, script_name):
         # ---------------------------------------------------------------------------------------------
         self.remove_tmp_dir()

+        # ---------------------------------------------------------------------------------------------
+        # Push inserted images to S3 if they were downloaded from S3
+        # ---------------------------------------------------------------------------------------------
+        if self.nifti_s3_url:
+            self._run_push_to_s3_pipeline()
+
         # ---------------------------------------------------------------------------------------------
         # If we get there, the insertion was complete and successful
         # ---------------------------------------------------------------------------------------------
+        self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('0',))
         sys.exit(lib.exitcode.SUCCESS)

     def _load_json_sidecar_file(self):
@@ -658,3 +688,28 @@ def _create_pic_image(self):
         pic_rel_path = self.imaging_obj.create_imaging_pic(file_info)
         self.imaging_obj.insert_parameter_file(self.file_id, 'check_pic_filename', pic_rel_path)
+
+    def _run_push_to_s3_pipeline(self):
+        """
+        Run push to S3 script to upload data to S3. This function is called only when the file path to insert provided
+        to the script is an S3 URL.
+ """ + + push_to_s3_cmd = [ + "run_push_imaging_files_to_s3_pipeline.py", + "-p", self.options_dict["profile"]["value"], + "-u", str(self.upload_id), + ] + if self.verbose: + push_to_s3_cmd.append("-v") + + s3_process = subprocess.Popen(push_to_s3_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout, stderr = s3_process.communicate() + + if s3_process.returncode == 0: + message = f"run_push_imaging_files_to_s3_pipeline.py successfully executed for Upload ID {self.upload_id}" + self.log_info(message, is_error="N", is_verbose="Y") + else: + message = f"run_push_imaging_files_to_s3_pipeline.py failed for Upload ID {self.upload_id}.\n{stdout}" + print(stdout) + self.log_info(message, is_error="Y", is_verbose="Y") diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py index b9c69c066..b9f3dcc2d 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py @@ -4,7 +4,6 @@ import lib.exitcode import lib.utilities -from lib.aws_s3 import AwsS3 from lib.dcm2bids_imaging_pipeline_lib.base_pipeline import BasePipeline __license__ = "GPLv3" @@ -33,16 +32,14 @@ def __init__(self, loris_getopt_obj, script_name): self.tarchive_id = self.dicom_archive_obj.tarchive_info_dict["TarchiveID"] # --------------------------------------------------------------------------------------------- - # Get Bucket information from Config and connect to bucket + # Set 'Inserting' flag to 1 in mri_upload # --------------------------------------------------------------------------------------------- - s3_endpoint = self.config_db_obj.get_config("AWS_S3_Endpoint") - s3_bucket_name = self.config_db_obj.get_config("AWS_S3_Default_Bucket") - self.s3_obj = AwsS3( - aws_access_key_id=self.config_file.s3["aws_access_key_id"], - aws_secret_access_key=self.config_file.s3["aws_secret_access_key"], - aws_endpoint_url=s3_endpoint if s3_endpoint else self.config_file.s3["aws_s3_endpoint_url"], - bucket_name=s3_bucket_name if s3_bucket_name else self.config_file.s3["aws_s3_bucket_name"] - ) + self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',)) + + # --------------------------------------------------------------------------------------------- + # Get S3 object from loris_getopt object + # --------------------------------------------------------------------------------------------- + self.s3_obj = self.loris_getopt_obj.s3_obj # --------------------------------------------------------------------------------------------- # Get all the files from files, parameter_file and violation tables @@ -69,6 +66,7 @@ def __init__(self, loris_getopt_obj, script_name): os.remove(full_path) self._clean_up_empty_folders() + self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('0',)) sys.exit(lib.exitcode.SUCCESS) def _get_files_to_push_list(self): @@ -144,14 +142,11 @@ def _get_list_of_files_from_mri_protocol_violated_scans(self): """ entries = self.imaging_obj.mri_prot_viol_scan_db_obj.get_protocol_violations_for_tarchive_id(self.tarchive_id) - print(entries) for entry in entries: - print(entry['minc_location']) - print(os.path.exists(entry['minc_location'])) if entry['minc_location'].startswith('s3://'): # skip since file already pushed to S3 continue - if not os.path.exists(self.data_dir + entry['minc_location']): + if 
not os.path.exists(os.path.join(self.data_dir, entry['minc_location'])): # violation has been rerun or moved continue self.files_to_push_list.append({ @@ -182,7 +177,7 @@ def _get_list_of_files_from_mri_violations_log(self): if entry['MincFile'].startswith('s3://'): # skip since file already pushed to S3 continue - if not os.path.exists(self.data_dir + entry['MincFile']): + if not os.path.exists(os.path.join(self.data_dir, entry['MincFile'])): # violation has been rerun or moved continue self.files_to_push_list.append({ @@ -266,7 +261,6 @@ def _clean_up_empty_folders(self): print("Cleaning up empty folders") cand_id = self.subject_id_dict["CandID"] bids_cand_id = f"sub-{cand_id}" - print(os.path.join(self.data_dir, "assembly_bids", bids_cand_id)) lib.utilities.remove_empty_folders(os.path.join(self.data_dir, "assembly_bids", bids_cand_id)) lib.utilities.remove_empty_folders(os.path.join(self.data_dir, "pic", cand_id)) lib.utilities.remove_empty_folders(os.path.join(self.data_dir, "trashbin")) diff --git a/python/lib/imaging.py b/python/lib/imaging.py index 0fe1c0e73..b48bd6279 100644 --- a/python/lib/imaging.py +++ b/python/lib/imaging.py @@ -664,7 +664,9 @@ def look_for_matching_protocols(self, protocols_list, scan_param): matching_protocols_list = [] for protocol in protocols_list: if protocol['series_description_regex']: - if re.search(rf"{protocol['series_description_regex']}", scan_param['SeriesDescription']): + if re.search( + rf"{protocol['series_description_regex']}", scan_param['SeriesDescription'], re.IGNORECASE + ): matching_protocols_list.append(protocol['Scan_type']) elif self.is_scan_protocol_matching_db_protocol(protocol, scan_param): matching_protocols_list.append(protocol['Scan_type']) @@ -793,7 +795,7 @@ def get_violations(self, checks_list, header, severity, scan_param_dict): True for v in valid_ranges if self.in_range(scan_param, v[0], v[1])] )) if valid_ranges else True passes_regex_check = bool(len([ - True for r in valid_regexs if re.match(r, scan_param) + True for r in valid_regexs if re.match(r, scan_param, re.IGNORECASE) ])) if valid_regexs else True if passes_regex_check and passes_range_check: @@ -866,9 +868,16 @@ def determine_intended_for_field_for_fmap_json_files(self, tarchive_id): # get the list of files sorted by acquisition time sorted_new_files_list = self.get_list_of_files_sorted_by_acq_time(files_list) + if not sorted_new_files_list or not sorted_fmap_files_dict: + # if got empty lists, then there are no files to determine IntendedFor either because acq_time + # was not set or because there are no fieldmap data + return None + for key in sorted_fmap_files_dict.keys(): sorted_fmap_files_list = sorted_fmap_files_dict[key] for idx, fmap_dict in enumerate(sorted_fmap_files_list): + if not fmap_dict['acq_time']: + continue fmap_acq_time = fmap_dict['acq_time'] next_fmap_acq_time = sorted_fmap_files_list[idx + 1]['acq_time'] \ if idx + 1 < len(sorted_fmap_files_list) else None @@ -926,13 +935,15 @@ def get_list_of_fmap_files_sorted_by_acq_time(self, files_list): fmap_files_dir_pa = [] fmap_files_no_dir = [] for file_dict in files_list: + bids_info = self.mri_prot_db_obj.get_bids_info_for_scan_type_id( file_dict['AcquisitionProtocolID'] ) - acq_time = self.param_file_db_obj.get_parameter_file_for_file_id_param_type_id( + param_file_result = self.param_file_db_obj.get_parameter_file_for_file_id_param_type_id( file_dict['FileID'], self.param_type_db_obj.get_parameter_type_id('acquisition_time') - )['Value'] + ) + acq_time = param_file_result['Value'] if 
param_file_result else None if bids_info['BIDSCategoryName'] == 'fmap' and bids_info['BIDSScanType'] in bids_fmap_suffix_list: json_file_path = self.param_file_db_obj.get_parameter_file_for_file_id_param_type_id( file_dict['FileID'], @@ -953,11 +964,14 @@ def get_list_of_fmap_files_sorted_by_acq_time(self, files_list): else: fmap_files_no_dir.append(file_dict) - fmap_files_dict = { - 'dir-AP': sorted(fmap_files_dir_ap, key=lambda x: x['acq_time']), - 'dir-PA': sorted(fmap_files_dir_pa, key=lambda x: x['acq_time']), - 'no-dir': sorted(fmap_files_no_dir, key=lambda x: x['acq_time']), - } + try: + fmap_files_dict = { + 'dir-AP': sorted(fmap_files_dir_ap, key=lambda x: x['acq_time']), + 'dir-PA': sorted(fmap_files_dir_pa, key=lambda x: x['acq_time']), + 'no-dir': sorted(fmap_files_no_dir, key=lambda x: x['acq_time']), + } + except TypeError: + return None return fmap_files_dict @@ -986,10 +1000,11 @@ def get_list_of_files_sorted_by_acq_time(self, files_list): bids_info = self.mri_prot_db_obj.get_bids_info_for_scan_type_id( file_dict['AcquisitionProtocolID'] ) - acq_time = self.param_file_db_obj.get_parameter_file_for_file_id_param_type_id( + param_file_result = self.param_file_db_obj.get_parameter_file_for_file_id_param_type_id( file_dict['FileID'], self.param_type_db_obj.get_parameter_type_id('acquisition_time') - )['Value'] + ) + acq_time = param_file_result['Value'] if param_file_result else None require_fmap = False if (bids_info['BIDSCategoryName'] == 'dwi' and bids_info['BIDSScanType'] in bids_dwi_suffix_list) \ or (bids_info['BIDSCategoryName'] == 'func' and bids_info['BIDSScanType'] in bids_func_suffix_list)\ @@ -1012,7 +1027,12 @@ def get_list_of_files_sorted_by_acq_time(self, files_list): 'need_fmap': require_fmap }) - return sorted(new_files_list, key=lambda x: x['acq_time']) + try: + sorted_files_list = sorted(new_files_list, key=lambda x: x['acq_time']) + except TypeError: + return None + + return sorted_files_list def modify_fmap_json_file_to_write_intended_for(self, sorted_fmap_files_list): """ @@ -1023,6 +1043,8 @@ def modify_fmap_json_file_to_write_intended_for(self, sorted_fmap_files_list): """ for fmap_dict in sorted_fmap_files_list: + if 'IntendedFor' not in fmap_dict: + continue json_file_path = os.path.join(self.config_db_obj.get_config('dataDirBasepath'), fmap_dict['json_file_path']) with open(json_file_path) as json_file: json_data = json.load(json_file) diff --git a/python/lib/lorisgetopt.py b/python/lib/lorisgetopt.py index 9378e74dc..362eb5ad2 100644 --- a/python/lib/lorisgetopt.py +++ b/python/lib/lorisgetopt.py @@ -2,9 +2,15 @@ import getopt import lib.exitcode +import lib.utilities import os import sys +from lib.aws_s3 import AwsS3 +from lib.database import Database +from lib.database_lib.config import Config + + __license__ = "GPLv3" @@ -56,12 +62,9 @@ class LorisGetOpt: # get the options provided by the user loris_getopt_obj = LorisGetOpt(usage, options_dict) - # validate that the options provided are correct - loris_getopt_obj.perform_default_checks_and_load_config() - """ - def __init__(self, usage, options_dict): + def __init__(self, usage, options_dict, script_name): """ Initialize the class, run GetOpt and populate the options_dict with the values that were provided to the script. 
@@ -80,6 +83,36 @@ def __init__(self, usage, options_dict):
             sys.exit(lib.exitcode.GETOPT_FAILURE)

         self.populate_options_dict_values(opts)
+        self.check_required_options_are_set()
+        self.load_config_file()
+        self.tmp_dir = lib.utilities.create_processing_tmp_dir(script_name)
+
+        # ---------------------------------------------------------------------------------------------
+        # Establish database connection
+        # ---------------------------------------------------------------------------------------------
+        self.config_file = self.config_info
+        self.verbose = self.options_dict["verbose"]["value"]
+        self.db = Database(self.config_file.mysql, self.verbose)
+        self.db.connect()
+
+        # ---------------------------------------------------------------------------------------------
+        # Load the Config, MRI Upload, Parameter Type and Parameter File database classes
+        # ---------------------------------------------------------------------------------------------
+        self.config_db_obj = Config(self.db, self.verbose)
+
+        # ---------------------------------------------------------------------------------------------
+        # Get Bucket information from Config and connect to bucket
+        # ---------------------------------------------------------------------------------------------
+        s3_endpoint = self.config_db_obj.get_config("AWS_S3_Endpoint")
+        s3_bucket_name = self.config_db_obj.get_config("AWS_S3_Default_Bucket")
+        self.s3_obj = AwsS3(
+            aws_access_key_id=self.config_file.s3["aws_access_key_id"],
+            aws_secret_access_key=self.config_file.s3["aws_secret_access_key"],
+            aws_endpoint_url=s3_endpoint if s3_endpoint else self.config_file.s3["aws_s3_endpoint_url"],
+            bucket_name=s3_bucket_name if s3_bucket_name else self.config_file.s3["aws_s3_bucket_name"]
+        )
+
+        self.check_options_file_path_exists()

     def get_long_options(self):
         """
@@ -130,17 +163,6 @@ def populate_options_dict_values(self, opts):
                 arg = True
             self.options_dict[key]["value"] = arg

-    def perform_default_checks_and_load_config(self):
-        """
-        Regroups all the different default checks that should be run on GetOpt information
-        provided when running the script.
-        """
-
-        # perform some initial checks
-        self.check_required_options_are_set()
-        self.check_options_file_path_exists()
-        self.load_config_file()
-
     def load_config_file(self):
         """
         Loads the config file based on the value provided by the option '--profile' when
@@ -190,7 +212,17 @@ def check_options_file_path_exists(self):
         for key in self.options_dict:
             opt_value = self.options_dict[key]["value"]

-            if self.options_dict[key]["is_path"] and opt_value and not os.path.isfile(opt_value):
+            if self.options_dict[key]["is_path"] and opt_value and opt_value.startswith('s3://'):
+                try:
+                    file_path = os.path.join(self.tmp_dir, os.path.basename(opt_value))
+                    self.s3_obj.download_file(opt_value, file_path)
+                    self.options_dict[key]["s3_url"] = opt_value
+                    self.options_dict[key]["value"] = file_path
+                except Exception as err:
+                    print(f"[ERROR ] {opt_value} could not be downloaded from S3 bucket. Error was\n{err}")
+                    print(self.usage)
+                    sys.exit(lib.exitcode.INVALID_PATH)
+            elif self.options_dict[key]["is_path"] and opt_value and not os.path.isfile(opt_value):
                 print(f"\n[ERROR ] {opt_value} does not exist. Please provide a valid path for --{key}\n")
                 print(self.usage)
                 sys.exit(lib.exitcode.INVALID_PATH)
diff --git a/python/run_dicom_archive_loader.py b/python/run_dicom_archive_loader.py
index edd4bacbd..b4f848f34 100755
--- a/python/run_dicom_archive_loader.py
+++ b/python/run_dicom_archive_loader.py
@@ -67,7 +67,7 @@ def main():
     }

     # get the options provided by the user
-    loris_getopt_obj = LorisGetOpt(usage, options_dict)
+    loris_getopt_obj = LorisGetOpt(usage, options_dict, os.path.basename(__file__[:-3]))

     # input error checking and load config_file file
     input_error_checking(loris_getopt_obj)
@@ -77,8 +77,6 @@ def main():


 def input_error_checking(loris_getopt_obj):
-    # perform initial checks and load config file (in loris_getopt_obj.config_info)
-    loris_getopt_obj.perform_default_checks_and_load_config()

     # check that only one of tarchive_path, upload_id or force has been provided
     loris_getopt_obj.check_tarchive_path_upload_id_or_force_set()
diff --git a/python/run_dicom_archive_validation.py b/python/run_dicom_archive_validation.py
index 593fd7026..bfe4c8d28 100755
--- a/python/run_dicom_archive_validation.py
+++ b/python/run_dicom_archive_validation.py
@@ -65,10 +65,7 @@ def main():
     }

     # get the options provided by the user
-    loris_getopt_obj = LorisGetOpt(usage, options_dict)
-
-    # input error checking and load config_file file
-    loris_getopt_obj.perform_default_checks_and_load_config()
+    loris_getopt_obj = LorisGetOpt(usage, options_dict, os.path.basename(__file__[:-3]))

     # validate the DICOM archive
     DicomValidationPipeline(loris_getopt_obj, os.path.basename(__file__[:-3]))
diff --git a/python/run_nifti_insertion.py b/python/run_nifti_insertion.py
index 3c37c08c5..1382028f5 100755
--- a/python/run_nifti_insertion.py
+++ b/python/run_nifti_insertion.py
@@ -97,7 +97,7 @@ def main():
     }

     # get the options provided by the user
-    loris_getopt_obj = LorisGetOpt(usage, options_dict)
+    loris_getopt_obj = LorisGetOpt(usage, options_dict, os.path.basename(__file__[:-3]))

     # input error checking and load config_file file
     input_error_checking(loris_getopt_obj)
@@ -107,8 +107,6 @@ def main():


 def input_error_checking(loris_getopt_obj):
-    # perform initial checks and load config file (in loris_getopt_obj.config_info)
-    loris_getopt_obj.perform_default_checks_and_load_config()

     # check that only one of tarchive_path, upload_id or force has been provided
     loris_getopt_obj.check_tarchive_path_upload_id_or_force_set()
diff --git a/python/run_push_imaging_files_to_s3_pipeline.py b/python/run_push_imaging_files_to_s3_pipeline.py
index f3cf1c126..4ff13a13e 100755
--- a/python/run_push_imaging_files_to_s3_pipeline.py
+++ b/python/run_push_imaging_files_to_s3_pipeline.py
@@ -55,19 +55,11 @@ def main():
     }

     # get the options provided by the user
-    loris_getopt_obj = LorisGetOpt(usage, options_dict)
-
-    # input error checking and load config_file file
-    input_error_checking(loris_getopt_obj)
+    loris_getopt_obj = LorisGetOpt(usage, options_dict, os.path.basename(__file__[:-3]))

     # push to S3 pipeline
     PushImagingFilesToS3Pipeline(loris_getopt_obj, os.path.basename(__file__[:-3]))


-def input_error_checking(loris_getopt_obj):
-    # perform initial checks and load config file (in loris_getopt_obj.config_info)
-    loris_getopt_obj.perform_default_checks_and_load_config()
-
-
 if __name__ == "__main__":
     main()
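
Note on usage: with this change, any option flagged as is_path may be given an s3:// URL. LorisGetOpt downloads the object into the processing tmp dir before the pipeline starts (recording the original URL under the option's 's3_url' key), and the insertion pipelines push the resulting files, protocol violations and candidate errors back to the bucket on completion. A hypothetical invocation (bucket and file names are illustrative; the long options mirror the options_dict keys of run_nifti_insertion.py):

    run_nifti_insertion.py --profile database_config.py --upload_id 123 \
        --nifti_path s3://my-bucket/incoming/sub-123_T1w.nii.gz \
        --json_path s3://my-bucket/incoming/sub-123_T1w.json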