Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merge 24.1.8 into main #888

Merged
merged 46 commits into from
Feb 17, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
f471c59
update the VERSION for the next bug fix release (#816)
cmadjar Nov 2, 2022
90e7a9a
[dcm2bids] Remove hardcoded dcm2niix binary to use the value stored i…
cmadjar Nov 2, 2022
02333c9
Pull 24.0.3 in 24.1 release (#820)
cmadjar Nov 2, 2022
0614e8c
fix nonetype errors when the visit of a session does not exist so tha…
cmadjar Nov 7, 2022
f47421d
fix some errors when RepetitionTime is not available in JSON file (#825)
cmadjar Nov 8, 2022
f0810cb
Add capability to download file from s3 (#826)
cmadjar Nov 8, 2022
3644f95
Upload to S3: support object name starting with s3://bucket_name/ for…
cmadjar Nov 8, 2022
55c0d8c
fix database class pselect documentation for the return type (#828)
cmadjar Nov 10, 2022
34e8041
map scan type to scan type ID when scan type provided to run_nifti_in…
cmadjar Nov 10, 2022
47c797e
modify permission of script run_push_imaging_files_to_s3_pipeline.py …
cmadjar Nov 10, 2022
02507c5
skip violation if not found on filesystem since it means the scan has…
cmadjar Nov 11, 2022
9c376bb
update VERSION file (#832)
cmadjar Nov 11, 2022
e62ea82
do not push files to S3 when their path in the DB is already an S3 UR…
cmadjar Nov 11, 2022
0d27d49
fix violation files path when checking if the files are on the filesy…
cmadjar Nov 14, 2022
4fd75e2
Merge 24.0 release into 24.1 release (#836)
cmadjar Nov 14, 2022
a826be3
fix check if file already inserted in DB (#845)
cmadjar Dec 5, 2022
c9afaa2
Fix logic of determining file run number when previously inserted fil…
cmadjar Dec 5, 2022
17f6652
update version file (#847)
cmadjar Dec 5, 2022
b224835
Chunk creation subprocess failure check (#848)
regisoc Dec 5, 2022
afe6903
Revert chunk_pb2.py changes (#849)
laemtl Dec 5, 2022
cef8f87
remove prints in nifti_insertion_pipeline.py (#851)
cmadjar Dec 6, 2022
3af8cf6
fix permission denied upon deletion of tmp dir (#853)
cmadjar Dec 8, 2022
115283e
update to next bug fix release (#854)
cmadjar Dec 8, 2022
e8103e7
fix duplicated protocols error when same scan type returned (#856)
cmadjar Dec 16, 2022
049414e
Add missing exit codes on the python's side (#857)
cmadjar Dec 21, 2022
27f7e63
add ignore case to regex (#859)
cmadjar Dec 23, 2022
bd74369
add download from S3 and reupload if file provided to run_nifti-inser…
cmadjar Jan 4, 2023
a1dc483
fix intended for bug when no acq time available (#861)
cmadjar Jan 6, 2023
bc00fd6
fix bug for intended for when getting the list of files needed Intend…
cmadjar Jan 6, 2023
82c47b8
fix paths when there are not / at the end of the Config (#866)
cmadjar Jan 12, 2023
e77d767
fix NoneType error /opt/loris/bin/mri/python/lib/dcm2bids_imaging_pip…
cmadjar Jan 12, 2023
9f986c8
Properly update `mri_upload` 'Inserting' column when different sectio…
cmadjar Jan 13, 2023
d53b0a7
update version file to 24.1.6 (#870)
cmadjar Jan 18, 2023
0bda1a1
Add download from S3 for fmap already pushed to S3 that needs to have…
cmadjar Jan 24, 2023
4fd8b48
update version to 24.1.7 (#876)
cmadjar Jan 24, 2023
ca19cc4
Merge 24.0.4 into 24.1 release (#878)
cmadjar Jan 24, 2023
eb3c8c4
add a delete_file function for S3 (#881)
cmadjar Feb 3, 2023
5d50f7f
MATLAB 7.3 HDF5 Workaround (#880)
jeffersoncasimir Feb 9, 2023
abeb185
Fix nii insertion mri protocol group id bug (#883)
cmadjar Feb 10, 2023
3fa41f0
Do not register multiple entries of the same violation in `mri_violat…
cmadjar Feb 10, 2023
3b0b80e
keep track of additional event data (24.1) (#873)
regisoc Feb 13, 2023
d608d18
Copy .set file before updating paths (#886)
jeffersoncasimir Feb 13, 2023
0d82861
change version to 24.1.8 (#887)
cmadjar Feb 14, 2023
a568f9a
Merge tag 'v24.1.8' into merge_24.1.8_into_main
cmadjar Feb 15, 2023
0c1d6ab
correct flake errors
cmadjar Feb 15, 2023
2851cc5
Regis' feedback
cmadjar Feb 16, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
24.1.7
24.1.8
19 changes: 19 additions & 0 deletions python/lib/aws_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,3 +99,22 @@ def download_file(self, s3_object_name, destination_file):
self.s3_bucket_obj.download_file(s3_file_name, destination_file)
except ClientError as err:
raise Exception(f"{s3_object_name} download failure = {format(err)}")

def delete_file(self, s3_object_name):
    """
    Delete an S3 file or directory (i.e. every object whose key starts
    with the given prefix).

    :param s3_object_name: name of the S3 file or directory; accepts either a
                           bucket-relative key or a full ``s3://<bucket>/...`` URL
    :type s3_object_name: str
    """

    # strip the s3://<bucket_name>/ prefix if present so we are left with the object key
    s3_bucket_prefix = f"s3://{self.bucket_name}/"
    s3_file_name = s3_object_name[len(s3_bucket_prefix):] \
        if s3_object_name.startswith(s3_bucket_prefix) else s3_object_name
    objects_to_delete = [{'Key': obj.key} for obj in
                         self.s3_bucket_obj.objects.filter(Prefix=s3_file_name)]

    # The S3 DeleteObjects API rejects an empty 'Objects' list (MalformedXML)
    # and accepts at most 1000 keys per request, so skip the call when nothing
    # matched and batch larger deletions in chunks of 1000.
    for batch_start in range(0, len(objects_to_delete), 1000):
        self.s3_bucket_obj.delete_objects(
            Delete={
                'Objects': objects_to_delete[batch_start:batch_start + 1000]
            }
        )
2 changes: 1 addition & 1 deletion python/lib/database_lib/mri_violations_log.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def insert_violations_log(self, field_value_dict):
get_last_id=False
)

def get_excluded_violations_for_tarchive_id(self, tarchive_id, severity=None):
def get_violations_for_tarchive_id(self, tarchive_id, severity=None):
"""
Get the list of violations logged in `mri_violations_log` with excluded severity for a given `TarchiveID`.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -419,7 +419,7 @@ def _get_summary_of_insertion(self):
self.tarchive_id
)
protocol_violations_list = [v["minc_location"] for v in prot_viol_results] if prot_viol_results else None
excl_viol_results = self.imaging_obj.mri_viol_log_db_obj.get_excluded_violations_for_tarchive_id(
excl_viol_results = self.imaging_obj.mri_viol_log_db_obj.get_violations_for_tarchive_id(
self.tarchive_id, "exclude"
)
excluded_violations_list = [v["MincFile"] for v in excl_viol_results] if excl_viol_results else None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,8 @@ def __init__(self, loris_getopt_obj, script_name):
# ---------------------------------------------------------------------------------------------
# Determine acquisition protocol (or register into mri_protocol_violated_scans and exits)
# ---------------------------------------------------------------------------------------------
self.scan_type_id, self.mri_protocol_group_id = self._determine_acquisition_protocol()
if not self.loris_scan_type:
self.scan_type_id, self.mri_protocol_group_id = self._determine_acquisition_protocol()
if not self.scan_type_id:
self._move_to_trashbin()
self._register_protocol_violated_scan()
Expand All @@ -129,7 +129,6 @@ def __init__(self, loris_getopt_obj, script_name):
else:
self.scan_type_name = self.imaging_obj.get_scan_type_name_from_id(self.scan_type_id)
else:
self.scan_type_id = self.imaging_obj.get_scan_type_id_from_scan_type_name(self.loris_scan_type)
if not self.scan_type_id:
self._move_to_trashbin()
self._register_protocol_violated_scan()
Expand All @@ -154,6 +153,8 @@ def __init__(self, loris_getopt_obj, script_name):
# ---------------------------------------------------------------------------------------------
# Run extra file checks to determine possible protocol violations
# ---------------------------------------------------------------------------------------------
self.warning_violations_list = []
self.exclude_violations_list = []
if not self.bypass_extra_checks:
self.violations_summary = self.imaging_obj.run_extra_file_checks(
self.session_obj.session_info_dict['ProjectID'],
Expand All @@ -162,8 +163,8 @@ def __init__(self, loris_getopt_obj, script_name):
self.scan_type_id,
self.json_file_dict
)
self.warning_violations_list = self.violations_summary['warning']
self.exclude_violations_list = self.violations_summary['exclude']
self.warning_violations_list = self.violations_summary['warning']
self.exclude_violations_list = self.violations_summary['exclude']

# ---------------------------------------------------------------------------------------------
# Register files in the proper tables
Expand Down Expand Up @@ -350,7 +351,9 @@ def _determine_acquisition_protocol(self):
self.scanner_id
)

protocol_info = self.imaging_obj.get_acquisition_protocol_info(protocols_list, nifti_name, scan_param)
protocol_info = self.imaging_obj.get_acquisition_protocol_info(
protocols_list, nifti_name, scan_param, self.loris_scan_type
)
self.log_info(protocol_info['error_message'], is_error="N", is_verbose="Y")

return protocol_info['scan_type_id'], protocol_info['mri_protocol_group_id']
Expand Down Expand Up @@ -396,7 +399,7 @@ def _move_to_assembly_and_insert_file_info(self):
self.log_info(message, is_error='N', is_verbose='Y')

# add an entry in the violations log table if there is a warning violation associated to the file
if self.violations_summary['warning']:
if self.warning_violations_list:
message = f"Inserting warning violations related to {self.assembly_nifti_rel_path}." \
f" List of violations found: {self.warning_violations_list}"
self.log_info(message, is_error='N', is_verbose='Y')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,10 +166,10 @@ def _get_list_of_files_from_mri_violations_log(self):
parameter_file).
"""

exclude_entries = self.imaging_obj.mri_viol_log_db_obj.get_excluded_violations_for_tarchive_id(
exclude_entries = self.imaging_obj.mri_viol_log_db_obj.get_violations_for_tarchive_id(
self.tarchive_id, "exclude"
)
warning_entries = self.imaging_obj.mri_viol_log_db_obj.get_excluded_violations_for_tarchive_id(
warning_entries = self.imaging_obj.mri_viol_log_db_obj.get_violations_for_tarchive_id(
self.tarchive_id, "warning"
)

Expand Down
76 changes: 52 additions & 24 deletions python/lib/eeg.py
Original file line number Diff line number Diff line change
Expand Up @@ -456,14 +456,15 @@ def fetch_and_insert_eeg_files(self, derivatives=False):
result = physiological.grep_file_id_from_hash(blake2)
physio_file_id = result['PhysiologicalFileID'] if result else None
eeg_path = result['FilePath'] if result else None

if not physio_file_id:
# grep the modality ID from physiological_modality table
modality_id = self.db.grep_id_from_lookup_table(
id_field_name = 'PhysiologicalModalityID',
table_name = 'physiological_modality',
where_field_name = 'PhysiologicalModality',
where_value = self.bids_modality,
insert_if_not_found = False
id_field_name='PhysiologicalModalityID',
table_name='physiological_modality',
where_field_name='PhysiologicalModality',
where_value=self.bids_modality,
insert_if_not_found=False
)

# copy the eeg_file to the LORIS BIDS import directory
Expand All @@ -474,36 +475,38 @@ def fetch_and_insert_eeg_files(self, derivatives=False):
# insert the file along with its information into
# physiological_file and physiological_parameter_file tables
eeg_file_info = {
'FileType' : file_type,
'FilePath' : eeg_path,
'SessionID' : self.session_id,
'FileType': file_type,
'FilePath': eeg_path,
'SessionID': self.session_id,
'AcquisitionTime': eeg_acq_time,
'InsertedByUser' : getpass.getuser(),
'InsertedByUser': getpass.getuser(),
'PhysiologicalOutputTypeID': output_type_id,
'PhysiologicalModalityID' : modality_id
'PhysiologicalModalityID': modality_id
}
physio_file_id = physiological.insert_physiological_file(
eeg_file_info, eeg_file_data
)

# if the EEG file was a set file, then update the filename for the .set
# and .fdt files in the .set file so it can find the proper file for
# visualization and analyses
if file_type == 'set':
file_paths_updated = file_type != 'set'
if not file_paths_updated:
set_full_path = os.path.join(self.data_dir, eeg_path)
fdt_full_path = eeg_file_data['fdt_file'] if 'fdt_file' in eeg_file_data.keys() else None

if fdt_full_path:
fdt_full_path = os.path.join(self.data_dir, eeg_file_data['fdt_file'])
utilities.update_set_file_path_info(set_full_path, fdt_full_path)
file_paths_updated = utilities.update_set_file_path_info(set_full_path, fdt_full_path)

inserted_eegs.append({
'file_id': physio_file_id,
'file_path': eeg_path,
'eegjson_file_path': eegjson_file_path,
'fdt_file_path': fdt_file_path,
'original_file_data': eeg_file,
})
if file_paths_updated:
inserted_eegs.append({
'file_id': physio_file_id,
'file_path': eeg_path,
'eegjson_file_path': eegjson_file_path,
'fdt_file_path': fdt_file_path,
'original_file_data': eeg_file,
})

return inserted_eegs

Expand Down Expand Up @@ -688,9 +691,34 @@ def fetch_and_insert_event_files(
# get the blake2b hash of the task events file
blake2 = blake2b(event_data_file.path.encode('utf-8')).hexdigest()
# insert event data in the database
physiological.insert_event_file(
event_data, event_path, physiological_file_id, blake2
)
# TODO: Temporary
# TODO: should not have two different insert_event_file methods
# TODO: remove this TRY/CATCH and the legacy method when
# TODO: all LORIS code will support additional events.
insert_fallback = False
msg = ""
try:
physiological.insert_event_file(
event_data, event_path, physiological_file_id, blake2
)
except NameError:
# when the insert_event_file function does not exist
msg = "WARNING: function 'insert_event_file' not found. Using fallback method."
insert_fallback = True
except Exception as ex:
# when event table structure is still the old one
if ex.args[0] and ex.args[0].startswith("Insert query failure: "):
msg = "WARNING: error during DB insert. Using fallback method."
insert_fallback = True
else:
# re-raise other errors from db insert
raise ex
# insert fallback, call legacy method
if insert_fallback:
print(msg)
physiological.insert_event_file_legacy(
event_data, event_path, physiological_file_id, blake2
)

event_paths.extend([event_path])

Expand Down Expand Up @@ -723,7 +751,7 @@ def fetch_and_insert_event_files(

if not event_metadata_file:
message = '\nWARNING: no events metadata files (event.json) associated' \
'with physiological file ID ' + physiological_file_id
'with physiological file ID ' + str(physiological_file_id)
print(message)
else:
# copy the event file to the LORIS BIDS import directory
Expand Down
62 changes: 54 additions & 8 deletions python/lib/imaging.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,20 @@ def insert_protocol_violated_scan(self, patient_name, cand_id, psc_id, tarchive_
:type mri_protocol_group_id: int
"""

phase_encoding_dir = scan_param["PhaseEncodingDirection"] if "PhaseEncodingDirection" in scan_param else None
series_uid = scan_param["SeriesInstanceUID"] if "SeriesInstanceUID" in scan_param.keys() else None
image_type = str(scan_param["ImageType"]) if "ImageType" in scan_param.keys() else None
echo_number = repr(scan_param["EchoNumber"]) if "EchoNumber" in scan_param.keys() else None
phase_encoding_dir = scan_param["PhaseEncodingDirection"] \
if "PhaseEncodingDirection" in scan_param.keys() else None

# if there is already an entry for this violation in mri_protocol_violated_scans, do not insert anything
existing_prot_viol_scans = self.mri_prot_viol_scan_db_obj.get_protocol_violations_for_tarchive_id(tarchive_id)
for row in existing_prot_viol_scans:
if row['SeriesUID'] == series_uid \
and row['PhaseEncodingDirection'] == phase_encoding_dir \
and row['image_type'] == image_type \
and row['EchoNumber'] == echo_number:
return

info_to_insert_dict = {
"CandID": cand_id,
Expand All @@ -257,10 +270,10 @@ def insert_protocol_violated_scan(self, patient_name, cand_id, psc_id, tarchive_
"ystep_range": scan_param["ystep"] if "ystep" in scan_param.keys() else None,
"zstep_range": scan_param["zstep"] if "zstep" in scan_param.keys() else None,
"time_range": scan_param["time"] if "time" in scan_param.keys() else None,
"SeriesUID": scan_param["SeriesInstanceUID"] if "SeriesInstanceUID" in scan_param.keys() else None,
"image_type": str(scan_param["ImageType"]) if "ImageType" in scan_param.keys() else None,
"SeriesUID": series_uid,
"image_type": image_type,
"PhaseEncodingDirection": phase_encoding_dir,
"EchoNumber": repr(scan_param["EchoNumber"]) if "EchoNumber" in scan_param else None,
"EchoNumber": echo_number,
"MriProtocolGroupID": mri_protocol_group_id if mri_protocol_group_id else None
}
self.mri_prot_viol_scan_db_obj.insert_protocol_violated_scans(info_to_insert_dict)
Expand All @@ -272,6 +285,35 @@ def insert_mri_violations_log(self, info_to_insert_dict):
:param info_to_insert_dict: dictionary with the information to be inserted in mri_violations_log
:type info_to_insert_dict: dict
"""

series_uid = info_to_insert_dict["SeriesUID"]
echo_number = repr(info_to_insert_dict["EchoNumber"])
phase_encoding_dir = info_to_insert_dict["PhaseEncodingDirection"]
echo_time = info_to_insert_dict['EchoTime']
scan_type = info_to_insert_dict['Scan_type']
severity = info_to_insert_dict['Severity']
header = info_to_insert_dict['Header']
value = info_to_insert_dict['Value']
valid_regex = info_to_insert_dict['ValidRegex']
valid_range = info_to_insert_dict['ValidRange']

# if there is already an entry for this violation in mri_violations_log, do not insert anything
existing_viol_logs = self.mri_viol_log_db_obj.get_violations_for_tarchive_id(
info_to_insert_dict['TarchiveID']
)
for row in existing_viol_logs:
if str(row['SeriesUID']) == str(series_uid) \
and str(row['PhaseEncodingDirection']) == str(phase_encoding_dir) \
and str(row['EchoNumber']) == str(echo_number) \
and str(row['Scan_type']) == str(scan_type) \
and str(row['EchoTime']) == str(echo_time) \
and str(row['Severity']) == str(severity) \
and str(row['Header']) == str(header) \
and str(row['Value']) == str(value) \
and str(row['ValidRange']) == str(valid_range) \
and str(row['ValidRegex']) == str(valid_regex):
return

self.mri_viol_log_db_obj.insert_violations_log(info_to_insert_dict)

def get_parameter_type_id(self, parameter_name):
Expand Down Expand Up @@ -567,7 +609,7 @@ def map_bids_param_to_loris_param(self, file_parameters):

return file_parameters

def get_acquisition_protocol_info(self, protocols_list, nifti_name, scan_param):
def get_acquisition_protocol_info(self, protocols_list, nifti_name, scan_param, scan_type=None):
"""
Get acquisition protocol information (scan_type_id or message to be printed in the log).
- If the protocols list provided as input is empty, the scan_type_id will be set to None and proper message
Expand Down Expand Up @@ -608,7 +650,7 @@ def get_acquisition_protocol_info(self, protocols_list, nifti_name, scan_param):

# look for matching protocols
mri_protocol_group_id = protocols_list[0]['MriProtocolGroupID']
matching_protocols_list = self.look_for_matching_protocols(protocols_list, scan_param)
matching_protocols_list = self.look_for_matching_protocols(protocols_list, scan_param, scan_type)

# if more than one protocol matching, return False, otherwise, return the scan type ID
if not matching_protocols_list:
Expand Down Expand Up @@ -648,7 +690,7 @@ def get_bids_categories_mapping_for_scan_type_id(self, scan_type_id):

return self.mri_prot_db_obj.get_bids_info_for_scan_type_id(scan_type_id)

def look_for_matching_protocols(self, protocols_list, scan_param):
def look_for_matching_protocols(self, protocols_list, scan_param, scan_type=None):
"""
Look for matching protocols in protocols_list given scan parameters stored in scan_param.

Expand All @@ -661,9 +703,13 @@ def look_for_matching_protocols(self, protocols_list, scan_param):
:rtype: list
"""

scan_type_id = self.get_scan_type_id_from_scan_type_name(scan_type) if scan_type else None

matching_protocols_list = []
for protocol in protocols_list:
if protocol['series_description_regex']:
if scan_type_id and protocol['Scan_type'] == scan_type_id:
matching_protocols_list.append(protocol['Scan_type'])
elif protocol['series_description_regex']:
if re.search(
rf"{protocol['series_description_regex']}", scan_param['SeriesDescription'], re.IGNORECASE
):
Expand Down
Loading