Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Unify LORIS-MRI logging #1191

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@ include = [
"python/lib/db",
"python/lib/exception",
"python/lib/config_file.py",
"python/lib/env.py",
"python/lib/file_system.py",
"python/lib/logging.py",
"python/lib/make_env.py",
"python/lib/validate_subject_info.py",
]
typeCheckingMode = "strict"
Expand Down
4 changes: 4 additions & 0 deletions python/lib/database_lib/notification.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,12 @@

import datetime

from typing_extensions import deprecated

__license__ = "GPLv3"


@deprecated('Use `lib.db.model.notification_spool` instead')
class Notification:
"""
This class performs database queries for imaging pipeline notification_spool table.
Expand Down Expand Up @@ -46,6 +49,7 @@ def __init__(self, db, verbose, notification_type, notification_origin, process_
self.notification_origin = notification_origin
self.process_id = process_id

@deprecated('Use `lib.logging.register_notification` instead')
def write_to_notification_spool(self, message, is_error, is_verbose, center_id=None):
"""
Insert a row in the notification_spool table.
Expand Down
9 changes: 3 additions & 6 deletions python/lib/db/connect.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
from sqlalchemy import URL, create_engine
from sqlalchemy.orm import Session

from lib.config_file import DatabaseConfig


def connect_to_database(config: DatabaseConfig):
def get_database_engine(config: DatabaseConfig):
"""
Connect to the database and get an SQLAlchemy session to interact with it using the provided
credentials.
Connect to the database and return an SQLAlchemy engine using the provided credentials.
"""

# The SQLAlchemy URL object notably escapes special characters in the configuration attributes
Expand All @@ -20,5 +18,4 @@ def connect_to_database(config: DatabaseConfig):
database = config.database,
)

engine = create_engine(url)
return Session(engine)
return create_engine(url)
8 changes: 4 additions & 4 deletions python/lib/db/query/notification.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
from lib.db.model.notification_type import DbNotificationType


def get_notification_type_with_name(db: Database, name: str):
def try_get_notification_type_with_name(db: Database, name: str):
"""
Get a notification type from the database using its configuration setting name, or raise an
exception if no notification type is found.
Get a notification type from the database using its configuration setting name, or return
`None` if no notification type is found.
"""

return db.execute(select(DbNotificationType)
.where(DbNotificationType.name == name)
).scalar_one()
).scalar_one_or_none()
188 changes: 83 additions & 105 deletions python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import lib.exitcode
import lib.utilities
from lib.dcm2bids_imaging_pipeline_lib.base_pipeline import BasePipeline
from lib.logging import log_error_exit, log_verbose

__license__ = "GPLv3"

Expand Down Expand Up @@ -75,11 +76,12 @@ def __init__(self, loris_getopt_obj, script_name):
message += f"{self.excluded_series_desc_regex_list} will not be considered!"
else:
message += f"{', '.join(self.excluded_series_desc_regex_list)} will not be considered!"
self.log_error_and_exit(message, lib.exitcode.NO_VALID_NIfTI_CREATED, is_error="Y", is_verbose="N")
log_error_exit(self.env, message, lib.exitcode.NO_VALID_NIfTI_CREATED)
else:
message = f"Number of NIfTI files that will be considered for insertion into the database: " \
f"{self.file_to_insert_count}"
self.log_info(message, is_error="N", is_verbose="Y")
log_verbose(self.env, (
f"Number of NIfTI files that will be considered for insertion into the database: "
f"{self.file_to_insert_count}"
))

# ---------------------------------------------------------------------------------------------
# Loop through NIfTI files and call run_nifti_insertion.pl
Expand Down Expand Up @@ -121,15 +123,21 @@ def _run_dicom_archive_validation_pipeline(self):
validation_process = subprocess.Popen(validation_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
validation_process.communicate()
if validation_process.returncode == 0:
message = f"run_dicom_archive_validation.py successfully executed for UploadID {self.upload_id} " \
f"and ArchiveLocation {self.tarchive_path}"
self.log_info(message, is_error="N", is_verbose="Y")
log_verbose(self.env, (
f"run_dicom_archive_validation.py successfully executed for UploadID {self.upload_id} "
f"and ArchiveLocation {self.tarchive_path}"
))
# reset mri_upload to Inserting as run_dicom_archive_validation.py will set Inserting=0 after execution
self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',))
else:
message = f"run_dicom_archive_validation.py failed validation for UploadID {self.upload_id}" \
f"and ArchiveLocation {self.tarchive_path}. Exit code was {validation_process.returncode}."
self.log_error_and_exit(message, lib.exitcode.INVALID_DICOM, is_error="Y", is_verbose="N")
log_error_exit(
self.env,
(
f"run_dicom_archive_validation.py failed validation for UploadID {self.upload_id}"
f"and ArchiveLocation {self.tarchive_path}. Exit code was {validation_process.returncode}."
),
lib.exitcode.INVALID_DICOM,
)

# now that the DICOM archive validation has run, check the database to ensure the validation was completed
# and correctly updated in the DB
Expand All @@ -152,16 +160,20 @@ def _run_dcm2niix_conversion(self):

converter = self.config_db_obj.get_config("converter")
if not re.search(r'.*dcm2niix.*', converter, re.IGNORECASE):
message = f"{converter} does not appear to be a dcm2niix binary."
self.log_error_and_exit(message, lib.exitcode.PROJECT_CUSTOMIZATION_FAILURE, is_error="Y", is_verbose="N")
log_error_exit(
self.env,
f"{converter} does not appear to be a dcm2niix binary.",
lib.exitcode.PROJECT_CUSTOMIZATION_FAILURE,
)

dcm2niix_process = subprocess.Popen(
[converter, "-ba", "n", "-z", "y", "-o", nifti_tmp_dir, self.extracted_dicom_dir],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)

stdout, _ = dcm2niix_process.communicate()
self.log_info(stdout, is_error="N", is_verbose="Y")
log_verbose(self.env, str(stdout))

return nifti_tmp_dir

Expand Down Expand Up @@ -305,15 +317,13 @@ def _run_nifti_insertion(self, nifti_file_path, json_file_path, bval_file_path=N
stdout, _ = insertion_process.communicate()

if insertion_process.returncode == 0:
message = f"run_nifti_insertion.py successfully executed for file {nifti_file_path}"
self.log_info(message, is_error="N", is_verbose="Y")
log_verbose(self.env, f"run_nifti_insertion.py successfully executed for file {nifti_file_path}")
self.inserted_file_count += 1
# reset mri_upload to Inserting as run_nifti_insertion.py will set Inserting=0 after execution
self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',))
else:
message = f"run_nifti_insertion.py failed for file {nifti_file_path}.\n{stdout}"
print(stdout)
self.log_info(message, is_error="Y", is_verbose="Y")
log_verbose(self.env, f"run_nifti_insertion.py failed for file {nifti_file_path}.\n{stdout}")

def _move_and_update_dicom_archive(self):
"""
Expand Down Expand Up @@ -362,8 +372,7 @@ def _add_intended_for_to_fieldmap_json_files(self):
sorted_fmap_files_list = fmap_files_dict[key]
for fmap_dict in sorted_fmap_files_list:
if fmap_dict['json_file_path'].startswith('s3://') and not self.s3_obj:
message = "[ERROR ] S3 configs not configured properly"
self.log_error_and_exit(message, lib.exitcode.S3_SETTINGS_FAILURE, is_error="Y", is_verbose="N")
log_error_exit(self.env, "S3 configs not configured properly", lib.exitcode.S3_SETTINGS_FAILURE)
self.imaging_obj.modify_fmap_json_file_to_write_intended_for(
sorted_fmap_files_list, self.s3_obj, self.tmp_dir
)
Expand Down Expand Up @@ -442,6 +451,7 @@ def _get_summary_of_insertion(self):
- {nb_files_inserted} files were inserted into the files table: {files_list}
- {nb_prot_violation} files did not match any protocol: {prot_viol_list}
- {nb_excluded_viol} files were exclusionary violations: {excl_viol_list}
- Log of process in {self.log_obj.log_file}
- Log of process in {self.env.log_file}
"""
self.log_info(summary, is_error="N", is_verbose="Y")

log_verbose(self.env, summary)
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import lib.exitcode
from lib.dcm2bids_imaging_pipeline_lib.base_pipeline import BasePipeline
from lib.logging import log_error_exit, log_verbose

__license__ = "GPLv3"

Expand Down Expand Up @@ -33,8 +34,7 @@ def __init__(self, loris_getopt_obj, script_name):
# ---------------------------------------------------------------------------------------------
# If we get here, the tarchive is validated & the script stops running so update mri_upload
# ---------------------------------------------------------------------------------------------
message = f"DICOM archive {self.options_dict['tarchive_path']['value']} is valid!"
self.log_info(message, is_error="N", is_verbose="Y")
log_verbose(self.env, f"DICOM archive {self.options_dict['tarchive_path']['value']} is valid!")
self.imaging_upload_obj.update_mri_upload(
upload_id=self.upload_id,
fields=("isTarchiveValidated", "Inserting",),
Expand All @@ -49,18 +49,18 @@ def _validate_dicom_archive_md5sum(self):
logged in the <tarchive> table.
"""

self.log_info(message="Verifying DICOM archive md5sum (checksum)", is_error="N", is_verbose="Y")
log_verbose(self.env, "Verifying DICOM archive md5sum (checksum)")

tarchive_path = os.path.join(self.dicom_lib_dir, self.dicom_archive_obj.tarchive_info_dict["ArchiveLocation"])
result = self.dicom_archive_obj.validate_dicom_archive_md5sum(tarchive_path)
message = result["message"]

if result['success']:
self.log_info(message, is_error="N", is_verbose="Y")
log_verbose(self.env, message)
else:
self.imaging_upload_obj.update_mri_upload(
upload_id=self.upload_id,
fields=("isTarchiveValidated", "IsCandidateInfoValidated"),
values=("0", "0")
)
self.log_error_and_exit(message, lib.exitcode.CORRUPTED_FILE, is_error="Y", is_verbose="N")
log_error_exit(self.env, message, lib.exitcode.CORRUPTED_FILE)
Loading
Loading