diff --git a/python/lib/database_lib/mri_upload_db.py b/python/lib/database_lib/mri_upload_db.py index c636429e2..cb76ebcc0 100644 --- a/python/lib/database_lib/mri_upload_db.py +++ b/python/lib/database_lib/mri_upload_db.py @@ -1,8 +1,11 @@ """This class performs database queries for the mri_upload table""" +from typing_extensions import deprecated + __license__ = "GPLv3" +@deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') class MriUploadDB: """ This class performs database queries for imaging dataset stored in the mri_upload table. @@ -34,6 +37,7 @@ def __init__(self, db, verbose): self.db = db self.verbose = verbose + @deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') def update_mri_upload(self, upload_id, fields, values): """ Update the `isTarchiveValidated` field of the upload with the value provided @@ -57,6 +61,7 @@ def update_mri_upload(self, upload_id, fields, values): self.db.update(query=query, args=args) + @deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') def create_mri_upload_dict(self, where_field, where_value): """ Create a dictionary out of the entry available in the `mri_upload` table. 
diff --git a/python/lib/db/models/dicom_archive.py b/python/lib/db/models/dicom_archive.py index a494d37d8..444c9fbec 100644 --- a/python/lib/db/models/dicom_archive.py +++ b/python/lib/db/models/dicom_archive.py @@ -47,11 +47,11 @@ class DbDicomArchive(Base): date_sent : Mapped[Optional[datetime]] = mapped_column('DateSent') pending_transfer : Mapped[bool] = mapped_column('PendingTransfer') - series : Mapped[list['db_dicom_archive_series.DbDicomArchiveSeries']] \ + series : Mapped[list['db_dicom_archive_series.DbDicomArchiveSeries']] \ = relationship('DbDicomArchiveSeries', back_populates='archive') - files : Mapped[list['db_dicom_archive_file.DbDicomArchiveFile']] \ + files : Mapped[list['db_dicom_archive_file.DbDicomArchiveFile']] \ = relationship('DbDicomArchiveFile', back_populates='archive') - upload : Mapped[Optional['db_mri_upload.DbMriUpload']] \ + mri_uploads : Mapped[list['db_mri_upload.DbMriUpload']] \ = relationship('DbMriUpload', back_populates='dicom_archive') - session : Mapped[Optional['db_session.DbSession']] \ + session : Mapped[Optional['db_session.DbSession']] \ = relationship('DbSession') diff --git a/python/lib/db/models/mri_upload.py b/python/lib/db/models/mri_upload.py index a13f90c0c..054a0b10a 100644 --- a/python/lib/db/models/mri_upload.py +++ b/python/lib/db/models/mri_upload.py @@ -30,7 +30,7 @@ class DbMriUpload(Base): is_dicom_archive_validated : Mapped[bool] = mapped_column('IsTarchiveValidated') is_phantom : Mapped[bool] = mapped_column('IsPhantom', YNBool) - dicom_archive : Mapped[Optional['db_dicom_archive.DbDicomArchive']] \ - = relationship('DbDicomArchive', back_populates='upload') - session : Mapped[Optional['db_session.DbSession']] \ + dicom_archive : Mapped[Optional['db_dicom_archive.DbDicomArchive']] \ + = relationship('DbDicomArchive', back_populates='mri_uploads') + session : Mapped[Optional['db_session.DbSession']] \ = relationship('DbSession') diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py 
b/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py index 274709b09..52345e4bf 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/base_pipeline.py @@ -5,13 +5,13 @@ import lib.utilities from lib.database import Database from lib.database_lib.config import Config -from lib.db.queries.dicom_archive import try_get_dicom_archive_with_archive_location, try_get_dicom_archive_with_id +from lib.db.queries.dicom_archive import try_get_dicom_archive_with_archive_location +from lib.db.queries.mri_upload import try_get_mri_upload_with_id from lib.db.queries.session import try_get_session_with_cand_id_visit_label from lib.db.queries.site import get_all_sites from lib.exception.determine_subject_info_error import DetermineSubjectInfoError from lib.exception.validate_subject_info_error import ValidateSubjectInfoError from lib.imaging import Imaging -from lib.imaging_upload import ImagingUpload from lib.logging import log_error_exit, log_verbose, log_warning from lib.make_env import make_env from lib.validate_subject_info import validate_subject_info @@ -32,10 +32,9 @@ def __init__(self, loris_getopt_obj, script_name): These includes the following steps: - load pipeline options - establish database connection - - load the Config, Imaging, Tarchive, ImagingUpload, Session and other classes + - load the Config, Imaging and other classes - creates the processing temporary directory - creates the log file for the script execution - - populate the imaging_upload and tarchive info dictionaries - determine the subject IDs - determine the site information - determine the scanner information @@ -51,7 +50,6 @@ def __init__(self, loris_getopt_obj, script_name): self.options_dict = loris_getopt_obj.options_dict self.force = self.options_dict["force"]["value"] if "force" in self.options_dict else None self.verbose = self.options_dict["verbose"]["value"] - self.upload_id = 
loris_getopt_obj.options_dict["upload_id"]["value"] # ---------------------------------------------------- # Establish database connection @@ -64,7 +62,6 @@ def __init__(self, loris_getopt_obj, script_name): # ----------------------------------------------------------------------------------- self.config_db_obj = Config(self.db, self.verbose) self.imaging_obj = Imaging(self.db, self.verbose, self.config_file) - self.imaging_upload_obj = ImagingUpload(self.db, self.verbose) # --------------------------------------------------------------------------------------------- # Grep config settings from the Config module @@ -83,43 +80,41 @@ def __init__(self, loris_getopt_obj, script_name): # --------------------------------------------------------------------------------------------- # Load imaging_upload and tarchive dictionary # --------------------------------------------------------------------------------------------- - self.load_imaging_upload_and_tarchive_dictionaries() + self.load_mri_upload_and_dicom_archive() # --------------------------------------------------------------------------------------------- # Set Inserting field of mri_upload to indicate a script is running on the upload # and load the notification object # --------------------------------------------------------------------------------------------- - if "UploadID" in self.imaging_upload_obj.imaging_upload_dict.keys(): - self.upload_id = self.imaging_upload_obj.imaging_upload_dict["UploadID"] - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',)) + # Update the MRI upload. 
+ self.mri_upload.inserting = True + self.env.db.commit() - # Initiate the notification object now that we have a confirmed UploadID - self.env.init_notifier(self.upload_id) + self.env.init_notifier(self.mri_upload.id) # --------------------------------------------------------------------------------- # Determine subject IDs based on DICOM headers and validate the IDs against the DB # Verify PSC information stored in DICOMs # Grep scanner information based on what is in the DICOM headers # --------------------------------------------------------------------------------- - if self.dicom_archive is not None: - try: - self.subject_info = self.imaging_obj.determine_subject_info(self.dicom_archive) - except DetermineSubjectInfoError as error: - log_error_exit(self.env, error.message, lib.exitcode.PROJECT_CUSTOMIZATION_FAILURE) + try: + self.subject_info = self.imaging_obj.determine_subject_info(self.dicom_archive) + except DetermineSubjectInfoError as error: + log_error_exit(self.env, error.message, lib.exitcode.PROJECT_CUSTOMIZATION_FAILURE) - # verify PSC information stored in DICOMs - self.site_dict = self.determine_study_info() - log_verbose(self.env, ( - f"Found Center Name: {self.site_dict['CenterName']}," - f" Center ID: {self.site_dict['CenterID']}" - )) + # verify PSC information stored in DICOMs + self.site_dict = self.determine_study_info() + log_verbose(self.env, ( + f"Found Center Name: {self.site_dict['CenterName']}," + f" Center ID: {self.site_dict['CenterID']}" + )) - # grep scanner information based on what is in the DICOM headers - self.scanner_id = self.determine_scanner_info() + # grep scanner information based on what is in the DICOM headers + self.scanner_id = self.determine_scanner_info() - def load_imaging_upload_and_tarchive_dictionaries(self): + def load_mri_upload_and_dicom_archive(self): """ - Loads the imaging_upload and tarchive info dictionaries based on the content of the imaging_upload + Loads the MRI upload and DICOM archives based on 
the content of the imaging_upload and tarchive tables found for the processed UploadID/ArchiveLocation given as argument to the script. """ @@ -127,24 +122,26 @@ def load_imaging_upload_and_tarchive_dictionaries(self): upload_id = self.options_dict["upload_id"]["value"] tarchive_path = self.options_dict["tarchive_path"]["value"] \ if "tarchive_path" in self.options_dict.keys() else None - success = False + if upload_id and tarchive_path: - self.imaging_upload_obj.create_imaging_upload_dict_from_upload_id(upload_id) - if not self.imaging_upload_obj.imaging_upload_dict: + mri_upload = try_get_mri_upload_with_id(self.env.db, upload_id) + if mri_upload is None: log_error_exit( self.env, f"Did not find an entry in mri_upload associated with \'UploadID\' {upload_id}.", lib.exitcode.SELECT_FAILURE, ) - tarchive_id = self.imaging_upload_obj.imaging_upload_dict["TarchiveID"] - if not tarchive_id: + + self.mri_upload = mri_upload + + if self.mri_upload.dicom_archive is None: log_error_exit( self.env, f"UploadID {upload_id} is not linked to any tarchive in mri_upload.", lib.exitcode.SELECT_FAILURE, ) - self.dicom_archive = try_get_dicom_archive_with_id(self.env.db, tarchive_id) + self.dicom_archive = self.mri_upload.dicom_archive if os.path.join(self.data_dir, 'tarchive', self.dicom_archive.archive_location) != tarchive_path: log_error_exit( self.env, @@ -152,35 +149,53 @@ def load_imaging_upload_and_tarchive_dictionaries(self): lib.exitcode.SELECT_FAILURE, ) - err_msg = '' if upload_id: - self.imaging_upload_obj.create_imaging_upload_dict_from_upload_id(upload_id) - if not self.imaging_upload_obj.imaging_upload_dict: - err_msg += f"Did not find an entry in mri_upload associated with 'UploadID' {upload_id}" - else: - if self.imaging_upload_obj.imaging_upload_dict["TarchiveID"]: - tarchive_id = self.imaging_upload_obj.imaging_upload_dict["TarchiveID"] - self.dicom_archive = try_get_dicom_archive_with_id(self.env.db, tarchive_id) - if self.dicom_archive is not None: - success = 
True - else: - err_msg += f"Could not load tarchive dictionary for TarchiveID {tarchive_id}" + mri_upload = try_get_mri_upload_with_id(self.env.db, upload_id) + if mri_upload is None: + log_error_exit( + self.env, + f"Did not find an entry in mri_upload associated with 'UploadID' {upload_id}", + lib.exitcode.SELECT_FAILURE, + ) + + self.mri_upload = mri_upload + if self.mri_upload.dicom_archive is None: + log_error_exit( + self.env, + f"Did not find a DICOM archive associated with upload ID {upload_id}", + lib.exitcode.SELECT_FAILURE, + ) + + self.dicom_archive = self.mri_upload.dicom_archive elif tarchive_path: archive_location = tarchive_path.replace(self.dicom_lib_dir, "") - self.dicom_archive = try_get_dicom_archive_with_archive_location(self.env.db, archive_location) - if self.dicom_archive is not None: - success, new_err_msg = self.imaging_upload_obj.create_imaging_upload_dict_from_tarchive_id( - self.dicom_archive.id + dicom_archive = try_get_dicom_archive_with_archive_location(self.env.db, archive_location) + if dicom_archive is None: + log_error_exit( + self.env, + f"Could not load tarchive dictionary for ArchiveLocation {archive_location}", + lib.exitcode.SELECT_FAILURE, ) - if not success: - err_msg += new_err_msg - else: - err_msg += f"Could not load tarchive dictionary for ArchiveLocation {archive_location}" - - if not success and not self.force: - log_error_exit(self.env, err_msg, lib.exitcode.SELECT_FAILURE) + self.dicom_archive = dicom_archive + + mri_uploads = self.dicom_archive.mri_uploads + match mri_uploads: + case []: + log_error_exit( + self.env, + f"Did not find an entry in mri_upload associated with 'TarchiveID' {self.dicom_archive.id}", + lib.exitcode.SELECT_FAILURE, + ) + case [mri_upload]: + self.mri_upload = mri_upload + case _: + log_error_exit( + self.env, + f"Found {len(mri_uploads)} rows in mri_upload for 'TarchiveID' {self.dicom_archive.id}", + lib.exitcode.SELECT_FAILURE, + ) def determine_study_info(self): """ @@ -244,14 +259,15 
@@ def validate_subject_info(self): try: validate_subject_info(self.env.db, self.subject_info) - self.imaging_upload_obj.update_mri_upload( - upload_id=self.upload_id, fields=('IsCandidateInfoValidated',), values=('1',) - ) + # Update the MRI upload. + self.mri_upload.is_candidate_info_validated = True + self.env.db.commit() except ValidateSubjectInfoError as error: log_warning(self.env, error.message) - self.imaging_upload_obj.update_mri_upload( - upload_id=self.upload_id, fields=('IsCandidateInfoValidated',), values=('0',) - ) + + # Update the MRI upload. + self.mri_upload.is_candidate_info_validated = False + self.env.db.commit() def check_if_tarchive_validated_in_db(self): """ @@ -261,13 +277,13 @@ def check_if_tarchive_validated_in_db(self): If the DICOM archive was not validated, the pipeline will exit and log the proper error information. """ # reload the mri_upload object with updated database values - self.load_imaging_upload_and_tarchive_dictionaries() - mu_dict = self.imaging_upload_obj.imaging_upload_dict - if ("IsTarchiveValidated" not in mu_dict.keys() or not mu_dict["IsTarchiveValidated"]) and not self.force: + self.load_mri_upload_and_dicom_archive() + + if not self.mri_upload.is_dicom_archive_validated and not self.force: log_error_exit( self.env, ( - f"The DICOM archive validation has failed for UploadID {self.upload_id}. Either run the" + f"The DICOM archive validation has failed for UploadID {self.mri_upload.id}. Either run the" f" validation again and fix the problem or use --force to force the insertion of the NIfTI file." ), lib.exitcode.INVALID_DICOM, @@ -311,8 +327,9 @@ def move_file(self, old_file_path, new_file_path): ) def end_upload(self): - if self.upload_id: - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=("Inserting",), values=("0",)) + # Update the MRI upload. 
+ self.mri_upload.inserting = False + self.env.db.commit() def remove_tmp_dir(self): """ diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py index 98c72275b..db8e3d8ba 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_archive_loader_pipeline.py @@ -114,7 +114,7 @@ def _run_dicom_archive_validation_pipeline(self): "run_dicom_archive_validation.py", "-p", self.options_dict["profile"]["value"], "-t", self.tarchive_path, - "-u", str(self.upload_id) + "-u", str(self.mri_upload.id) ] if self.verbose: validation_command.append("-v") @@ -123,16 +123,17 @@ def _run_dicom_archive_validation_pipeline(self): validation_process.communicate() if validation_process.returncode == 0: log_verbose(self.env, ( - f"run_dicom_archive_validation.py successfully executed for UploadID {self.upload_id} " + f"run_dicom_archive_validation.py successfully executed for UploadID {self.mri_upload.id} " f"and ArchiveLocation {self.tarchive_path}" )) - # reset mri_upload to Inserting as run_dicom_archive_validation.py will set Inserting=0 after execution - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',)) + # reset mri_upload to Inserting as run_dicom_archive_validation.py will set Inserting=False after execution + self.mri_upload.inserting = True + self.env.db.commit() else: log_error_exit( self.env, ( - f"run_dicom_archive_validation.py failed validation for UploadID {self.upload_id}" + f"run_dicom_archive_validation.py failed validation for UploadID {self.mri_upload.id}" f"and ArchiveLocation {self.tarchive_path}. Exit code was {validation_process.returncode}." 
), lib.exitcode.INVALID_DICOM, @@ -300,7 +301,7 @@ def _run_nifti_insertion(self, nifti_file_path, json_file_path, bval_file_path=N nifti_insertion_command = [ "run_nifti_insertion.py", "-p", self.options_dict["profile"]["value"], - "-u", str(self.upload_id), + "-u", str(self.mri_upload.id), "-n", nifti_file_path, "-j", json_file_path, "-c" @@ -318,8 +319,9 @@ def _run_nifti_insertion(self, nifti_file_path, json_file_path, bval_file_path=N if insertion_process.returncode == 0: log_verbose(self.env, f"run_nifti_insertion.py successfully executed for file {nifti_file_path}") self.inserted_file_count += 1 - # reset mri_upload to Inserting as run_nifti_insertion.py will set Inserting=0 after execution - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',)) + # reset mri_upload to Inserting as run_nifti_insertion.py will set Inserting=False after execution + self.mri_upload.inserting = True + self.env.db.commit() else: print(stdout) log_verbose(self.env, f"run_nifti_insertion.py failed for file {nifti_file_path}.\n{stdout}") @@ -406,11 +408,14 @@ def _update_mri_upload(self): """ files_inserted_list = self.imaging_obj.files_db_obj.get_files_inserted_for_tarchive_id(self.dicom_archive.id) - self.imaging_upload_obj.update_mri_upload( - upload_id=self.upload_id, - fields=("Inserting", "InsertionComplete", "number_of_mincInserted", "number_of_mincCreated", "SessionID"), - values=("0", "1", len(files_inserted_list), len(self.nifti_files_to_insert), self.session.id) - ) + + # Update the MRI upload. 
+ self.mri_upload.inserting = False + self.mri_upload.insertion_complete = True + self.mri_upload.number_of_minc_inserted = len(files_inserted_list) + self.mri_upload.number_of_minc_created = len(self.nifti_files_to_insert) + self.mri_upload.session = self.session + self.env.db.commit() def _get_summary_of_insertion(self): """ @@ -443,7 +448,7 @@ def _get_summary_of_insertion(self): excl_viol_list = ', '.join(excluded_violations_list) if excluded_violations_list else 0 summary = f""" - Finished processing UploadID {self.upload_id}! + Finished processing UploadID {self.mri_upload.id}! - DICOM archive info: {self.dicom_archive.id} => {self.tarchive_path} - {nb_files_inserted} files were inserted into the files table: {files_list} - {nb_prot_violation} files did not match any protocol: {prot_viol_list} diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py index 193680e86..e9d71f839 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/dicom_validation_pipeline.py @@ -38,11 +38,12 @@ def __init__(self, loris_getopt_obj, script_name): # If we get here, the tarchive is validated & the script stops running so update mri_upload # --------------------------------------------------------------------------------------------- log_verbose(self.env, f"DICOM archive {self.options_dict['tarchive_path']['value']} is valid!") - self.imaging_upload_obj.update_mri_upload( - upload_id=self.upload_id, - fields=("isTarchiveValidated", "Inserting",), - values=("1", "0") - ) + + # Update the MRI upload. 
+ self.mri_upload.is_dicom_archive_validated = True + self.mri_upload.inserting = False + self.env.db.commit() + self.remove_tmp_dir() # remove temporary directory sys.exit(lib.exitcode.SUCCESS) @@ -57,11 +58,10 @@ def _validate_dicom_archive_md5sum(self): dicom_archive_path = os.path.join(self.dicom_lib_dir, self.dicom_archive.archive_location) result = _validate_dicom_archive_md5sum(self.env, self.dicom_archive, dicom_archive_path) if not result: - self.imaging_upload_obj.update_mri_upload( - upload_id=self.upload_id, - fields=("isTarchiveValidated", "IsCandidateInfoValidated"), - values=("0", "0") - ) + # Update the MRI upload. + self.mri_upload.is_dicom_archive_validated = False + self.mri_upload.is_candidate_info_validated = False + self.env.db.commit() log_error_exit( self.env, diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py index 5a37d384e..0eb420518 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/nifti_insertion_pipeline.py @@ -59,7 +59,8 @@ def __init__(self, loris_getopt_obj, script_name): # --------------------------------------------------------------------------------------------- # Set 'Inserting' flag to 1 in mri_upload # --------------------------------------------------------------------------------------------- - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',)) + self.mri_upload.inserting = True + self.env.db.commit() # --------------------------------------------------------------------------------------------- # Get S3 object from loris_getopt object @@ -217,7 +218,9 @@ def __init__(self, loris_getopt_obj, script_name): # --------------------------------------------------------------------------------------------- # If we get there, the insertion was complete and successful # 
--------------------------------------------------------------------------------------------- - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('0',)) + self.mri_upload.inserting = False + self.env.db.commit() + sys.exit(lib.exitcode.SUCCESS) def _load_json_sidecar_file(self): @@ -730,7 +733,7 @@ def _run_push_to_s3_pipeline(self): push_to_s3_cmd = [ "run_push_imaging_files_to_s3_pipeline.py", "-p", self.options_dict["profile"]["value"], - "-u", str(self.upload_id), + "-u", str(self.mri_upload.id), ] if self.verbose: push_to_s3_cmd.append("-v") @@ -741,10 +744,10 @@ def _run_push_to_s3_pipeline(self): if s3_process.returncode == 0: log_verbose( self.env, - f"run_push_imaging_files_to_s3_pipeline.py successfully executed for Upload ID {self.upload_id}" + f"run_push_imaging_files_to_s3_pipeline.py successfully executed for Upload ID {self.mri_upload.id}" ) else: log_verbose( self.env, - f"run_push_imaging_files_to_s3_pipeline.py failed for Upload ID {self.upload_id}.\n{stdout}" + f"run_push_imaging_files_to_s3_pipeline.py failed for Upload ID {self.mri_upload.id}.\n{stdout}" ) diff --git a/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py b/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py index 4cabd74f0..0a8661d49 100644 --- a/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py +++ b/python/lib/dcm2bids_imaging_pipeline_lib/push_imaging_files_to_s3_pipeline.py @@ -34,7 +34,8 @@ def __init__(self, loris_getopt_obj, script_name): # --------------------------------------------------------------------------------------------- # Set 'Inserting' flag to 1 in mri_upload # --------------------------------------------------------------------------------------------- - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('1',)) + self.mri_upload.inserting = True + self.env.db.commit() # 
--------------------------------------------------------------------------------------------- # Get S3 object from loris_getopt object @@ -68,7 +69,8 @@ def __init__(self, loris_getopt_obj, script_name): os.remove(full_path) self._clean_up_empty_folders() - self.imaging_upload_obj.update_mri_upload(upload_id=self.upload_id, fields=('Inserting',), values=('0',)) + self.mri_upload.inserting = False + self.env.db.commit() sys.exit(lib.exitcode.SUCCESS) def _get_files_to_push_list(self): diff --git a/python/lib/imaging_upload.py b/python/lib/imaging_upload.py index 00e5889cb..3e21587c8 100644 --- a/python/lib/imaging_upload.py +++ b/python/lib/imaging_upload.py @@ -1,10 +1,13 @@ """This class gather functions for mri upload handling.""" +from typing_extensions import deprecated + from lib.database_lib.mri_upload_db import MriUploadDB __license__ = "GPLv3" +@deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') class ImagingUpload: """ This class gather functions that interact with the database and allow mri_upload @@ -41,6 +44,7 @@ def __init__(self, db, verbose): self.imaging_upload_dict = dict() + @deprecated('Use `lib.db.queries.mri_upload.try_get_mri_upload_with_id` instead') def create_imaging_upload_dict_from_upload_id(self, upload_id): """ Fill in the imaging upload dictionary with the information found for a given upload ID in the mri_upload table. @@ -52,6 +56,7 @@ def create_imaging_upload_dict_from_upload_id(self, upload_id): results = self.mri_upload_db_obj.create_mri_upload_dict('UploadID', upload_id) self.imaging_upload_dict = results[0] if results else None + @deprecated('Use `lib.db.models.dicom_archive.DbDicomArchive.mri_uploads` instead') def create_imaging_upload_dict_from_tarchive_id(self, tarchive_id): """ Fill in the imaging upload dictionary with information found for a given TarchiveID in the mri_upload table. 
@@ -73,6 +78,7 @@ def create_imaging_upload_dict_from_tarchive_id(self, tarchive_id): else: return False, f"Did not find an entry in mri_upload associated with 'TarchiveID' {tarchive_id}" + @deprecated('Use `lib.db.models.mri_upload.DbMriUpload` instead') def update_mri_upload(self, upload_id, fields, values): """ Calls the MriUpload database lib to update the mri_upload table. diff --git a/python/tests/integration/scripts/test_run_dicom_archive_loader.py b/python/tests/integration/scripts/test_run_dicom_archive_loader.py index c8563e477..b40b46ddd 100644 --- a/python/tests/integration/scripts/test_run_dicom_archive_loader.py +++ b/python/tests/integration/scripts/test_run_dicom_archive_loader.py @@ -42,5 +42,8 @@ def test(): # Check that the expected data has been inserted in the database mri_upload = get_mri_upload_with_patient_name(db, 'MTL001_300001_V2') + assert mri_upload.inserting is False + assert mri_upload.is_candidate_info_validated is True + assert mri_upload.is_dicom_archive_validated is True assert mri_upload.session is not None assert len(mri_upload.session.files) == 1