diff --git a/src/deadline/client/api/_submit_job_bundle.py b/src/deadline/client/api/_submit_job_bundle.py index 17a1ed305..90f93ca28 100644 --- a/src/deadline/client/api/_submit_job_bundle.py +++ b/src/deadline/client/api/_submit_job_bundle.py @@ -37,6 +37,7 @@ JobAttachmentsFileSystem, AssetRootGroup, AssetRootManifest, + AssetUploadGroup, JobAttachmentS3Settings, ) from ...job_attachments.progress_tracker import SummaryStatistics, ProgressReportMetadata @@ -58,11 +59,12 @@ def create_job_from_job_bundle( max_retries_per_task: Optional[int] = None, print_function_callback: Callable[[str], None] = lambda msg: None, decide_cancel_submission_callback: Callable[ - [dict[str, int], int, int], bool - ] = lambda paths, files, bytes: False, + [AssetUploadGroup], bool + ] = lambda upload_group: False, hashing_progress_callback: Optional[Callable[[ProgressReportMetadata], bool]] = None, upload_progress_callback: Optional[Callable[[ProgressReportMetadata], bool]] = None, create_job_result_callback: Optional[Callable[[], bool]] = None, + require_paths_exist: bool = False, ) -> Union[str, None]: """ Creates a job in the AWS Deadline Cloud farm/queue configured as default for the @@ -212,10 +214,15 @@ def create_job_from_job_bundle( if asset_references and "jobAttachmentSettings" in queue: # Extend input_filenames with all the files in the input_directories missing_directories: set[str] = set() - empty_directories: set[str] = set() for directory in asset_references.input_directories: if not os.path.isdir(directory): - missing_directories.add(directory) + if require_paths_exist: + missing_directories.add(directory) + else: + logger.info( + f"Input path '{directory}' does not exist. Adding to referenced paths." 
+ ) + asset_references.referenced_paths.add(directory) continue is_dir_empty = True @@ -226,30 +233,21 @@ def create_job_from_job_bundle( asset_references.input_filenames.update( os.path.normpath(os.path.join(root, file)) for file in files ) + # Empty directories just become references since there's nothing to upload if is_dir_empty: - empty_directories.add(directory) + logger.info(f"Input directory '{directory}' is empty. Adding to referenced paths.") + asset_references.referenced_paths.add(directory) asset_references.input_directories.clear() - misconfigured_directories = missing_directories or empty_directories - if misconfigured_directories: - all_misconfigured_inputs = "" + if missing_directories: + all_missing_directories = "\n\t".join(sorted(list(missing_directories))) misconfigured_directories_msg = ( "Job submission contains misconfigured input directories and cannot be submitted." - " All input directories must exist and cannot be empty." + " All input directories must exist." + f"\nNon-existent directories:\n\t{all_missing_directories}" ) - if missing_directories: - missing_directory_list = sorted(list(missing_directories)) - all_missing_directories = "\n\t".join(missing_directory_list) - all_misconfigured_inputs += ( - f"\nNon-existent directories:\n\t{all_missing_directories}" - ) - if empty_directories: - empty_directory_list = sorted(list(empty_directories)) - all_empty_directories = "\n\t".join(empty_directory_list) - all_misconfigured_inputs += f"\nEmpty directories:\n\t{all_empty_directories}" - - raise MisconfiguredInputsError(misconfigured_directories_msg + all_misconfigured_inputs) + raise MisconfiguredInputsError(misconfigured_directories_msg) queue_role_session = api.get_queue_user_boto3_session( deadline=deadline, @@ -267,18 +265,14 @@ def create_job_from_job_bundle( ) upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=job_bundle_dir, input_paths=sorted(asset_references.input_filenames), 
output_paths=sorted(asset_references.output_directories), referenced_paths=sorted(asset_references.referenced_paths), storage_profile=storage_profile, + require_paths_exist=require_paths_exist, ) if upload_group.asset_groups: - if decide_cancel_submission_callback( - upload_group.num_outside_files_by_root, - upload_group.total_input_files, - upload_group.total_input_bytes, - ): + if decide_cancel_submission_callback(upload_group): print_function_callback("Job submission canceled.") return None diff --git a/src/deadline/client/cli/_groups/bundle_group.py b/src/deadline/client/cli/_groups/bundle_group.py index 0ec8b383a..1047c6cfb 100644 --- a/src/deadline/client/cli/_groups/bundle_group.py +++ b/src/deadline/client/cli/_groups/bundle_group.py @@ -13,13 +13,13 @@ from botocore.exceptions import ClientError from deadline.client import api -from deadline.client.config import set_setting +from deadline.client.config import config_file, get_setting, set_setting from deadline.job_attachments.exceptions import ( AssetSyncError, AssetSyncCancelledError, MisconfiguredInputsError, ) -from deadline.job_attachments.models import JobAttachmentsFileSystem +from deadline.job_attachments.models import AssetUploadGroup, JobAttachmentsFileSystem from deadline.job_attachments.progress_tracker import ProgressReportMetadata from deadline.job_attachments._utils import _human_readable_file_size @@ -91,6 +91,11 @@ def validate_parameters(ctx, param, value): is_flag=True, help="Skip any confirmation prompts", ) +@click.option( + "--require-paths-exist", + is_flag=True, + help="Require all input paths to exist", +) @click.argument("job_bundle_dir") @_handle_error def bundle_submit( @@ -102,6 +107,7 @@ def bundle_submit( priority, max_failed_tasks_count, max_retries_per_task, + require_paths_exist, **args, ): """ @@ -116,36 +122,54 @@ def bundle_submit( def _check_create_job_wait_canceled() -> bool: return sigint_handler.continue_operation - def _decide_cancel_submission( - 
deviated_file_count_by_root: dict[str, int], - num_files: int, - upload_size: int, - ): + def _decide_cancel_submission(upload_group: AssetUploadGroup) -> bool: + """ + Callback to decide if submission should be cancelled or not. Return 'True' to cancel. + Prints a warning that requires confirmation if paths are found outside of configured storage profile locations. + """ + warning_message = "" + for group in upload_group.asset_groups: + if not group.file_system_location_name: + warning_message += f"\n\nUnder the directory '{group.root_path}':" + warning_message += ( + f"\n\t{len(group.inputs)} input file{'' if len(group.inputs) == 1 else 's'}" + if len(group.inputs) > 0 + else "" + ) + warning_message += ( + f"\n\t{len(group.outputs)} output director{'y' if len(group.outputs) == 1 else 'ies'}" + if len(group.outputs) > 0 + else "" + ) + warning_message += ( + f"\n\t{len(group.references)} referenced file{'' if len(group.references) == 1 else 's'} and/or director{'y' if len(group.references) == 1 else 'ies'}" + if len(group.references) > 0 + else "" + ) + + # Exit early if there are no warnings and we've either set auto accept or there's no files to confirm + if not warning_message and ( + yes + or config_file.str2bool(get_setting("settings.auto_accept", config=config)) + or upload_group.total_input_files == 0 + ): + return False + message_text = ( - f"Job submission contains {num_files} files totaling {_human_readable_file_size(upload_size)}. " - " All files will be uploaded to S3 if they are not already present in the job attachments bucket." + f"Job submission contains {upload_group.total_input_files} input files totaling {_human_readable_file_size(upload_group.total_input_bytes)}. " + " All input files will be uploaded to S3 if they are not already present in the job attachments bucket." 
) - if deviated_file_count_by_root: - root_by_count_message = "\n\n".join( - [ - f"{file_count} files from: '{directory}'" - for directory, file_count in deviated_file_count_by_root.items() - ] - ) + if warning_message: message_text += ( - f"\n\nFiles were found outside of the configured storage profile location(s). " - " Please confirm that you intend to upload files from the following directories:\n\n" - f"{root_by_count_message}\n\n" - "To permanently remove this warning you must only upload files located within a storage profile location." + f"\n\nFiles were specified outside of the configured storage profile location(s). " + " Please confirm that you intend to submit a job that uses files from the following directories:" + f"{warning_message}\n\n" + "To permanently remove this warning you must only use files located within a storage profile location." ) message_text += "\n\nDo you wish to proceed?" - return ( - not yes - and num_files > 0 - and not click.confirm( - message_text, - default=not deviated_file_count_by_root, - ) + return not click.confirm( + message_text, + default=not warning_message, ) try: @@ -163,6 +187,7 @@ def _decide_cancel_submission( create_job_result_callback=_check_create_job_wait_canceled, print_function_callback=click.echo, decide_cancel_submission_callback=_decide_cancel_submission, + require_paths_exist=require_paths_exist, ) # Check Whether the CLI options are modifying any of the default settings that affect diff --git a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py index b2fc4f33a..606b830c2 100644 --- a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py @@ -23,6 +23,7 @@ QGroupBox, QLabel, QMessageBox, + QPushButton, QProgressBar, QTextEdit, QVBoxLayout, @@ -52,7 +53,12 @@ split_parameter_args, ) from deadline.job_attachments.exceptions import AssetSyncCancelledError, 
MisconfiguredInputsError -from deadline.job_attachments.models import AssetRootGroup, AssetRootManifest, StorageProfile +from deadline.job_attachments.models import ( + AssetRootGroup, + AssetRootManifest, + AssetUploadGroup, + StorageProfile, +) from deadline.job_attachments.progress_tracker import ProgressReportMetadata, SummaryStatistics from deadline.job_attachments.upload import S3AssetManager from deadline.job_attachments._utils import _human_readable_file_size @@ -105,6 +111,7 @@ def start_submission( asset_manager: Optional[S3AssetManager], deadline_client: BaseClient, auto_accept: bool = False, + require_paths_exist: bool = False, ) -> Optional[Dict[str, Any]]: """ Starts a submission. Returns the response from calling create job. If an error occurs @@ -131,6 +138,7 @@ def start_submission( self._asset_manager = asset_manager self._deadline_client = deadline_client self._auto_accept = auto_accept + self._require_paths_exist = require_paths_exist self._start_submission() return self.exec_() @@ -240,10 +248,15 @@ def _start_submission(self): ): # Extend input_filenames with all the files in the input_directories missing_directories: set[str] = set() - empty_directories: set[str] = set() for directory in self.asset_references.input_directories: if not os.path.isdir(directory): - missing_directories.add(directory) + if self._require_paths_exist: + missing_directories.add(directory) + else: + logging.info( + f"Input directory '{directory}' does not exist. Adding to referenced paths." + ) + self.asset_references.referenced_paths.add(directory) continue is_dir_empty = True @@ -254,46 +267,33 @@ def _start_submission(self): self.asset_references.input_filenames.update( os.path.normpath(os.path.join(root, file)) for file in files ) + # Empty directories just become references since there's nothing to upload if is_dir_empty: - empty_directories.add(directory) + logging.info( + f"Input directory '{directory}' is empty. Adding to referenced paths." 
+ ) + self.asset_references.referenced_paths.add(directory) self.asset_references.input_directories.clear() - misconfigured_directories = missing_directories or empty_directories - if misconfigured_directories: + if missing_directories: sample_size = 3 - sample_of_misconfigured_inputs = "" - all_misconfigured_inputs = "" misconfigured_directories_msg = ( "Job submission contains misconfigured input directories and cannot be submitted." - " All input directories must exist and cannot be empty." + " All input directories must exist." ) - if missing_directories: - missing_directory_list = sorted(list(missing_directories)) - sample_of_missing_directories = "\n\t".join( - missing_directory_list[:sample_size] - ) - sample_of_misconfigured_inputs += ( - f"\nNon-existent directories:\n\t{sample_of_missing_directories}\n" - ) - all_missing_directories = "\n\t".join(missing_directory_list) - all_misconfigured_inputs += ( - f"\nNon-existent directories:\n\t{all_missing_directories}" - ) - if empty_directories: - empty_directory_list = sorted(list(empty_directories)) - sample_of_empty_directories = "\n\t".join(empty_directory_list[:sample_size]) - sample_of_misconfigured_inputs += ( - f"\nEmpty directories:\n\t{sample_of_empty_directories}" - ) - all_empty_directories = "\n\t".join(empty_directory_list) - all_misconfigured_inputs += f"\nEmpty directories:\n\t{all_empty_directories}" + missing_directory_list = sorted(list(missing_directories)) + sample_of_missing_directories = "\n\t".join(missing_directory_list[:sample_size]) + sample_of_misconfigured_inputs = ( + f"\nNon-existent directories:\n\t{sample_of_missing_directories}\n" + ) + all_missing_directories = "\n\t".join(missing_directory_list) + all_misconfigured_inputs = ( + f"\nNon-existent directories:\n\t{all_missing_directories}" + ) logging.error(misconfigured_directories_msg + all_misconfigured_inputs) - just_a_sample = ( - len(missing_directories) > sample_size or len(empty_directories) > sample_size - ) - if 
just_a_sample: + if len(missing_directories) > sample_size: misconfigured_directories_msg += ( " Check logs for all occurrences, here's a sample:\n" ) @@ -302,19 +302,15 @@ def _start_submission(self): raise MisconfiguredInputsError(misconfigured_directories_msg) upload_group = self._asset_manager.prepare_paths_for_upload( - job_bundle_path=self._job_bundle_dir, input_paths=sorted(self.asset_references.input_filenames), output_paths=sorted(self.asset_references.output_directories), referenced_paths=sorted(self.asset_references.referenced_paths), storage_profile=self._storage_profile, + require_paths_exist=self._require_paths_exist, ) # If we find any Job Attachments, start a background thread if upload_group.asset_groups: - if not self._confirm_asset_references_outside_storage_profile( - upload_group.num_outside_files_by_root, - upload_group.total_input_files, - upload_group.total_input_bytes, - ): + if not self._confirm_asset_references_outside_storage_profile(upload_group): raise UserInitiatedCancel("Submission canceled.") self._start_hashing( @@ -595,38 +591,62 @@ def handle_thread_exception(self, e: BaseException) -> None: logger.error(str(e)) def _confirm_asset_references_outside_storage_profile( - self, - deviated_file_count_by_root: dict[str, int], - num_files: int, - upload_size: int, + self, upload_group: AssetUploadGroup ) -> bool: """ Creates a dialog to prompt the user to confirm that they want to proceed with uploading when files were found outside of the configured storage profile locations. """ - message_box = QMessageBox(self) message_text = ( - f"Job submission contains {num_files} files totaling {_human_readable_file_size(upload_size)}. " - " All files will be uploaded to S3 if they are not already present in the job attachments bucket." + f"Job submission contains {upload_group.total_input_files} input files totaling {_human_readable_file_size(upload_group.total_input_bytes)}. 
" + " All input files will be uploaded to S3 if they are not already present in the job attachments bucket." ) - if deviated_file_count_by_root: - root_by_count_message = "\n\n".join( - [ - f"{file_count} files from: '{directory}'" - for directory, file_count in deviated_file_count_by_root.items() - ] - ) + warning_message = "" + for group in upload_group.asset_groups: + if not group.file_system_location_name: + warning_message += f"\n\nUnder the directory '{group.root_path}':" + warning_message += ( + f"\n\t{len(group.inputs)} input file{'' if len(group.inputs) == 1 else 's'}" + if len(group.inputs) > 0 + else "" + ) + warning_message += ( + f"\n\t{len(group.outputs)} output director{'y' if len(group.outputs) == 1 else 'ies'}" + if len(group.outputs) > 0 + else "" + ) + warning_message += ( + f"\n\t{len(group.references)} referenced file{'' if len(group.references) == 1 else 's'} and/or director{'y' if len(group.outputs) == 1 else 'ies'}" + if len(group.references) > 0 + else "" + ) + + # Exit early if we've set auto accept and there are no warnings + if not warning_message and self._auto_accept: + return True + + # Build the UI + message_box = QMessageBox(self) + if warning_message: message_text += ( - f"\n\nFiles were found outside of the configured storage profile location(s). " - " Please confirm that you intend to upload files from the following directories:\n\n" - f"{root_by_count_message}\n\n" - "To remove this warning you must only upload files located within a storage profile location." + f"\n\nFiles were specified outside of the configured storage profile location(s). " + " Please confirm that you intend to submit a job that uses files from the following directories:" + f"{warning_message}\n\n" + "To permanently remove this warning you must only use files located within a storage profile location." 
) message_box.setIcon(QMessageBox.Warning) + message_box.setText(message_text) message_box.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel) message_box.setDefaultButton(QMessageBox.Ok) + if not warning_message: + # If we don't have any warnings, add the "Do not ask again" button that acts like 'OK' but sets the config + # setting to always auto-accept similar prompts in the future. + dont_ask_button = QPushButton("Do not ask again", self) + dont_ask_button.clicked.connect(lambda: set_setting("settings.auto_accept", "true")) + message_box.addButton(dont_ask_button, QMessageBox.ActionRole) + message_box.setWindowTitle("Job Attachments Valid Files Confirmation") selection = message_box.exec() diff --git a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py index 2e1b18a85..394ee8d0e 100644 --- a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py @@ -475,6 +475,7 @@ def on_submit(self): asset_manager, deadline, auto_accept=str2bool(get_setting("settings.auto_accept")), + require_paths_exist=self.job_attachments.get_require_paths_exist(), ) except UserInitiatedCancel as uic: logger.info("Canceling submission.") diff --git a/src/deadline/client/ui/widgets/job_attachments_tab.py b/src/deadline/client/ui/widgets/job_attachments_tab.py index 7026c83da..3ef095d60 100644 --- a/src/deadline/client/ui/widgets/job_attachments_tab.py +++ b/src/deadline/client/ui/widgets/job_attachments_tab.py @@ -58,11 +58,19 @@ def __init__( def _build_ui(self) -> None: tab_layout = QVBoxLayout(self) + # Create a group box for general settings + size_policy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) + size_policy.setVerticalStretch(10) + self.general_group = QGroupBox("General Submission Settings", self) + tab_layout.addWidget(self.general_group) + self.general_group.setSizePolicy(size_policy) + general_layout = 
QVBoxLayout(self.general_group) + # Create a group box for each type of attachment self.input_files_group = QGroupBox("Attach Input Files", self) - tab_layout.addWidget(self.input_files_group) size_policy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) size_policy.setVerticalStretch(80) + tab_layout.addWidget(self.input_files_group) self.input_files_group.setSizePolicy(size_policy) input_files_layout = QVBoxLayout(self.input_files_group) @@ -80,6 +88,10 @@ def _build_ui(self) -> None: tab_layout.addWidget(self.output_directories_group) output_directories_layout = QVBoxLayout(self.output_directories_group) + # General settings + self.general_settings = JobAttachmentsGeneralWidget(self) + general_layout.addWidget(self.general_settings) + # The "Attach Input Files" attachments self.input_files_controls = JobAttachmentsControlsWidget(self) self.input_files_controls.show_auto_detected.stateChanged.connect( @@ -308,6 +320,27 @@ def get_asset_references(self) -> AssetReferences: """ return self.auto_detected_attachments.union(self.attachments) + def get_require_paths_exist(self) -> bool: + """ + Returns the checkbox value of whether to allow empty paths or not. + """ + return self.general_settings.require_paths_exist.isChecked() + + +class JobAttachmentsGeneralWidget(QWidget): + """ + A Widget that contains general settings for a specific submission. 
+ """ + + def __init__(self, parent=None) -> None: + super().__init__(parent=parent) + + layout = QHBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + + self.require_paths_exist = QCheckBox("Require All Input Paths Exist", parent=self) + layout.addWidget(self.require_paths_exist) + class JobAttachmentsControlsWidget(QWidget): """ diff --git a/src/deadline/job_attachments/models.py b/src/deadline/job_attachments/models.py index baaca9c78..63f48172e 100644 --- a/src/deadline/job_attachments/models.py +++ b/src/deadline/job_attachments/models.py @@ -55,7 +55,6 @@ class AssetUploadGroup: """Represents all of the information needed to prepare to upload assets""" asset_groups: List[AssetRootGroup] = field(default_factory=list) - num_outside_files_by_root: dict[str, int] = field(default_factory=dict) total_input_files: int = 0 total_input_bytes: int = 0 diff --git a/src/deadline/job_attachments/upload.py b/src/deadline/job_attachments/upload.py index e71d1ab4a..410c9f290 100644 --- a/src/deadline/job_attachments/upload.py +++ b/src/deadline/job_attachments/upload.py @@ -824,6 +824,7 @@ def _get_asset_groups( referenced_paths: set[str], local_type_locations: dict[str, str] = {}, shared_type_locations: dict[str, str] = {}, + require_paths_exist: bool = False, ) -> list[AssetRootGroup]: """ For the given input paths and output paths, a list of groups is returned, where paths sharing @@ -847,7 +848,13 @@ def _get_asset_groups( # Need to use absolute to not resolve symlinks, but need normpath to get rid of relative paths, i.e. '..' abs_path = Path(os.path.normpath(Path(_path).absolute())) if not abs_path.exists(): - missing_input_paths.add(abs_path) + if require_paths_exist: + missing_input_paths.add(abs_path) + else: + logger.info( + f"Input path '{_path}' resolving to '{abs_path}' does not exist. Adding to referenced paths." 
+ ) + referenced_paths.add(_path) continue if abs_path.is_dir(): misconfigured_directories.add(abs_path) @@ -1042,30 +1049,13 @@ def _get_file_system_locations_by_type( shared_type_locations[fs_loc.path] = fs_loc.name return local_type_locations, shared_type_locations - def _get_deviated_file_count_by_root( - self, groups: list[AssetRootGroup], root_path: str - ) -> dict[str, int]: - """ - Given a list of AssetRootGroups and a root directory, return a dict of root paths and file counts that - are outside of that root path, and aren't in any storage profile locations. - """ - real_root_path = Path(root_path).resolve() - deviated_file_count_by_root: dict[str, int] = {} - for group in groups: - if ( - not str(Path(group.root_path).resolve()).startswith(str(real_root_path)) - and not group.file_system_location_name - and group.inputs - ): - deviated_file_count_by_root[group.root_path] = len(group.inputs) - return deviated_file_count_by_root - def _group_asset_paths( self, input_paths: list[str], output_paths: list[str], referenced_paths: list[str], storage_profile: Optional[StorageProfile] = None, + require_paths_exist: bool = False, ) -> list[AssetRootGroup]: """ Resolves all of the paths that will be uploaded, sorting by storage profile location. @@ -1085,17 +1075,18 @@ def _group_asset_paths( {rf_path for rf_path in referenced_paths if rf_path}, local_type_locations, shared_type_locations, + require_paths_exist, ) return asset_groups def prepare_paths_for_upload( self, - job_bundle_path: str, input_paths: list[str], output_paths: list[str], referenced_paths: list[str], storage_profile: Optional[StorageProfile] = None, + require_paths_exist: bool = False, ) -> AssetUploadGroup: """ Processes all of the paths required for upload, grouping them by asset root and local storage profile locations. @@ -1103,15 +1094,15 @@ def prepare_paths_for_upload( for files that were not under the root path or any local storage profile locations. 
""" asset_groups = self._group_asset_paths( - input_paths, output_paths, referenced_paths, storage_profile + input_paths, + output_paths, + referenced_paths, + storage_profile, + require_paths_exist, ) (input_file_count, input_bytes) = self._get_total_input_size_from_asset_group(asset_groups) - num_outside_files_by_bundle_path = self._get_deviated_file_count_by_root( - asset_groups, job_bundle_path - ) return AssetUploadGroup( asset_groups=asset_groups, - num_outside_files_by_root=num_outside_files_by_bundle_path, total_input_files=input_file_count, total_input_bytes=input_bytes, ) diff --git a/test/integ/deadline_job_attachments/test_job_attachments.py b/test/integ/deadline_job_attachments/test_job_attachments.py index 503e59a5b..09acb4a22 100644 --- a/test/integ/deadline_job_attachments/test_job_attachments.py +++ b/test/integ/deadline_job_attachments/test_job_attachments.py @@ -147,7 +147,6 @@ def upload_input_files_assets_not_in_cas(job_attachment_test: JobAttachmentTest) # WHEN upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(job_attachment_test.ASSET_ROOT), input_paths=[str(job_attachment_test.SCENE_MA_PATH)], output_paths=[str(job_attachment_test.OUTPUT_PATH)], referenced_paths=[], @@ -225,7 +224,6 @@ def upload_input_files_one_asset_in_cas( # WHEN upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(job_attachment_test.ASSET_ROOT), input_paths=input_paths, output_paths=[str(job_attachment_test.OUTPUT_PATH)], referenced_paths=[], @@ -321,7 +319,6 @@ def test_upload_input_files_all_assets_in_cas( # WHEN upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(job_attachment_test.ASSET_ROOT), input_paths=input_paths, output_paths=[str(job_attachment_test.OUTPUT_PATH)], referenced_paths=[], @@ -1153,7 +1150,6 @@ def upload_input_files_no_input_paths( # WHEN upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(job_attachment_test.ASSET_ROOT), input_paths=[], 
output_paths=[str(job_attachment_test.OUTPUT_PATH)], referenced_paths=[], @@ -1221,7 +1217,6 @@ def test_upload_input_files_no_download_paths(job_attachment_test: JobAttachment # WHEN upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(job_attachment_test.ASSET_ROOT), input_paths=[str(job_attachment_test.SCENE_MA_PATH)], output_paths=[], referenced_paths=[], @@ -1337,7 +1332,6 @@ def test_upload_bucket_wrong_account(external_bucket: str, job_attachment_test: ): # The attempt to upload the asset manifest should be blocked. upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(job_attachment_test.ASSET_ROOT), input_paths=[str(job_attachment_test.SCENE_MA_PATH)], output_paths=[str(job_attachment_test.OUTPUT_PATH)], referenced_paths=[], diff --git a/test/unit/deadline_client/api/test_job_bundle_submission.py b/test/unit/deadline_client/api/test_job_bundle_submission.py index bd095be1a..36d771206 100644 --- a/test/unit/deadline_client/api/test_job_bundle_submission.py +++ b/test/unit/deadline_client/api/test_job_bundle_submission.py @@ -6,6 +6,7 @@ import json import os +from logging import INFO from pathlib import Path from typing import Any, Dict, Tuple from unittest.mock import ANY, patch, Mock @@ -552,7 +553,6 @@ def fake_print_callback(msg: str) -> None: ) mock_prepare_paths.assert_called_once_with( - job_bundle_path=temp_job_bundle_dir, input_paths=sorted( [ os.path.join(temp_assets_dir, "asset-1.txt"), @@ -563,6 +563,7 @@ def fake_print_callback(msg: str) -> None: output_paths=[os.path.join(temp_assets_dir, "somedir")], referenced_paths=[], storage_profile=MOCK_STORAGE_PROFILE, + require_paths_exist=False, ) mock_hash_attachments.assert_called_once_with( asset_manager=ANY, @@ -670,7 +671,6 @@ def fake_print_callback(msg: str) -> None: ) mock_prepare_paths.assert_called_once_with( - job_bundle_path=temp_job_bundle_dir, input_paths=sorted( [ os.path.join(temp_assets_dir, "asset-1.txt"), @@ -681,6 +681,7 @@ def 
fake_print_callback(msg: str) -> None: output_paths=[os.path.join(temp_assets_dir, "somedir")], referenced_paths=[], storage_profile=MOCK_STORAGE_PROFILE, + require_paths_exist=False, ) mock_hash_attachments.assert_not_called() mock_upload_assets.assert_not_called() @@ -801,20 +802,23 @@ def test_create_job_from_job_bundle_partially_empty_directories( def test_create_job_from_job_bundle_misconfigured_directories( - fresh_deadline_config, temp_job_bundle_dir + fresh_deadline_config, temp_job_bundle_dir, caplog ): """ - Test a job bundle with input directories that do not exist, or are empty + Test that submitting a job with the `require_paths_exist` flag set to true + with a job bundle with input directories that do not exist throws an error. + Also confirms that empty directories are logged and added to referenced paths. """ job_template_type, job_template = MOCK_JOB_TEMPLATE_CASES["MINIMAL_JSON"] temp_bundle_dir_as_path = Path(temp_job_bundle_dir) - missing_directory = str(temp_bundle_dir_as_path / "does" / "not" / "exist") + missing_directory = str(temp_bundle_dir_as_path / "does" / "not" / "exist" / "bad_path") empty_directory = str(temp_bundle_dir_as_path / "empty_dir") Path(empty_directory).mkdir() - with patch.object(api._session, "get_boto3_session"), patch.object( + with patch.object(_submit_job_bundle.api, "get_boto3_session"), patch.object( _submit_job_bundle.api, "get_boto3_client" - ) as client_mock: + ) as client_mock, patch.object(_submit_job_bundle.api, "get_queue_user_boto3_session"): + caplog.set_level(INFO) client_mock().get_queue.side_effect = [MOCK_GET_QUEUE_RESPONSE] config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) @@ -838,30 +842,37 @@ def test_create_job_from_job_bundle_misconfigured_directories( json.dump({"assetReferences": asset_references}, f) # WHEN / THEN - with 
pytest.raises(MisconfiguredInputsError) as execinfo: api.create_job_from_job_bundle( job_bundle_dir=temp_job_bundle_dir, queue_parameter_definitions=[], + require_paths_exist=True, ) + assert "bad_path" in str(execinfo) + assert "empty_dir" not in str(execinfo) + assert "empty_dir' is empty. Adding to referenced paths." in caplog.text + def test_create_job_from_job_bundle_misconfigured_input_files( - fresh_deadline_config, temp_job_bundle_dir + fresh_deadline_config, temp_job_bundle_dir, caplog ): """ - Test a job bundle with input files that do not exist, or are actually a directory + Test that submitting a job without the `require_paths_exist` flag set, + with a job bundle with input directories that do not exist does not include those + directories in the warning message, but DOES include misconfigured directories that + were specified as files, which should result in an error. """ job_template_type, job_template = MOCK_JOB_TEMPLATE_CASES["MINIMAL_JSON"] temp_bundle_dir_as_path = Path(temp_job_bundle_dir) missing_file = str(temp_bundle_dir_as_path / "does" / "not" / "exist.png") - directory_pretending_to_be_file = str(temp_bundle_dir_as_path / "not a file") + directory_pretending_to_be_file = str(temp_bundle_dir_as_path / "sneaky_bad_not_file") Path(directory_pretending_to_be_file).mkdir() with patch.object(api._session, "get_boto3_session"), patch.object( _submit_job_bundle.api, "get_boto3_client" ) as client_mock, patch.object(_submit_job_bundle.api, "get_queue_user_boto3_session"): + caplog.set_level(INFO) client_mock().get_queue.side_effect = [MOCK_GET_QUEUE_RESPONSE] config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) @@ -888,14 +899,16 @@ def test_create_job_from_job_bundle_misconfigured_input_files( json.dump({"assetReferences": asset_references}, f) # WHEN / THEN - with pytest.raises( - MisconfiguredInputsError, match=rf"{missing_file}|{directory_pretending_to_be_file}" - ): + with 
pytest.raises(MisconfiguredInputsError) as execinfo: api.create_job_from_job_bundle( job_bundle_dir=temp_job_bundle_dir, queue_parameter_definitions=[], ) + assert "sneaky_bad_not_file" in str(execinfo) + assert "exist.png" not in str(execinfo) + assert "exist.png' does not exist. Adding to referenced paths." in caplog.text + def test_create_job_from_job_bundle_with_single_asset_file( fresh_deadline_config, temp_job_bundle_dir, temp_assets_dir @@ -990,11 +1003,11 @@ def fake_print_callback(msg: str) -> None: ) mock_prepare_paths.assert_called_once_with( - job_bundle_path=temp_job_bundle_dir, input_paths=[os.path.join(temp_assets_dir, "asset-1.txt")], output_paths=[], referenced_paths=[], storage_profile=MOCK_STORAGE_PROFILE, + require_paths_exist=False, ) mock_hash_attachments.assert_called_once_with( asset_manager=ANY, diff --git a/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py b/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py index 91b3a67c6..3e5ea3a34 100644 --- a/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py +++ b/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py @@ -376,11 +376,11 @@ def test_create_job_from_job_bundle_with_all_asset_ref_variants( ] ) mock_prepare_paths.assert_called_once_with( - job_bundle_path=temp_job_bundle_dir, input_paths=input_paths, output_paths=output_paths, referenced_paths=referenced_paths, storage_profile=None, + require_paths_exist=False, ) mock_hash_assets.assert_called_once_with( asset_groups=[AssetRootGroup()], diff --git a/test/unit/deadline_client/cli/test_cli_bundle.py b/test/unit/deadline_client/cli/test_cli_bundle.py index 4b72b85a9..487843ae2 100644 --- a/test/unit/deadline_client/cli/test_cli_bundle.py +++ b/test/unit/deadline_client/cli/test_cli_bundle.py @@ -18,6 +18,7 @@ from deadline.client.cli._groups import bundle_group from deadline.client.api import _submit_job_bundle from deadline.client.config.config_file import 
set_setting +from deadline.job_attachments.upload import S3AssetManager from deadline.job_attachments.models import JobAttachmentsFileSystem from deadline.job_attachments.progress_tracker import SummaryStatistics @@ -326,6 +327,9 @@ def test_cli_bundle_asset_load_method(fresh_deadline_config, temp_job_bundle_dir } json.dump(data, f) + upload_group_mock = Mock() + upload_group_mock.asset_groups = [Mock()] + upload_group_mock.total_input_files = 0 attachment_mock = Mock() attachment_mock.total_bytes = 0 attachment_mock.total_files.return_value = 0 @@ -344,6 +348,8 @@ def test_cli_bundle_asset_load_method(fresh_deadline_config, temp_job_bundle_dir _submit_job_bundle.api, "get_queue_user_boto3_session" ), patch.object( bundle_group.api, "get_deadline_cloud_library_telemetry_client" + ), patch.object( + S3AssetManager, "prepare_paths_for_upload", return_value=upload_group_mock ): bundle_boto3_client_mock().create_job.return_value = MOCK_CREATE_JOB_RESPONSE bundle_boto3_client_mock().get_job.return_value = MOCK_GET_JOB_RESPONSE diff --git a/test/unit/deadline_job_attachments/test_upload.py b/test/unit/deadline_job_attachments/test_upload.py index 3e46569db..2129ce451 100644 --- a/test/unit/deadline_job_attachments/test_upload.py +++ b/test/unit/deadline_job_attachments/test_upload.py @@ -173,7 +173,6 @@ def test_asset_management( # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(asset_root), input_paths=[ str(scene_file), str(texture_file), @@ -210,7 +209,7 @@ def test_asset_management( ) # Then - assert not upload_group.num_outside_files_by_root # Shouldn't be any outside files + # TODO: need to mock out Storage Profiles and confirm no empty filesystem locations expected_attachments = Attachments( manifests=[ ManifestProperties( @@ -336,6 +335,7 @@ def test_asset_management_windows_multi_root( input_c = root_c.join("input.txt") input_c.write("a") os.utime(input_c, (1234, 1234)) + root_d = r"D:\my\awesome" input_d = 
r"D:\my\awesome\input2.txt" # doesn't exist, shouldn't get included output_d = r"D:\my\awesome\outputdir" cache_dir = tmpdir.mkdir("cache") @@ -364,7 +364,6 @@ def test_asset_management_windows_multi_root( # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(root_c), input_paths=[input_c, input_d], output_paths=[output_d], referenced_paths=[], @@ -397,9 +396,9 @@ def test_asset_management_windows_multi_root( outputRelativeDirectories=[], ), ManifestProperties( - rootPath=output_d, + rootPath=root_d, rootPathFormat=PathFormat.WINDOWS, - outputRelativeDirectories=["."], + outputRelativeDirectories=["outputdir"], ), ], ) @@ -416,10 +415,10 @@ def test_asset_management_windows_multi_root( "inputManifestHash": "manifesthash", }, { - "rootPath": f"{output_d}", + "rootPath": f"{root_d}", "rootPathFormat": PathFormat("windows").value, "outputRelativeDirectories": [ - ".", + "outputdir", ], }, ], @@ -540,7 +539,6 @@ def test_asset_management_many_inputs( # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(asset_root), input_paths=input_files, output_paths=[str(Path(asset_root).joinpath("outputs"))], referenced_paths=[], @@ -697,7 +695,6 @@ def test_asset_management_many_inputs_with_same_hash( # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(asset_root), input_paths=input_files, output_paths=[str(Path(asset_root).joinpath("outputs"))], referenced_paths=[], @@ -821,7 +818,6 @@ def mock_hash_file(file_path: str, hash_alg: HashAlgorithm): # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(tmpdir), input_paths=[already_uploaded_file, not_yet_uploaded_file], output_paths=[], referenced_paths=[], @@ -965,7 +961,6 @@ def test_asset_management_no_outputs_large_number_of_inputs_already_uploaded( # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(tmpdir), input_paths=input_files, output_paths=[], referenced_paths=[], @@ 
-1075,7 +1070,6 @@ def test_asset_management_no_inputs( # When upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(tmpdir), input_paths=[], output_paths=[output_dir], referenced_paths=[], @@ -1266,7 +1260,6 @@ def test_asset_management_manifest_version_not_implemented(self, farm_id, queue_ test_file = tmpdir.join("test.txt") test_file.write("test") upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(tmpdir), input_paths=[test_file], output_paths=[], referenced_paths=[], @@ -1688,11 +1681,8 @@ def test_asset_management_input_not_exists(self, farm_id, queue_id, tmpdir, capl ) # WHEN / THEN - with pytest.raises( - MisconfiguredInputsError, match=f"{input_not_exist}|{directory_as_file}" - ): + with pytest.raises(MisconfiguredInputsError, match="scene"): asset_manager.prepare_paths_for_upload( - job_bundle_path=str(asset_root), input_paths=[input_not_exist, directory_as_file, scene_file], output_paths=[str(Path(asset_root).joinpath("outputs"))], referenced_paths=[], @@ -1774,7 +1764,6 @@ def test_manage_assets_with_symlinks( ) upload_group = asset_manager.prepare_paths_for_upload( - job_bundle_path=str(tmpdir), input_paths=[str(symlink_input_path)], output_paths=[str(symlink_output_path)], referenced_paths=[],