diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py
index 23d68dd64230..1b78f52ebcc8 100644
--- a/lib/galaxy/model/store/__init__.py
+++ b/lib/galaxy/model/store/__init__.py
@@ -1972,8 +1972,6 @@ def add(src, dest):
 
             dir_name = "datasets"
             dir_path = os.path.join(export_directory, dir_name)
-            dataset_hid = as_dict["hid"]
-            assert dataset_hid, as_dict
 
             if dataset.dataset.id in self.dataset_id_to_path:
                 file_name, extra_files_path = self.dataset_id_to_path[dataset.dataset.id]
@@ -1992,7 +1990,7 @@
                     self.serialization_options.get_identifier(self.security, conversion) if conversion else None
                 )
                 target_filename = get_export_dataset_filename(
-                    as_dict["name"], as_dict["extension"], dataset_hid, conversion_key=conversion_key
+                    as_dict["name"], as_dict["extension"], as_dict["encoded_id"], conversion_key=conversion_key
                 )
                 arcname = os.path.join(dir_name, target_filename)
                 src = file_name
@@ -2008,7 +2006,7 @@
 
                 if len(file_list):
                     extra_files_target_filename = get_export_dataset_extra_files_dir_name(
-                        as_dict["name"], as_dict["extension"], dataset_hid, conversion_key=conversion_key
+                        as_dict["encoded_id"], conversion_key=conversion_key
                     )
                     arcname = os.path.join(dir_name, extra_files_target_filename)
                     add(extra_files_path, os.path.join(export_directory, arcname))
@@ -2973,22 +2971,22 @@ def tar_export_directory(export_directory: StrPath, out_file: StrPath, gzip: boo
         store_archive.add(os.path.join(export_directory, export_path), arcname=export_path)
 
 
-def get_export_dataset_filename(name: str, ext: str, hid: int, conversion_key: Optional[str]) -> str:
+def get_export_dataset_filename(name: str, ext: str, encoded_id: str, conversion_key: Optional[str]) -> str:
     """
     Builds a filename for a dataset using its name and extension.
     """
     base = "".join(c in FILENAME_VALID_CHARS and c or "_" for c in name)
     if not conversion_key:
-        return f"{base}_{hid}.{ext}"
+        return f"{base}_{encoded_id}.{ext}"
     else:
-        return f"{base}_{hid}_conversion_{conversion_key}.{ext}"
+        return f"{base}_{encoded_id}_conversion_{conversion_key}.{ext}"
 
 
-def get_export_dataset_extra_files_dir_name(name: str, ext: str, hid: int, conversion_key: Optional[str]) -> str:
+def get_export_dataset_extra_files_dir_name(encoded_id: str, conversion_key: Optional[str]) -> str:
     if not conversion_key:
-        return f"extra_files_path_{hid}"
+        return f"extra_files_path_{encoded_id}"
     else:
-        return f"extra_files_path_{hid}_conversion_{conversion_key}"
+        return f"extra_files_path_{encoded_id}_conversion_{conversion_key}"
 
 
 def imported_store_for_metadata(
diff --git a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py
index 5e0163ce12bb..7fae0bdcbdd3 100644
--- a/test/unit/data/model/test_model_store.py
+++ b/test/unit/data/model/test_model_store.py
@@ -545,6 +545,21 @@ def validate_invocation_collection_crate_directory(crate_directory):
         assert dataset in root["hasPart"]
 
 
+def test_export_history_with_missing_hid():
+    # The dataset's hid was used to compose the file name during the export, but it
+    # can be missing sometimes. We now use the dataset's encoded id instead.
+    app = _mock_app()
+    u, history, d1, d2, j = _setup_simple_cat_job(app)
+
+    # Remove hid from d1
+    d1.hid = None
+    app.commit()
+
+    temp_directory = mkdtemp()
+    with store.DirectoryModelExportStore(temp_directory, app=app, export_files="copy") as export_store:
+        export_store.export_history(history)
+
+
 def test_export_history_to_ro_crate(tmp_path):
     app = _mock_app()
     u, history, d1, d2, j = _setup_simple_cat_job(app)
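
Note (editorial, not part of the patch): a minimal standalone sketch of what the renamed helper produces after this change, for reviewers who want to see the resulting archive filenames. `FILENAME_VALID_CHARS` is re-declared here with an assumed value, and the encoded id below is a made-up example.

```python
from typing import Optional
import string

# Assumption: approximates Galaxy's FILENAME_VALID_CHARS allow-list.
FILENAME_VALID_CHARS = f"-_.() {string.ascii_letters}{string.digits}"


def get_export_dataset_filename(name: str, ext: str, encoded_id: str, conversion_key: Optional[str]) -> str:
    # Same logic as the patched helper: characters outside the allow-list
    # become underscores, and the dataset's encoded id (not its hid, which
    # can be None) is appended to the sanitized base name.
    base = "".join(c if c in FILENAME_VALID_CHARS else "_" for c in name)
    if not conversion_key:
        return f"{base}_{encoded_id}.{ext}"
    return f"{base}_{encoded_id}_conversion_{conversion_key}.{ext}"


# Hypothetical encoded id; a dataset named "my/data" would export as:
print(get_export_dataset_filename("my/data", "tabular", "f2db41e1fa331b3e", None))
# my_data_f2db41e1fa331b3e.tabular
```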