diff --git a/tests/fixtures/failed_job_state.json b/tests/fixtures/failed_job_state.json
index 64f84cca..a9abd53a 100644
--- a/tests/fixtures/failed_job_state.json
+++ b/tests/fixtures/failed_job_state.json
@@ -208,7 +208,7 @@
     },
     "activity_id": "nmdc:wfmag-11-g7msr323.1",
     "last_status": "Failed",
-    "done": true,
+    "done": false,
     "failed_count": 2,
     "start": "2024-09-16T19:33:32.562412+00:00",
     "end": "2024-09-16T21:52:12.873101+00:00",
diff --git a/tests/fixtures/mags_workflow_state.json b/tests/fixtures/mags_workflow_state.json
index 7cf173f0..f631ecca 100644
--- a/tests/fixtures/mags_workflow_state.json
+++ b/tests/fixtures/mags_workflow_state.json
@@ -208,7 +208,7 @@
     },
     "activity_id": "nmdc:wfmag-11-g7msr323.1",
     "last_status": "Succeeded",
-    "done": true,
+    "done": false,
     "failed_count": 0,
     "start": "2024-09-16T19:33:32.562412+00:00",
     "end": "2024-09-16T21:52:12.873101+00:00",
diff --git a/tests/test_watch_nmdc.py b/tests/test_watch_nmdc.py
index 98ea78a5..c4bd7106 100644
--- a/tests/test_watch_nmdc.py
+++ b/tests/test_watch_nmdc.py
@@ -264,11 +264,13 @@ def test_job_manager_prepare_and_cache_new_job_force(site_config, initial_state_
 
 
 def test_job_manager_get_finished_jobs(site_config, initial_state_file, fixtures_dir):
+    # Mock the URL and response
+    # Arrange - initial state has 1 failure and is not done
     fh = FileHandler(site_config, initial_state_file)
     jm = JobManager(site_config, fh)
-    # Add a job to the cache - mags is done and successful
+    # Add a finished job: finished job is not done, but has a last_status of Succeeded
     new_job_state = json.load(open(fixtures_dir / "mags_workflow_state.json"))
     assert new_job_state
     new_job = WorkflowJob(site_config, new_job_state)
@@ -295,22 +297,29 @@ def test_job_manager_get_finished_jobs(site_config, initial_state_file, fixtures
 
 
 def test_job_manager_process_successful_job(site_config, initial_state_file, fixtures_dir):
-    # Arrange
-    fh = FileHandler(site_config, initial_state_file)
-    jm = JobManager(site_config, fh)
-    new_job_state = json.load(open(fixtures_dir / "mags_workflow_state.json"))
-    assert new_job_state
-    new_job = WorkflowJob(site_config, new_job_state)
-    jm.job_cache.append(new_job)
-    # Act
-    db = jm.process_successful_job(new_job)
-    # Assert
-    assert db
-    assert isinstance(db, Database)
-    assert new_job.done
-    assert new_job.job_status == "Succeeded"
-    # cleanup
-    jm.job_cache = []
+    # mock job.job.get_job_metadata - use fixture cromwell/succeded_metadata.json
+    job_metadata = json.load(open(fixtures_dir / "mags_job_metadata.json"))
+    with patch("nmdc_automation.workflow_automation.wfutils.CromwellRunner.get_job_metadata") as mock_get_metadata:
+        mock_get_metadata.return_value = job_metadata
+
+
+
+        # Arrange
+        fh = FileHandler(site_config, initial_state_file)
+        jm = JobManager(site_config, fh)
+        new_job_state = json.load(open(fixtures_dir / "mags_workflow_state.json"))
+        assert new_job_state
+        new_job = WorkflowJob(site_config, new_job_state)
+        jm.job_cache.append(new_job)
+        # Act
+        db = jm.process_successful_job(new_job)
+        # Assert
+        assert db
+        assert isinstance(db, Database)
+        assert new_job.done
+        assert new_job.job_status == "Succeeded"
+        # cleanup
+        jm.job_cache = []
 
 
 def test_job_manager_process_failed_job(site_config, initial_state_file, fixtures_dir):
diff --git a/tests/test_wfutils.py b/tests/test_wfutils.py
index 6e05344f..37ed404a 100644
--- a/tests/test_wfutils.py
+++ b/tests/test_wfutils.py
@@ -13,8 +13,18 @@
 
 import requests
 import tempfile
 from unittest import mock
+import importlib.resources
+import yaml
+from functools import lru_cache
+import linkml.validator
+from linkml_runtime.dumpers import yaml_dumper
+@lru_cache(maxsize=None)
+def get_nmdc_materialized():
+    with importlib.resources.open_text("nmdc_schema", "nmdc_materialized_patterns.yaml") as f:
+        return yaml.safe_load(f)
+
 
 
 def test_workflow_job(site_config, fixtures_dir):
     workflow_state = json.load(open(fixtures_dir / "mags_workflow_state.json"))
@@ -75,9 +85,6 @@ def test_cromwell_job_runner_get_job_metadata(site_config, fixtures_dir, mock_cr
     assert job_runner.metadata == metadata
 
 
-
-
-
 def test_workflow_job_as_workflow_execution_dict(site_config, fixtures_dir):
     workflow_state = json.load(open(fixtures_dir / "mags_workflow_state.json"))
     job_metadata = json.load(open(fixtures_dir / "mags_job_metadata.json"))
@@ -275,8 +282,7 @@ def test_workflow_job_data_objects_and_execution_record_mags(site_config, fixtur
     assert data_objects
     for data_object in data_objects:
         assert isinstance(data_object, DataObject)
-    wfe_dict = job.make_workflow_execution_record(data_objects)
-    wfe = workflow_process_factory(wfe_dict)
+    wfe = job.make_workflow_execution(data_objects)
     assert isinstance(wfe, MagsAnalysis)
     # attributes from final_stats_json
     assert wfe.mags_list
@@ -286,7 +292,9 @@ def test_workflow_job_data_objects_and_execution_record_mags(site_config, fixtur
         assert mag.eukaryotic_evaluation
         assert isinstance(mag.eukaryotic_evaluation, EukEval)
         assert mag.eukaryotic_evaluation.completeness
+        assert isinstance(mag.eukaryotic_evaluation.completeness, float)
         assert mag.eukaryotic_evaluation.contamination
+        assert isinstance(mag.eukaryotic_evaluation.contamination, float)
         assert mag.eukaryotic_evaluation.ncbi_lineage
         assert mag.eukaryotic_evaluation.ncbi_lineage
     # check that the other final_stats props are there
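
Note: the `@@ -13,8 +13,18 @@` hunk in `tests/test_wfutils.py` adds `linkml.validator` and `yaml_dumper` imports that none of the hunks shown here exercise, presumably to back schema validation of generated records elsewhere in the file. A minimal sketch of how the cached `get_nmdc_materialized()` loader added above could support such a check, assuming `wfe` is a `MagsAnalysis` instance returned by `job.make_workflow_execution(data_objects)`; the helper name and target class are illustrative, not taken from this diff:

```python
import yaml
import linkml.validator
from linkml_runtime.dumpers import yaml_dumper


def assert_valid_against_nmdc_schema(wfe, target_class="MagsAnalysis"):
    # Hypothetical helper, not part of this diff.
    # Round-trip the generated record to a plain dict...
    instance = yaml.safe_load(yaml_dumper.dumps(wfe))
    # ...then validate it against the materialized NMDC schema loaded by the
    # lru_cache'd get_nmdc_materialized() helper defined in the hunk above.
    report = linkml.validator.validate(instance, get_nmdc_materialized(), target_class)
    assert not report.results, [result.message for result in report.results]
```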