diff --git a/nchs_mortality/delphi_nchs_mortality/constants.py b/nchs_mortality/delphi_nchs_mortality/constants.py
index 4e8cdc144..800444e58 100644
--- a/nchs_mortality/delphi_nchs_mortality/constants.py
+++ b/nchs_mortality/delphi_nchs_mortality/constants.py
@@ -25,7 +25,6 @@
     "prop"
 ]
 INCIDENCE_BASE = 100000
-GEO_RES = ["state", "nation"]
 
 # this is necessary as a delimiter in the f-string expressions we use to
 # construct detailed error reports
diff --git a/nchs_mortality/delphi_nchs_mortality/run.py b/nchs_mortality/delphi_nchs_mortality/run.py
index 05d6319a4..01ea0c8f5 100644
--- a/nchs_mortality/delphi_nchs_mortality/run.py
+++ b/nchs_mortality/delphi_nchs_mortality/run.py
@@ -14,7 +14,7 @@
 
 from .archive_diffs import arch_diffs
 from .constants import (METRICS, SENSOR_NAME_MAP,
-                        SENSORS, INCIDENCE_BASE, GEO_RES)
+                        SENSORS, INCIDENCE_BASE)
 from .pull import pull_nchs_mortality_data
 
 
@@ -87,31 +87,29 @@ def run_module(params: Dict[str, Any]):
 
     stats = []
     df_pull = pull_nchs_mortality_data(token, test_file)
-    for geo in GEO_RES:
-        for metric in METRICS:
-            if metric == 'percent_of_expected_deaths':
-                logger.info("Generating signal and exporting to CSV",
-                            metric = metric)
-                df = df_pull.copy()
-                if geo == "nation":
-                    df = county_to_nation(df)
-                df["val"] = df[metric]
-                df["se"] = np.nan
-                df["sample_size"] = np.nan
-                # df = df[~df["val"].isnull()]
-                df = add_nancodes(df)
-                sensor_name = "_".join([SENSOR_NAME_MAP[metric]])
-                dates = create_export_csv(
-                    df,
-                    geo_res=geo,
-                    export_dir=daily_export_dir,
-                    start_date=datetime.strptime(export_start_date, "%Y-%m-%d"),
-                    sensor=sensor_name,
-                    weekly_dates=True
-                )
-                if len(dates) > 0:
-                    stats.append((max(dates), len(dates)))
-            else:
+    for metric in METRICS:
+        if metric == 'percent_of_expected_deaths':
+            logger.info("Generating signal and exporting to CSV",
+                        metric = metric)
+            df = df_pull.copy()
+            df["val"] = df[metric]
+            df["se"] = np.nan
+            df["sample_size"] = np.nan
+            # df = df[~df["val"].isnull()]
+            df = add_nancodes(df)
+            sensor_name = "_".join([SENSOR_NAME_MAP[metric]])
+            dates = create_export_csv(
+                df,
+                geo_res="state",
+                export_dir=daily_export_dir,
+                start_date=datetime.strptime(export_start_date, "%Y-%m-%d"),
+                sensor=sensor_name,
+                weekly_dates=True
+            )
+            if len(dates) > 0:
+                stats.append((max(dates), len(dates)))
+        else:
+            for geo in ["state", "nation"]:
                 for sensor in SENSORS:
                     logger.info("Generating signal and exporting to CSV",
                                 metric = metric,
diff --git a/nchs_mortality/tests/test_run.py b/nchs_mortality/tests/test_run.py
index 342525a47..6fa3ad48d 100644
--- a/nchs_mortality/tests/test_run.py
+++ b/nchs_mortality/tests/test_run.py
@@ -43,7 +43,8 @@ def test_output_files_exist(self, run_as_module, date):
         for d in dates:
             for metric in metrics:
                 if metric == "deaths_percent_of_expected":
-                    expected_files += [f"weekly_{d}_{geo}_{metric}.csv"]
+                    # No nation aggregation for this metric
+                    expected_files += [f"weekly_{d}_state_{metric}.csv"]
                 else:
                     for sensor in sensors:
                         expected_files += [f"weekly_{d}_{geo}_{metric}_{sensor}.csv"]