Skip to content

Commit

Permalink
Merge pull request #1182 from cmu-delphi/release/v3.2.2
Browse files Browse the repository at this point in the history
Release v3.2.2
  • Loading branch information
duanecmu authored Jun 16, 2022
2 parents d5373bd + 1d26d0a commit 6e8dac5
Show file tree
Hide file tree
Showing 10 changed files with 277 additions and 56 deletions.
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "www-covidcast",
"version": "3.2.1",
"version": "3.2.2",
"private": true,
"license": "MIT",
"description": "",
Expand Down
8 changes: 8 additions & 0 deletions scripts/generateDescriptions.js
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,14 @@ function convertDescriptions(code) {
casesOrDeathSignals: parseNestedOrString,
mapTitleText: parseNestedOrString,
unitShort: (v) => v || '',
overrides: (v) =>
parseObject(v, {
county: parseObject,
state: parseObject,
nation: parseObject,
hhs: parseObject,
msa: parseObject,
}),
ageStratifications: parseArray,
});
});
Expand Down
137 changes: 123 additions & 14 deletions src/data/fetchTriple.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,16 +42,8 @@ function toGeoPair(
function toSourceSignalPair<S extends { id: string; signal: string; valueScaleFactor?: number }>(
transfer: (keyof EpiDataJSONRow)[],
mixinValues: Partial<EpiDataRow>,
sensor: S | readonly S[],
sensor: readonly S[],
) {
if (!isArray(sensor)) {
mixinValues.source = sensor.id;
mixinValues.signal = sensor.signal;
return {
factor: sensor.valueScaleFactor ?? 1,
sourceSignalPairs: SourceSignalPair.from(sensor),
};
}
const grouped = groupBySource(sensor);

let factor: number | ((row: EpiDataRow) => number);
Expand Down Expand Up @@ -97,8 +89,58 @@ function toSourceSignalPair<S extends { id: string; signal: string; valueScaleFa
};
}

function resolveBackwardOverrides(
  rows: EpiDataRow[],
  overrides: { level: RegionLevel; fromId: string; fromSignal: string; toId: string; toSignal: string }[],
): EpiDataRow[] {
  // Nothing was mapped forward, so there is nothing to rename back.
  if (overrides.length === 0) {
    return rows;
  }
  const keyOf = (id: string, signal: string, level: RegionLevel) => `${id}@${signal}@${level}`;
  // Index each override by the (source, signal, level) triple it was mapped TO,
  // since that is what the API response rows will carry.
  const byTarget = new Map(overrides.map((o) => [keyOf(o.toId, o.toSignal, o.level), o]));
  // Rewrite rows in place so callers see the sensor they originally asked for.
  for (const row of rows) {
    const hit = byTarget.get(keyOf(row.source, row.signal, row.geo_type));
    if (hit) {
      row.source = hit.fromId;
      row.signal = hit.fromSignal;
    }
  }
  return rows;
}

function mapOverrides(
  overrides: { level: RegionLevel; fromId: string; fromSignal: string; toId: string; toSignal: string }[],
  typeSensors: readonly { id: string; signal: string; valueScaleFactor?: number }[],
) {
  // Without overrides the sensor list passes through untouched (same array identity).
  if (overrides.length === 0) {
    return typeSensors;
  }
  // Replace each sensor by its override target (first matching override wins),
  // carrying the original sensor's scale factor along.
  return typeSensors.map((sensor) => {
    const match = overrides.find((o) => o.fromId === sensor.id && o.fromSignal === sensor.signal);
    if (!match) {
      return sensor;
    }
    return {
      id: match.toId,
      signal: match.toSignal,
      valueScaleFactor: sensor.valueScaleFactor,
    };
  });
}

export default function fetchTriple<
S extends { id: string; signal: string; format: Sensor['format']; isWeeklySignal: boolean },
S extends {
id: string;
signal: string;
format: Sensor['format'];
isWeeklySignal: boolean;
overrides?: Sensor['overrides'];
valueScaleFactor?: number;
},
>(
sensor: S | readonly S[],
region: Region | RegionLevel | readonly Region[],
Expand All @@ -120,6 +162,31 @@ export default function fetchTriple<
return asOf;
}

function resolveForwardOverrides(geoPairs: GeoPair | GeoPair[], typeSensors: readonly S[]) {
  // Distinct region levels covered by the requested geo pairs.
  const pairList = Array.isArray(geoPairs) ? geoPairs : [geoPairs];
  const levels = [...new Set<RegionLevel>(pairList.map((p) => p.level))];
  // Collect one forward mapping per (sensor, level) that declares an override.
  const overrides: { level: RegionLevel; fromId: string; fromSignal: string; toId: string; toSignal: string }[] = [];
  for (const s of typeSensors) {
    const sensorOverrides = s.overrides;
    if (!sensorOverrides) {
      continue;
    }
    for (const level of levels) {
      const target = sensorOverrides[level];
      if (target != null) {
        overrides.push({
          level,
          fromId: s.id,
          fromSignal: s.signal,
          toId: target.id,
          toSignal: target.signal,
        });
      }
    }
  }
  return { overrides, levels };
}

function fetchImpl(
type: 'day' | 'week',
geoPairs: GeoPair | GeoPair[],
Expand All @@ -128,7 +195,6 @@ export default function fetchTriple<
typedMixinValues: Partial<EpiDataRow>,
) {
typedMixinValues.time_type = type;
const { sourceSignalPairs, factor } = toSourceSignalPair(typedTransfer, typedMixinValues, typeSensors);
if (date instanceof Date) {
// single level and single date
typedMixinValues.time_value = type === 'day' ? toTimeValue(date) : toTimeWeekValue(date);
Expand All @@ -137,9 +203,52 @@ export default function fetchTriple<
} else {
typedTransfer.push('time_value');
}
return callAPI(type, sourceSignalPairs, geoPairs, new TimePair(type, date), typedTransfer, {
asOf: fixAsOf(),
}).then((rows) => parseData(rows, typedMixinValues, factor));
const timePair = new TimePair(type, date);

const { overrides, levels } = resolveForwardOverrides(geoPairs, typeSensors);

if (overrides.length === 0 || levels.length === 1) {
// simple case: none or direct replacement
const mappedSensors = mapOverrides(overrides, typeSensors);
const { sourceSignalPairs, factor } = toSourceSignalPair(typedTransfer, typedMixinValues, mappedSensors);
return callAPI(type, sourceSignalPairs, geoPairs, timePair, typedTransfer, {
asOf: fixAsOf(),
}).then((rows) => resolveBackwardOverrides(parseData(rows, typedMixinValues, factor), overrides));
}

// multiple calls one for each mapped level
const mappedLevels = Array.from(new Set(overrides.map((d) => d.level)));
const calls: Promise<EpiDataRow[]>[] = [];
const geo = Array.isArray(geoPairs) ? geoPairs : [geoPairs];
for (const mappedLevel of mappedLevels) {
// compute subset of what needs to be mapped and can be transferred at once
const levelOverrides = overrides.filter((d) => d.level === mappedLevel);
const levelGeo = geo.filter((d) => d.level === mappedLevel);

const mappedSensors = mapOverrides(levelOverrides, typeSensors);
const levelTransfer = typedTransfer.slice();
const levelMixins = { ...typedMixinValues };
const { sourceSignalPairs, factor } = toSourceSignalPair(levelTransfer, levelMixins, mappedSensors);
calls.push(
callAPI(type, sourceSignalPairs, levelGeo, timePair, levelTransfer, {
asOf: fixAsOf(),
}).then((rows) => resolveBackwardOverrides(parseData(rows, levelMixins, factor), levelOverrides)),
);
}
const unmappedLevels = levels.filter((d) => !mappedLevels.includes(d));
if (unmappedLevels.length > 0) {
// compute subset of what needs to be mapped and can be transferred at once
const levelGeo = geo.filter((d) => unmappedLevels.includes(d.level));
const levelTransfer = typedTransfer.slice();
const levelMixins = { ...typedMixinValues };
const { sourceSignalPairs, factor } = toSourceSignalPair(levelTransfer, levelMixins, typeSensors);
calls.push(
callAPI(type, sourceSignalPairs, levelGeo, timePair, levelTransfer, {
asOf: fixAsOf(),
}).then((rows) => parseData(rows, levelMixins, factor)),
);
}
return Promise.all(calls).then((r) => ([] as EpiDataRow[]).concat(...r));
}

const [day, week] = splitDailyWeekly(sensor);
Expand Down
2 changes: 2 additions & 0 deletions src/data/sensor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,8 @@ export interface Sensor {

readonly formatSpecifier: string;
formatValue(v?: number | null, enforceSign?: boolean): string;

readonly overrides?: Partial<Record<RegionLevel, { id: string; signal: string }>>;
}

function determineHighValuesAre(sensor: {
Expand Down
135 changes: 117 additions & 18 deletions src/data/trend.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import type { EpiDataRow } from './fetchData';
import type { Sensor } from '../stores/constants';
import { callTrendAPI, EpiDataTrendRow, FieldSpec } from './api';
import { GeoPair, SourceSignalPair } from './apimodel';
import type { Region } from './regions';
import type { Region, RegionLevel } from './regions';
import { splitDailyWeekly } from './sensor';
import type { TimeFrame } from './TimeFrame';
import { parseAPITime, toTimeValue } from './utils';
Expand Down Expand Up @@ -119,32 +119,131 @@ export function asSensorTrend(
return t;
}

export function fetchTrend(
signal: Sensor | Sensor[],
region: Region | Region[],
export function fetchTrendSR(
signal: Sensor,
region: Region,
date: Date,
window: TimeFrame,
fields?: FieldSpec<EpiDataTrendRow>,
): Promise<EpiDataTrendRow[]> {
const geo = Array.isArray(region) ? GeoPair.fromArray(region) : GeoPair.from(region);
if (!Array.isArray(signal)) {
const geo = GeoPair.from(region);
let source = SourceSignalPair.from(signal);
if (signal.overrides && signal.overrides[region.level]) {
// need to map but no need to unmap since not transferred
source = SourceSignalPair.from(signal.overrides[region.level]!);
}
return callTrendAPI(
signal.isWeeklySignal ? 'week' : 'day',
source,
geo,
date,
window,
signal.isWeeklySignal ? 1 : 7,
{ exclude: ['geo_type', 'geo_value', 'signal_signal', 'signal_source'] },
);
}

export function fetchTrendR(
  signal: Sensor,
  regions: Region[],
  date: Date,
  window: TimeFrame,
): Promise<EpiDataTrendRow[]> {
  const timeType = signal.isWeeklySignal ? 'week' : 'day';
  const step = signal.isWeeklySignal ? 1 : 7;
  const requests: Promise<EpiDataTrendRow[]>[] = [];

  // One request per region level that has an override, querying the override's
  // source/signal for just the regions of that level.
  const overrides = signal.overrides ?? {};
  for (const level of Object.keys(overrides) as RegionLevel[]) {
    const matching = regions.filter((r) => r.level === level);
    if (matching.length === 0) {
      continue;
    }
    requests.push(
      callTrendAPI(timeType, SourceSignalPair.from(overrides[level]!), GeoPair.fromArray(matching), date, window, step, {
        exclude: ['signal_signal', 'signal_source'],
      }),
    );
  }

  // A single request covering every region whose level has no override.
  const plain = regions.filter((r) => overrides[r.level] == null);
  if (plain.length > 0) {
    requests.push(
      callTrendAPI(timeType, SourceSignalPair.from(signal), GeoPair.fromArray(plain), date, window, step, {
        exclude: ['signal_signal', 'signal_source'],
      }),
    );
  }

  if (requests.length === 1) {
    return requests[0];
  }
  return Promise.all(requests).then((parts) => ([] as EpiDataTrendRow[]).concat(...parts));
}

export function fetchTrendS(
signal: Sensor[],
region: Region,
date: Date,
window: TimeFrame,
): Promise<EpiDataTrendRow[]> {
const geo = GeoPair.from(region);
const fields: FieldSpec<EpiDataTrendRow> = { exclude: ['geo_type', 'geo_value'] };

function fetchMultiSignals(type: 'day' | 'week', sensors: Sensor[]) {
if (sensors.length === 0) {
return [];
}
const lookup = new Map<string, Sensor>();
const mapped = sensors.map((s) => {
const override = s.overrides?.[region.level];
if (override) {
lookup.set(`${override.id}@${override.signal}`, s);
// map forward
return override;
}
return s;
});
return callTrendAPI(
signal.isWeeklySignal ? 'week' : 'day',
SourceSignalPair.from(signal),
type,
SourceSignalPair.fromArray(mapped),
geo,
date,
window,
signal.isWeeklySignal ? 1 : 7,
type == 'week' ? 1 : 7,
fields,
);
).then((rows) => {
if (lookup.size === 0) {
return rows;
}
// map back
for (const row of rows) {
const key = `${row.signal_source}@${row.signal_signal}`;
const base = lookup.get(key);
if (base) {
row.signal_source = base.id;
row.signal_signal = base.signal;
}
}
return rows;
});
}
return Promise.all(
splitDailyWeekly(signal).map(({ type, sensors }) =>
sensors.length === 0
? []
: callTrendAPI(type, SourceSignalPair.fromArray(sensors), geo, date, window, type == 'week' ? 1 : 7, fields),
),
).then((r) => ([] as EpiDataTrendRow[]).concat(...r));

return Promise.all(splitDailyWeekly(signal).map(({ type, sensors }) => fetchMultiSignals(type, sensors))).then((r) =>
([] as EpiDataTrendRow[]).concat(...r),
);
}

export function computeLatest(
Expand Down
Loading

0 comments on commit 6e8dac5

Please sign in to comment.