diff --git a/dev/backfill.ts b/dev/backfill.ts
index 017eb3886..5a9d05339 100644
--- a/dev/backfill.ts
+++ b/dev/backfill.ts
@@ -37,7 +37,6 @@ async function scanApi() {
         // unretryable steam error
         // if (err?.result?.status === 2) {
         //   nextSeqNum += 1;
-        //   redisCount(redis, 'skip_seq_num');
         //   // continue with next seq num
         //   continue;
         // } else {
diff --git a/dev/legacyArchive.ts b/dev/legacyArchive.ts
index f13abe85f..3b2d4e370 100644
--- a/dev/legacyArchive.ts
+++ b/dev/legacyArchive.ts
@@ -1,6 +1,5 @@
 import crypto from 'crypto';
 import cassandra from '../store/cassandra';
-import redis from '../store/redis';
 import config from '../config';
 import db from '../store/db';
 import { deserialize, redisCount } from '../util/utility';
@@ -90,7 +89,7 @@ async function doArchiveFromLegacy(matchId: number) {
     JSON.stringify({ ...match, players: match.players || playerMatches }),
   );
   const result = await matchArchive.archivePut(matchId.toString(), blob);
-  redisCount(redis, 'match_archive_write');
+  redisCount('match_archive_write');
   if (result) {
     // Mark the match archived
     await db.raw(
diff --git a/routes/api.ts b/routes/api.ts
index 77d9acca0..227520b25 100644
--- a/routes/api.ts
+++ b/routes/api.ts
@@ -2,7 +2,6 @@ import { Router } from 'express';
 import { FilterType, filterDeps } from '../util/filter';
 import spec from './spec';
 import db from '../store/db';
-import redis from '../store/redis';
 import { alwaysCols } from './playerFields';
 import { queryParamToArray, redisCount } from '../util/utility';
 
@@ -50,7 +49,7 @@ api.use('/players/:account_id/:info?', async (req, res, cb) => {
   const isSelf =
     Number(req.user?.account_id) === Number(req.params.account_id);
   if (isSelf) {
-    redisCount(redis, 'self_profile_view');
+    redisCount('self_profile_view');
   }
   const isPrivate =
     Boolean(privacy.rows[0]?.fh_unavailable) &&
diff --git a/routes/keyManagement.ts b/routes/keyManagement.ts
index cfdf928d4..511bdf4f8 100644
--- a/routes/keyManagement.ts
+++ b/routes/keyManagement.ts
@@ -196,7 +196,7 @@ keys
         Number(req.user?.account_id) >
         threshold.account_id - Number(config.API_KEY_GEN_THRESHOLD);
       if (fail) {
-        redisCount(redis, 'gen_api_key_invalid');
+        redisCount('gen_api_key_invalid');
         return res.sendStatus(400).json({ error: 'Failed validation' });
       }
     }
diff --git a/routes/spec.ts b/routes/spec.ts
index df1ba0f6f..308997cb1 100644
--- a/routes/spec.ts
+++ b/routes/spec.ts
@@ -950,7 +950,7 @@ Without a key, you can make 2,000 free calls per day at a rate limit of 60 reque
         config.NODE_ENV !== 'development' &&
         (await redis.get('fh_queue:' + playerId))
       ) {
-        redisCount(redis, 'fullhistory_skip');
+        redisCount('fullhistory_skip');
         return res.json({ length: 0 });
       }
       const length = await addJob({
@@ -1517,12 +1517,12 @@ Without a key, you can make 2,000 free calls per day at a rate limit of 60 reque
       // We validated the ID in middleware
       const matchId = req.params.match_id;
       // Count this request
-      redisCount(redis, 'request');
-      redisCountDistinct(redis, 'distinct_request', matchId);
+      redisCount('request');
+      redisCountDistinct('distinct_request', matchId);
       let priority = 1;
       if (req.query.api_key) {
         priority = 1;
-        redisCount(redis, 'request_api_key');
+        redisCount('request_api_key');
         redis.zincrby(
           'request_usage_count',
           1,
@@ -1537,7 +1537,7 @@ Without a key, you can make 2,000 free calls per day at a rate limit of 60 reque
       if (req.headers.origin === config.UI_HOST) {
         // Give UI requests priority
         priority = 0;
-        redisCount(redis, 'request_ui');
+        redisCount('request_ui');
       }
       if (
         req.user?.account_id &&
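The hunks above and below all make the same mechanical change: `redisCount` and `redisCountDistinct` no longer take a Redis client (or `null`) as their first argument. A minimal sketch of the call-site migration, assuming only the helpers exported from `util/utility` (the handler below is illustrative, not code from this diff):

```ts
import { redisCount, redisCountDistinct } from '../util/utility';

// Illustrative handler, not part of this diff.
async function countMatchRequest(matchId: string) {
  // Before: redisCount(redis, 'request');
  // After: the helper resolves the client itself, so only the metric is named.
  redisCount('request');
  // Distinct counts keep the value to deduplicate as the second argument.
  redisCountDistinct('distinct_request', matchId);
}
```

Call sites continue to treat the counters as fire-and-forget: the returned promises are not awaited, matching the existing usage in `routes/spec.ts` and the services.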
diff --git a/store/buildMatch.ts b/store/buildMatch.ts
index 4be7cd6cb..b950fdf1a 100644
--- a/store/buildMatch.ts
+++ b/store/buildMatch.ts
@@ -101,7 +101,7 @@ async function buildMatch(
   const key = `match:${matchId}`;
   const reply = await redis.get(key);
   if (reply) {
-    redisCount(redis, 'match_cache_hit');
+    redisCount('match_cache_hit');
     return JSON.parse(reply);
   }
 
@@ -114,7 +114,7 @@ async function buildMatch(
     return null;
   }
   match.od_data = odData;
-  redisCount(redis, 'build_match');
+  redisCount('build_match');
   let playersMatchData: (Player | ParsedPlayer)[] = match.players;
   // Get names, last login for players from DB
   playersMatchData = await Promise.all(
diff --git a/store/getApiData.ts b/store/getApiData.ts
index a25a2be13..cae6844ce 100644
--- a/store/getApiData.ts
+++ b/store/getApiData.ts
@@ -1,10 +1,8 @@
 import config from '../config';
-import { generateJob, getSteamAPIData, redisCount } from '../util/utility';
+import { redisCount } from '../util/utility';
 import { Archive } from './archive';
 import cassandra from './cassandra';
-import { insertMatch } from './insert';
 import { getPGroup, type ApiMatch } from './pgroup';
-import redis from './redis';
 const blobArchive = config.ENABLE_BLOB_ARCHIVE ? new Archive('blob') : null;
 
 /**
@@ -23,7 +21,7 @@ export async function readApiData(matchId: number, noBlobStore?: boolean): Promi
   if (!data && blobArchive && !noBlobStore) {
     const archive = await blobArchive.archiveGet(`${matchId}_api`);
     if (archive) {
-      redisCount(redis, 'blob_archive_read');
+      redisCount('blob_archive_read');
     }
     data = archive ? JSON.parse(archive.toString()) as ApiMatch : undefined;
   }
diff --git a/store/getArchivedData.ts b/store/getArchivedData.ts
index b08ccc061..ad199af7a 100644
--- a/store/getArchivedData.ts
+++ b/store/getArchivedData.ts
@@ -2,7 +2,6 @@ import config from '../config';
 import { redisCount } from '../util/utility';
 import { Archive } from './archive';
 import db from './db';
-import redis from './redis';
 
 const matchArchive = config.ENABLE_MATCH_ARCHIVE ? new Archive('match') : null;
 const playerArchive = config.ENABLE_PLAYER_ARCHIVE ? new Archive('player') : null;
@@ -50,7 +49,7 @@ export async function tryReadArchivedMatch(
       ? JSON.parse(blob.toString())
       : null;
     if (result) {
-      redisCount(redis, 'match_archive_read');
+      redisCount('match_archive_read');
       return result;
     }
   } catch (e) {
diff --git a/store/getGcData.ts b/store/getGcData.ts
index bff26975b..d5b17c323 100644
--- a/store/getGcData.ts
+++ b/store/getGcData.ts
@@ -82,7 +82,7 @@ async function saveGcData(
     throw new Error('invalid data');
   }
   // Count successful calls
-  redisCount(redis, 'retriever');
+  redisCount('retriever');
   redis.hincrby('retrieverSuccessSteamIDs', steamid, 1);
   redis.expireat(
     'retrieverSuccessSteamIDs',
@@ -194,7 +194,7 @@ export async function getOrFetchGcData(
   // Check if we have gcdata cached
   const saved = await readGcData(matchId);
   if (saved) {
-    redisCount(redis, 'regcdata');
+    redisCount('regcdata');
     if (config.DISABLE_REGCDATA) {
       // If high load, we can disable refetching gcdata
       return { data: saved, error: null };
diff --git a/store/getMeta.ts b/store/getMeta.ts
index 0ac42022d..83adf5ff7 100644
--- a/store/getMeta.ts
+++ b/store/getMeta.ts
@@ -1,9 +1,6 @@
 import ProtoBuf from 'protobufjs';
-import axios from 'axios';
-import fs from 'fs';
 import { exec } from 'child_process';
 import { promisify } from 'util';
-import redis from './redis';
 import { buildReplayUrl, redisCount } from '../util/utility';
 import { readGcData } from './getGcData';
 const execPromise = promisify(exec);
@@ -32,7 +29,7 @@ export async function getMeta(matchId: number) {
   const message = await getMetaFromUrl(url);
   if (message) {
     // Count the number of meta parses
-    redisCount(redis, 'meta_parse');
+    redisCount('meta_parse');
   }
   // Return the info, it may be null if we failed at any step or meta isn't available
   return message;
diff --git a/store/getParsedData.ts b/store/getParsedData.ts
index 259e2ad18..086d29753 100644
--- a/store/getParsedData.ts
+++ b/store/getParsedData.ts
@@ -7,7 +7,6 @@ import { Archive } from './archive';
 import cassandra from './cassandra';
 import db from './db';
 import { insertMatch } from './insert';
-import redis from './redis';
 import axios from 'axios';
 
 const blobArchive = config.ENABLE_BLOB_ARCHIVE ? new Archive('blob') : null;
@@ -108,7 +107,7 @@ export async function getOrFetchParseData(
 }> {
   const saved = await readParseData(matchId);
   if (saved) {
-    redisCount(redis, 'reparse');
+    redisCount('reparse');
     if (config.DISABLE_REPARSE) {
       // If high load, we can disable parsing already parsed matches
       return { data: saved, skipped: true, error: null };
diff --git a/store/insert.ts b/store/insert.ts
index 69109f622..479f3e99b 100644
--- a/store/insert.ts
+++ b/store/insert.ts
@@ -454,29 +454,26 @@ export async function insertMatch(
     }] [ended: ${moment.unix(endedAt ?? 0).fromNow()}] ${match.match_id}`;
     redis.publish(options.type, message);
     if (options.type === 'parsed') {
-      redisCount(redis, 'parser');
+      redisCount('parser');
     }
     if (options.origin === 'scanner' && options.type === 'api') {
-      redisCount(redis, 'added_match');
+      redisCount('added_match');
       match.players
         .filter((p) => p.account_id)
        .forEach(async (p) => {
          if (p.account_id) {
            redisCountDistinct(
-              redis,
              'distinct_match_player',
              p.account_id.toString(),
            );
            const visitTime = Number(await redis.zscore('visitors', p.account_id.toString()));
            if (visitTime) {
              redisCountDistinct(
-                redis,
                'distinct_match_player_user',
                p.account_id.toString(),
              );
              if (visitTime > Number(moment().subtract(30, 'day').format('X'))) {
                redisCountDistinct(
-                  redis,
                  'distinct_match_player_recent_user',
                  p.account_id.toString(),
                );
@@ -634,7 +631,7 @@ export async function insertMatch(
   if (!doParse) {
     return null;
   }
-  redisCount(redis, 'auto_parse');
+  redisCount('auto_parse');
   let priority = 5;
   if (isProLeague) {
     priority = -1;
diff --git a/store/queries.ts b/store/queries.ts
index e48729143..45e95c661 100644
--- a/store/queries.ts
+++ b/store/queries.ts
@@ -234,7 +234,7 @@ export async function getPlayerMatchesWithMetadata(
     // User disabled public match history from Dota, so don't return matches
     return [[], null];
   }
-  redisCount(redis, 'player_matches');
+  redisCount('player_matches');
   const columns = await getCassandraColumns('player_caches');
   const sanitizedProject = queryObj.project.filter((f: string) => columns[f]);
   const projection = queryObj.projectAll ? ['*'] : sanitizedProject;
@@ -250,7 +250,7 @@
     ? await readCachedPlayerMatches(accountId, projection)
     : undefined;
   if (cache?.length) {
-    redisCountDistinct(redis, 'distinct_player_cache', accountId.toString());
+    redisCountDistinct('distinct_player_cache', accountId.toString());
     await redis.zadd('player_matches_visit', moment().format('X'), accountId);
     // Keep some number of recent players visited for auto-cache
     await redis.zremrangebyrank('player_matches_visit', '0', '-50001');
@@ -345,11 +345,11 @@ async function readCachedPlayerMatches(
   );
   const result = rows[0]?.blob;
   if (result) {
-    redisCount(redis, 'player_cache_hit');
+    redisCount('player_cache_hit');
     if (
       await isAutoCachePlayer(redis, accountId)
     ) {
-      redisCount(redis, 'auto_player_cache_hit');
+      redisCount('auto_player_cache_hit');
     }
     const output = JSON.parse(gunzipSync(result).toString());
     // Remove columns not asked for
@@ -367,17 +367,17 @@
       'NX',
     );
     if (!lock) {
-      redisCount(redis, 'player_cache_wait');
+      redisCount('player_cache_wait');
       // console.log('[PLAYERCACHE] waiting for lock on %s', accountId);
       // Couldn't acquire the lock, wait and try again
       await new Promise((resolve) => setTimeout(resolve, 1000));
       return readCachedPlayerMatches(accountId, project);
     }
-    redisCount(redis, 'player_cache_miss');
+    redisCount('player_cache_miss');
     if (
       await isAutoCachePlayer(redis, accountId)
     ) {
-      redisCount(redis, 'auto_player_cache_miss');
+      redisCount('auto_player_cache_miss');
     }
     const result = await populateCache(accountId, project);
     // Release the lock
@@ -403,7 +403,7 @@ export async function populateCache(
     //   all.length,
     //   zip.length,
     // );
-    redisCount(redis, 'player_cache_write');
+    redisCount('player_cache_write');
     await cassandra.execute(
       `INSERT INTO player_temp(account_id, blob) VALUES(?, ?) USING TTL ?`,
       [accountId, zip, Number(config.PLAYER_CACHE_SECONDS)],
@@ -722,7 +722,7 @@ export async function getMatchDataFromBlobWithMetadata(
   if (!archived && !api) {
     // Use this event to count the number of failed requests
     // Could be due to missing data or invalid ID--need to analyze
-    redisCount(redis, 'steam_api_backfill');
+    redisCount('steam_api_backfill');
     return [null, null];
   }
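The `player_cache_wait` / `player_cache_miss` counters above sit inside a lock-and-retry guard: whichever worker wins the `SET ... NX` lock populates the cache while other readers sleep a second and re-read. A condensed sketch of that control flow under assumed names (`lockKey`, `reread`, `populate`, the 10-second lock TTL, and the `DEL` release are illustrative; the real key and release step are outside the visible hunks):

```ts
import redis from '../store/redis';
import { redisCount } from '../util/utility';

// Condensed, illustrative sketch of the guard around populateCache.
async function readOrPopulate<T>(
  lockKey: string,
  reread: () => Promise<T>,
  populate: () => Promise<T>,
): Promise<T> {
  const lock = await redis.set(lockKey, '1', 'EX', 10, 'NX');
  if (!lock) {
    // Another worker is already filling the cache; wait and read again.
    redisCount('player_cache_wait');
    await new Promise((resolve) => setTimeout(resolve, 1000));
    return reread();
  }
  redisCount('player_cache_miss');
  try {
    return await populate();
  } finally {
    await redis.del(lockKey); // release the lock
  }
}
```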
diff --git a/svc/autocache.ts b/svc/autocache.ts
index 20aba691d..126ffbf87 100644
--- a/svc/autocache.ts
+++ b/svc/autocache.ts
@@ -1,18 +1,16 @@
 // Processes a queue of auto player cache requests
 import { populateCache } from '../store/queries';
 import { runQueue } from '../store/queue';
-import redis from '../store/redis';
 import { redisCount, redisCountDistinct } from '../util/utility';
 
 async function processCache(job: CacheJob) {
   const accountId = job;
   console.log(accountId);
   redisCountDistinct(
-    redis,
     'distinct_auto_player_cache',
     accountId,
   );
-  redisCount(redis, 'auto_player_cache');
+  redisCount('auto_player_cache');
   // Don't need to await this since it's just caching
   populateCache(Number(accountId), ['match_id']);
 }
diff --git a/svc/fullhistory.ts b/svc/fullhistory.ts
index 14ba1bd2e..6f2476d2c 100644
--- a/svc/fullhistory.ts
+++ b/svc/fullhistory.ts
@@ -8,7 +8,6 @@ import {
   eachLimitPromise,
 } from '../util/utility';
 import db from '../store/db';
-import redis from '../store/redis';
 import { runQueue } from '../store/queue';
 import { getPlayerMatches } from '../store/queries';
 import { insertMatch } from '../store/insert';
@@ -23,9 +22,9 @@ async function updatePlayer(player: FullHistoryJob) {
     .where({
       account_id: player.account_id,
     });
-  redisCount(redis, 'fullhistory');
+  redisCount('fullhistory');
   if (!player.long_history) {
-    redisCount(redis, 'fullhistory_short');
+    redisCount('fullhistory_short');
   }
 }
 
@@ -119,7 +118,7 @@ async function processFullHistory(job: FullHistoryJob) {
     delete match_ids[matchId];
   }
   if (Object.keys(match_ids).length > 0) {
-    redisCount(redis, 'fullhistory_op');
+    redisCount('fullhistory_op');
   }
   // make api_details requests for matches
   const promiseFuncs = Object.keys(match_ids).map(
diff --git a/svc/mmr.ts b/svc/mmr.ts
index 1c02087b0..f7f2dd926 100644
--- a/svc/mmr.ts
+++ b/svc/mmr.ts
@@ -1,7 +1,6 @@
 // Processes a queue of requests to update MMR/rank medal for players
 import { runQueue } from '../store/queue';
 import db from '../store/db';
-import redis from '../store/redis';
 import { insertPlayerRating } from '../store/insert';
 import config from '../config';
 import {
@@ -18,7 +17,7 @@ async function processMmr(job: MmrJob) {
   const { data } = await axios.get(url, {
     timeout: 5000,
   });
-  redisCount(redis, 'retriever_player');
+  redisCount('retriever_player');
 
   // Update player's Dota Plus status if changed
   const player = {
diff --git a/svc/parser.ts b/svc/parser.ts
index 1f4112216..38fd19200 100755
--- a/svc/parser.ts
+++ b/svc/parser.ts
@@ -22,7 +22,7 @@ async function parseProcessor(job: ParseJob) {
   let gcTime = 0;
   let parseTime = 0;
   try {
-    redisCount(redis, 'parser_job');
+    redisCount('parser_job');
     const matchId = job.match_id;
 
     // Check if match is in safe integer range
@@ -39,7 +39,7 @@ async function parseProcessor(job: ParseJob) {
     // Check if match is already parsed according to PG
     // Doing the check early means we don't verify API or gcdata
     if (await checkIsParsed(matchId)) {
-      redisCount(redis, 'reparse_early');
+      redisCount('reparse_early');
       if (config.DISABLE_REPARSE_EARLY) {
         // If high load, we can disable parsing already parsed matches
         log('skip');
@@ -65,7 +65,7 @@ async function parseProcessor(job: ParseJob) {
 
     const { leagueid, duration, start_time } = apiMatch;
     if (!leagueid && Date.now() / 1000 - start_time > 30 * 24 * 60 * 60) {
-      redisCount(redis, 'oldparse');
+      redisCount('oldparse');
       if (config.DISABLE_OLD_PARSE) {
         // Valve doesn't keep non-league replays for more than a few weeks.
         // Skip even attempting the parse if it's too old
@@ -148,11 +148,11 @@ async function parseProcessor(job: ParseJob) {
     redis.publish('parsed', message);
     console.log(message);
     if (type === 'fail') {
-      redisCount(redis, 'parser_fail');
+      redisCount('parser_fail');
     } else if (type === 'crash') {
-      redisCount(redis, 'parser_crash');
+      redisCount('parser_crash');
     } else if (type === 'skip') {
-      redisCount(redis, 'parser_skip');
+      redisCount('parser_skip');
     }
   }
 }
diff --git a/svc/scanner.ts b/svc/scanner.ts
index 584b3d1f8..cfa33de19 100755
--- a/svc/scanner.ts
+++ b/svc/scanner.ts
@@ -36,7 +36,7 @@ async function scanApi(seqNum: number) {
         // unretryable steam error
         if (err?.result?.status === 2) {
           nextSeqNum += 1;
-          redisCount(redis, 'skip_seq_num');
+          redisCount('skip_seq_num');
           // continue with next seq num
           continue;
         } else {
@@ -82,7 +82,7 @@ async function processMatch(match: ApiMatch) {
   if (!result) {
     if (Number(config.SCANNER_OFFSET)) {
       // secondary scanner picked up a missing match
-      redisCount(redis, 'secondary_scanner');
+      redisCount('secondary_scanner');
     }
     await insertMatch(match, {
       type: 'api',
diff --git a/svc/web.ts b/svc/web.ts
index a9f4a61e6..9e9ef4c4a 100644
--- a/svc/web.ts
+++ b/svc/web.ts
@@ -103,9 +103,9 @@ const onResFinish = (
       }
     });
   }
-  redisCount(redis, 'api_hits');
+  redisCount('api_hits');
   if (req.headers.origin === config.UI_HOST) {
-    redisCount(redis, 'api_hits_ui');
+    redisCount('api_hits_ui');
   }
   const normPath = req.route?.path;
   redis.zincrby('api_paths', 1, req.method + ' ' + normPath);
@@ -564,7 +564,7 @@ app.use((req, res) =>
 app.use(
   (err: Error, req: express.Request, res: express.Response, cb: ErrorCb) => {
     console.log('[ERR]', req.originalUrl, err);
-    redisCount(redis, '500_error');
+    redisCount('500_error');
     if (config.NODE_ENV === 'development' || config.NODE_ENV === 'test') {
       // default express handler
       return cb(err?.message || JSON.stringify(err));
@@ -601,6 +601,6 @@ logSub.on('message', (channel: string, message: string) => {
 
 process.on('exit', (code) => {
   if (code > 0) {
-    redisCount(redis, 'web_crash');
+    redisCount('web_crash');
   }
 });
diff --git a/util/archiveUtil.ts b/util/archiveUtil.ts
index 8e760f4e7..6afb2c4bc 100644
--- a/util/archiveUtil.ts
+++ b/util/archiveUtil.ts
@@ -85,11 +85,11 @@ export async function doArchivePlayerMatches(
   }
   // check data completeness with isDataComplete
   if (!isDataComplete(match as ParsedMatch)) {
-    redisCount(null, 'incomplete_archive');
+    redisCount('incomplete_archive');
     console.log('INCOMPLETE match %s', matchId);
     return;
   }
-  redisCount(null, 'match_archive_write');
+  redisCount('match_archive_write');
   // console.log('SIMULATE ARCHIVE match %s', matchId);
   // TODO (howard) don't actually archive until verification of data format
   return;
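The final file is where the signature change lands: instead of each caller passing a client (or `null` in client-less scripts), the helpers resolve Redis lazily and only when `REDIS_URL` is configured, so the dev/ and util/ scripts above can count metrics without wiring anything up. A sketch of the pattern as it appears in the `+` lines below; the expiry arguments are cut off in the hunk, so they are omitted here, and `getOptionalRedis` / `bumpHourlyCounter` are illustrative names:

```ts
import moment from 'moment';
import config from '../config';

// Resolve the shared client only when Redis is actually configured;
// without REDIS_URL every counter quietly becomes a no-op.
async function getOptionalRedis() {
  return config.REDIS_URL ? (await import('../store/redis.js')).redis : null;
}

// Counters are bucketed per hour, e.g. 'api_hits:v2:<unix hour start>', so a
// key only ever accumulates one hour of traffic before a new bucket starts.
async function bumpHourlyCounter(prefix: string) {
  const redisToUse = await getOptionalRedis();
  const key = `${prefix}:v2:${moment().startOf('hour').format('X')}`;
  await redisToUse?.incr(key); // distinct counts use pfadd (HyperLogLog) instead
}
```

The optional chaining (`?.`) is what lets every call site drop the old `null` argument: when no client is available, the increment simply does nothing.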
diff --git a/util/utility.ts b/util/utility.ts
index e787c8362..bd7a9ab35 100644
--- a/util/utility.ts
+++ b/util/utility.ts
@@ -196,9 +196,9 @@ function getSteamAPIDataCallback(url: string | GetDataOptions, cb: ErrorCb) {
       parse.host = url.proxy[Math.floor(Math.random() * url.proxy.length)];
     }
     if (parse.host === 'api.steampowered.com') {
-      redisCount(null, 'steam_api_call');
+      redisCount('steam_api_call');
     } else {
-      redisCount(null, 'steam_proxy_call');
+      redisCount('steam_proxy_call');
     }
   }
   const target = urllib.format(parse);
@@ -237,9 +237,9 @@ function getSteamAPIDataCallback(url: string | GetDataOptions, cb: ErrorCb) {
         target,
       );
       if (res?.statusCode === 429) {
-        redisCount(null, 'steam_429');
+        redisCount('steam_429');
      } else if (res?.statusCode === 403) {
-        redisCount(null, 'steam_403');
+        redisCount('steam_403');
      }
      const backoff = res?.statusCode === 429 ? 3000 : 1000;
      return setTimeout(() => {
@@ -877,8 +877,8 @@ export async function getApiHosts(): Promise {
  * @param redis The Redis instance (null to dynamic import the default redis)
  * @param prefix The counter name
  */
-export async function redisCount(redis: Redis | null, prefix: MetricName) {
-  const redisToUse = redis ?? (await import('../store/redis.js')).redis;
+export async function redisCount(prefix: MetricName) {
+  const redisToUse = config.REDIS_URL ? (await import('../store/redis.js')).redis : null;
   const key = `${prefix}:v2:${moment().startOf('hour').format('X')}`;
   await redisToUse?.incr(key);
   await redisToUse?.expireat(
@@ -888,11 +888,10 @@
 }
 
 export async function redisCountDistinct(
-  redis: Redis | null,
   prefix: MetricName,
   value: string,
 ) {
-  const redisToUse = redis ?? (await import('../store/redis.js')).redis;
+  const redisToUse = config.REDIS_URL ? (await import('../store/redis.js')).redis : null;
   const key = `${prefix}:v2:${moment().startOf('hour').format('X')}`;
   await redisToUse?.pfadd(key, value);
   await redisToUse?.expireat(