From a49bca42e68e836922ace17b3acb6b95cf3068a3 Mon Sep 17 00:00:00 2001
From: John Gomersall
Date: Wed, 28 Feb 2024 08:56:03 +0000
Subject: [PATCH 1/5] Add redis configuration for staging and production

---
 .env                                        |  3 ++-
 .github/workflows/container-deploy.yml      |  6 +++++-
 docker-compose.yml                          |  9 +++++++++
 docker/dev.yml                              | 13 +++----------
 src/domain/services/import.service.spec.ts  |  4 ++--
 src/domain/services/import.service.ts       |  2 +-
 6 files changed, 22 insertions(+), 15 deletions(-)

diff --git a/.env b/.env
index bc6ce36..4eec671 100644
--- a/.env
+++ b/.env
@@ -8,7 +8,8 @@ POSTGRES_PORT=127.0.0.1:5512
 POSTGRES_DB=query
 POSTGRES_USER=productopener
 POSTGRES_PASSWORD=productopener
+COMMON_NET_NAME=po_default
 MONGO_URI=mongodb://localhost:27017
-#REDIS_URL=redis://localhost:6379
+REDIS_URL=redis://localhost:6379
 # Log levels are: debug, verbose, log (default), warn, error
 LOG_LEVEL=debug
diff --git a/.github/workflows/container-deploy.yml b/.github/workflows/container-deploy.yml
index 233920c..06cdc1e 100644
--- a/.github/workflows/container-deploy.yml
+++ b/.github/workflows/container-deploy.yml
@@ -30,15 +30,19 @@ jobs:
           # deploy target
           echo "SSH_HOST=10.1.0.200" >> $GITHUB_ENV
           # configurations
+          echo "COMMON_NET_NAME=po_webnet" >> $GITHUB_ENV
           echo "MONGO_URI=mongodb://10.1.0.200:27017" >> $GITHUB_ENV
+          echo "REDIS_URL=redis://redis:6379" >> $GITHUB_ENV
       - name: Set various variable for production deployment
         if: matrix.env == 'off-query-org'
         run: |
           # deploy target
           echo "SSH_HOST=10.1.0.201" >> $GITHUB_ENV
           # configurations
+          echo "COMMON_NET_NAME=" >> $GITHUB_ENV
-          # mongodb (through stunnel)
+          # mongodb and redis (through stunnel)
           echo "MONGO_URI=mongodb://10.1.0.113:27017" >> $GITHUB_ENV
+          echo "REDIS_URL=redis://10.1.0.113:6379" >> $GITHUB_ENV

       - name: Wait for container build workflow
         uses: tomchv/wait-my-workflow@v1.1.0
diff --git a/docker-compose.yml b/docker-compose.yml
index 5c96d72..d37373c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -7,6 +7,8 @@ services:
       - POSTGRES_DB
     volumes:
       - dbdata:/var/lib/postgresql/data
+    networks:
+      - common_net

   query:
     image: ghcr.io/openfoodfacts/openfoodfacts-query:${TAG}
@@ -24,6 +26,13 @@ services:
     depends_on:
       query_postgres:
         condition: service_started
+    networks:
+      - common_net
+
+networks:
+  common_net:
+    name: ${COMMON_NET_NAME}
+    external: true

 volumes:
   dbdata:
diff --git a/docker/dev.yml b/docker/dev.yml
index ff1d495..d25479d 100644
--- a/docker/dev.yml
+++ b/docker/dev.yml
@@ -5,21 +5,14 @@ services:
       - "${POSTGRES_PORT:-5512}:5432"
     volumes:
       - ./data:/mnt/data
-    networks:
-      - po_default

   query:
     build: .
     image: openfoodfacts-query:dev
     environment:
-      # Use Product Opener's MongoDB
+      # Use Product Opener's MongoDB and REDIS
       - MONGO_URI=mongodb://mongodb:27017
+      - REDIS_URL=redis://redis:6379
+
     volumes:
       - ./data:/data
-    networks:
-      - po_default
-
-networks:
-  po_default:
-    external: true
-    name: po_default
diff --git a/src/domain/services/import.service.spec.ts b/src/domain/services/import.service.spec.ts
index 4807581..7f50450 100644
--- a/src/domain/services/import.service.spec.ts
+++ b/src/domain/services/import.service.spec.ts
@@ -371,7 +371,7 @@ describe('receiveMessages', () => {
     await client.connect();
     try {
       // When: A message is sent
-      const messageId = await client.xAdd('product_update', '*', {
+      const messageId = await client.xAdd('product_updates_off', '*', {
         code: 'TEST1',
       });

@@ -384,7 +384,7 @@ describe('receiveMessages', () => {

     // If a new message is added
     importSpy.mockClear();
-    await client.xAdd('product_update', '*', {
+    await client.xAdd('product_updates_off', '*', {
       code: 'TEST2',
     });

diff --git a/src/domain/services/import.service.ts b/src/domain/services/import.service.ts
index c5d7f3a..2c2b99d 100644
--- a/src/domain/services/import.service.ts
+++ b/src/domain/services/import.service.ts
@@ -413,7 +413,7 @@ export class ImportService {
           // XREAD can read from multiple streams, starting at a
           // different ID for each...
           {
-            key: 'product_update',
+            key: 'product_updates_off',
             id: lastMessageId,
           },
         ],

From 346d8b59dada697f4ed459a3b2e49366cfce111d Mon Sep 17 00:00:00 2001
From: John Gomersall
Date: Mon, 11 Mar 2024 17:04:50 +0000
Subject: [PATCH 2/5] Disable redis for tests

---
 test/global-setup.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/test/global-setup.ts b/test/global-setup.ts
index cb5e269..91ee64b 100644
--- a/test/global-setup.ts
+++ b/test/global-setup.ts
@@ -13,6 +13,9 @@ export default async function () {
   process.env.POSTGRES_PASSWORD = container.getPassword();
   globalThis.__PGCONTAINER__ = container;

+  // We don't use redis in the tests
+  process.env.REDIS_URL = '';
+
   // Tried running migrations with the API but doesn't work because
   // of the way Jest mocks things. Even importing MikroORM is enough to break things.
   // https://github.com/mikro-orm/mikro-orm/discussions/3795

From 29ac449a0c0a4f5f354b463046dfa8f6ad53837f Mon Sep 17 00:00:00 2001
From: John Gomersall
Date: Mon, 11 Mar 2024 17:05:05 +0000
Subject: [PATCH 3/5] Make docker more explicit to avoid unnecessary builds

---
 Dockerfile | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 1435291..000032a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,7 @@
 FROM node:lts-alpine
-COPY . .
+COPY ./src ./src
+COPY ./package* .
+COPY ./tsconfig.* .
 RUN npm ci
 RUN npm run build
 CMD [ "node", "dist/main.js"]

From 5afb35d6a73497adaff6aeb5ae3aeade87a89175 Mon Sep 17 00:00:00 2001
From: John Gomersall
Date: Mon, 11 Mar 2024 17:31:44 +0000
Subject: [PATCH 4/5] Default to no redis for local dev

---
 .env | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.env b/.env
index 4eec671..721c935 100644
--- a/.env
+++ b/.env
@@ -10,6 +10,6 @@ POSTGRES_USER=productopener
 POSTGRES_PASSWORD=productopener
 COMMON_NET_NAME=po_default
 MONGO_URI=mongodb://localhost:27017
-REDIS_URL=redis://localhost:6379
+#REDIS_URL=redis://localhost:6379
 # Log levels are: debug, verbose, log (default), warn, error
 LOG_LEVEL=debug

From d0431b1887474c26b7b614f49d4375978d9e6827 Mon Sep 17 00:00:00 2001
From: John Gomersall
Date: Mon, 11 Mar 2024 17:31:56 +0000
Subject: [PATCH 5/5] Make most import messages debug level

---
 src/domain/services/import.service.ts | 44 ++++++++++++++++++---------
 1 file changed, 29 insertions(+), 15 deletions(-)

diff --git a/src/domain/services/import.service.ts b/src/domain/services/import.service.ts
index 2c2b99d..3ceef49 100644
--- a/src/domain/services/import.service.ts
+++ b/src/domain/services/import.service.ts
@@ -48,7 +48,7 @@ export class ImportService {
     if (from) {
       const fromTime = Math.floor(new Date(from).getTime() / 1000);
       filter['last_modified_t'] = { $gt: fromTime };
-      this.logger.log(`Starting import from ${from}`);
+      this.logger.debug(`Starting import from ${from}`);
     }

     const latestModified = await this.importWithFilter(
@@ -72,7 +72,7 @@
     // queries that should only affect products loaded in this import
     const updateId = Ulid.generate().toRaw();

-    this.logger.log('Connecting to MongoDB');
+    this.logger.debug('Connecting to MongoDB');
     const client = new MongoClient(process.env.MONGO_URI);
     await client.connect();
     const db = client.db('off');
@@ -88,7 +88,18 @@

     // Repeat the below for normal and then obsolete products
     // Both are stored in the same table in PostgreSQL
-    for (const obsolete of [false, true]) {
+    const collections = {
+      normal: {
+        obsolete: false,
+        count: 0,
+      },
+      obsolete: {
+        obsolete: true,
+        count: 0,
+      },
+    };
+    for (const collection of Object.values(collections)) {
+      const obsolete = collection.obsolete;
       const products = db.collection(`products${obsolete ? '_obsolete' : ''}`);
       const cursor = products.find(filter, { projection });
       let i = 0;
@@ -110,12 +121,12 @@
           this.em.clear();
         }
         if (!(i % this.importLogInterval)) {
-          this.logger.log(`Updated ${i}`);
+          this.logger.debug(`Updated ${i}`);
         }
       }
       await this.em.flush();
-      this.logger.log(`${i}${obsolete ? ' Obsolete' : ''} Products imported`);
       await cursor.close();
+      collection.count = i;
     }

     await client.close();
@@ -126,7 +137,10 @@
     if (fullImport) {
       await this.deleteOtherProducts(updateId);
     }
-    this.logger.log('Finished');
+
+    this.logger.log(
+      `Imported ${collections.normal.count} Products and ${collections.obsolete.count} Obsolete Products from ${source}`,
+    );

     return latestModified;
   }
@@ -223,7 +237,7 @@
    * This was found to be quicker than using ORM functionality
    */
   async updateTags(updateId: string, fullImport = false) {
-    this.logger.log(`Updating tags for updateId: ${updateId}`);
+    this.logger.debug(`Updating tags for updateId: ${updateId}`);

     const connection = this.em.getConnection();

@@ -316,7 +330,7 @@
       logText += ` > ${affectedRows}`;
     }
     await connection.execute('commit');
-    this.logger.log(logText + ' rows');
+    this.logger.debug(logText + ' rows');

     for (const [tag, entity] of Object.entries(ProductTagMap.MAPPED_TAGS)) {
       let logText = `Updated ${tag}`;
@@ -355,7 +369,7 @@

       logText += ` inserted ${results['affectedRows']} rows`;

-      this.logger.log(logText);
+      this.logger.debug(logText);
     }
   }

@@ -363,7 +377,7 @@
     const deleted = await this.em.nativeDelete(Product, {
       $or: [{ lastUpdateId: { $ne: updateId } }, { lastUpdateId: null }],
     });
-    this.logger.log(`${deleted} Products deleted`);
+    this.logger.debug(`${deleted} Products deleted`);
   }

   async scheduledImportFromMongo() {
@@ -465,7 +479,7 @@
       if (lastModified < fromTime) {
         skip++;
         if (!(skip % this.importLogInterval)) {
-          this.logger.log(`Skippped ${skip}`);
+          this.logger.debug(`Skippped ${skip}`);
         }
         continue;
       }
@@ -486,18 +500,18 @@
           this.em.clear();
         }
         if (!(i % this.importLogInterval)) {
-          this.logger.log(`Updated ${i}`);
+          this.logger.debug(`Updated ${i}`);
         }
       } catch (e) {
-        this.logger.log(e.message + ': ' + line);
+        this.logger.debug(e.message + ': ' + line);
       }
     }
     await this.em.flush();
-    this.logger.log(`${i} Products imported`);
+    this.logger.debug(`${i} Products imported`);
     await this.updateTags(updateId, !from);
     if (!from) {
       await this.deleteOtherProducts(updateId);
     }
-    this.logger.log('Finished');
+    this.logger.debug('Finished');
   }
 }
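Note for reviewers: the spec above publishes to the renamed stream with xAdd and the importer reads it back with XREAD. The sketch below is illustrative only and is not part of these patches; it assumes the node-redis v4 client already used by the spec, and a REDIS_URL such as the redis://redis:6379 value set for staging (the empty-string guard mirrors the test setup in global-setup.ts).

// redis-stream-sketch.ts - illustrative only, not part of this patch series.
import { createClient } from 'redis';

async function main() {
  // Mirror the test setup: an empty or missing REDIS_URL means "no redis".
  if (!process.env.REDIS_URL) return;

  const client = createClient({ url: process.env.REDIS_URL });
  await client.connect();

  // Producer side (what Product Opener does): append an update event.
  await client.xAdd('product_updates_off', '*', { code: 'TEST1' });

  // Consumer side (what ImportService does): read everything after the last
  // message ID it has seen ('0' reads the stream from the beginning).
  const streams = await client.xRead(
    [{ key: 'product_updates_off', id: '0' }],
    { COUNT: 100 },
  );

  for (const stream of streams ?? []) {
    for (const { id, message } of stream.messages) {
      console.log(`Received ${id}: importing product ${message.code}`);
    }
  }

  await client.quit();
}

main().catch((e) => console.error(e));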