diff --git a/.env b/.env
index f028d45..3e64455 100644
--- a/.env
+++ b/.env
@@ -9,6 +9,7 @@ POSTGRES_PORT=127.0.0.1:5512
 POSTGRES_DB=query
 POSTGRES_USER=productopener
 POSTGRES_PASSWORD=productopener
+COMMON_NET_NAME=po_default
 MONGO_URI=mongodb://localhost:27017
 #REDIS_URL=redis://localhost:6379
 # Log levels are: debug, verbose, log (default), warn, error
diff --git a/.github/workflows/container-deploy.yml b/.github/workflows/container-deploy.yml
index c9f7add..5939d3c 100644
--- a/.github/workflows/container-deploy.yml
+++ b/.github/workflows/container-deploy.yml
@@ -30,15 +30,19 @@ jobs:
           # deploy target
           echo "SSH_HOST=10.1.0.200" >> $GITHUB_ENV
           # configurations
+          echo "COMMON_NET_NAME=po_webnet" >> $GITHUB_ENV
           echo "MONGO_URI=mongodb://10.1.0.200:27017" >> $GITHUB_ENV
+          echo "REDIS_URL=redis://redis:6379" >> $GITHUB_ENV
       - name: Set various variable for production deployment
         if: matrix.env == 'off-query-org'
         run: |
           # deploy target
           echo "SSH_HOST=10.1.0.201" >> $GITHUB_ENV
           # configurations
+          echo "COMMON_NET_NAME=" >> $GITHUB_ENV
-          # mongodb (through stunnel)
+          # mongodb and redis (through stunnel)
           echo "MONGO_URI=mongodb://10.1.0.113:27017" >> $GITHUB_ENV
+          echo "REDIS_URL=redis://10.1.0.113:6379" >> $GITHUB_ENV
 
       - name: Wait for container build workflow
         uses: tomchv/wait-my-workflow@v1.1.0
diff --git a/Dockerfile b/Dockerfile
index 1435291..000032a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,7 @@
 FROM node:lts-alpine
-COPY . .
+COPY ./src ./src
+COPY ./package* .
+COPY ./tsconfig.* .
 RUN npm ci
 RUN npm run build
 CMD [ "node", "dist/main.js"]
diff --git a/docker-compose.yml b/docker-compose.yml
index 450f0e7..dd9d46f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,6 +8,8 @@ services:
       - POSTGRES_DB
     volumes:
       - dbdata:/var/lib/postgresql/data
+    networks:
+      - common_net
 
   query:
     image: ghcr.io/openfoodfacts/openfoodfacts-query:${TAG}
@@ -26,6 +28,13 @@ services:
     depends_on:
       query_postgres:
         condition: service_started
+    networks:
+      - common_net
+
+networks:
+  common_net:
+    name: ${COMMON_NET_NAME}
+    external: true
 
 volumes:
   dbdata:
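Note: the new `COMMON_NET_NAME` variable lets each deployment choose which pre-existing Docker network to join (`po_default` locally, `po_webnet` for staging, empty for production), while `REDIS_URL` follows the same pattern as the existing `MONGO_URI`. As a rough TypeScript sketch of how the service presumably consumes the two connection strings (the `MongoClient` call mirrors import.service.ts below; the Redis half and the `connectBackends` helper are assumptions based on the standard node-redis v4 API, not code from this diff):

```ts
import { MongoClient } from 'mongodb';
import { createClient } from 'redis';

// Hypothetical helper: both URLs come from the environment variables
// configured in .env / container-deploy.yml above. The fallbacks mirror
// the local defaults in .env.
export async function connectBackends() {
  const mongo = new MongoClient(
    process.env.MONGO_URI ?? 'mongodb://localhost:27017',
  );
  await mongo.connect();

  const redis = createClient({
    url: process.env.REDIS_URL || 'redis://localhost:6379',
  });
  await redis.connect();

  return { mongo, redis };
}
```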
diff --git a/docker/dev.yml b/docker/dev.yml
index ff1d495..d25479d 100644
--- a/docker/dev.yml
+++ b/docker/dev.yml
@@ -5,21 +5,14 @@ services:
       - "${POSTGRES_PORT:-5512}:5432"
     volumes:
       - ./data:/mnt/data
-    networks:
-      - po_default
 
   query:
     build: .
     image: openfoodfacts-query:dev
     environment:
-      # Use Product Opener's MongoDB
+      # Use Product Opener's MongoDB and REDIS
       - MONGO_URI=mongodb://mongodb:27017
+      - REDIS_URL=redis://redis:6379
+
     volumes:
       - ./data:/data
-    networks:
-      - po_default
-
-networks:
-  po_default:
-    external: true
-    name: po_default
diff --git a/src/domain/services/import.service.spec.ts b/src/domain/services/import.service.spec.ts
index 4807581..7f50450 100644
--- a/src/domain/services/import.service.spec.ts
+++ b/src/domain/services/import.service.spec.ts
@@ -371,7 +371,7 @@ describe('receiveMessages', () => {
     await client.connect();
     try {
       // When: A message is sent
-      const messageId = await client.xAdd('product_update', '*', {
+      const messageId = await client.xAdd('product_updates_off', '*', {
         code: 'TEST1',
       });
 
@@ -384,7 +384,7 @@ describe('receiveMessages', () => {
 
       // If a new message is added
      importSpy.mockClear();
-      await client.xAdd('product_update', '*', {
+      await client.xAdd('product_updates_off', '*', {
         code: 'TEST2',
       });
 
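The Redis stream key changes from `product_update` to `product_updates_off` in both the tests here and the XREAD call in import.service.ts below, presumably to match the stream Product Opener now publishes to. A minimal sketch of a consumer loop for the renamed stream, assuming the node-redis v4 client the tests above already use (`listenForProductUpdates` is a hypothetical name, and the loop is illustrative rather than the service's exact code):

```ts
import { createClient } from 'redis';

export async function listenForProductUpdates() {
  // Blocking reads tie up a connection, so a dedicated client is used here.
  const client = createClient({ url: process.env.REDIS_URL });
  await client.connect();

  // '$' means "only messages newer than now"; after the first read we
  // resume from the last message ID we have seen.
  let lastMessageId = '$';
  for (;;) {
    const streams = await client.xRead(
      { key: 'product_updates_off', id: lastMessageId },
      { BLOCK: 5000, COUNT: 100 },
    );
    if (!streams) continue; // BLOCK timed out with no new messages

    for (const { id, message } of streams[0].messages) {
      lastMessageId = id;
      // Each message carries at least the product code, per the test above
      console.log(`Product updated: ${message.code}`);
    }
  }
}
```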
diff --git a/src/domain/services/import.service.ts b/src/domain/services/import.service.ts
index c5d7f3a..3ceef49 100644
--- a/src/domain/services/import.service.ts
+++ b/src/domain/services/import.service.ts
@@ -48,7 +48,7 @@ export class ImportService {
     if (from) {
       const fromTime = Math.floor(new Date(from).getTime() / 1000);
       filter['last_modified_t'] = { $gt: fromTime };
-      this.logger.log(`Starting import from ${from}`);
+      this.logger.debug(`Starting import from ${from}`);
     }
 
     const latestModified = await this.importWithFilter(
@@ -72,7 +72,7 @@ export class ImportService {
     // queries that should only affect products loaded in this import
     const updateId = Ulid.generate().toRaw();
 
-    this.logger.log('Connecting to MongoDB');
+    this.logger.debug('Connecting to MongoDB');
     const client = new MongoClient(process.env.MONGO_URI);
     await client.connect();
     const db = client.db('off');
@@ -88,7 +88,18 @@ export class ImportService {
 
     // Repeat the below for normal and then obsolete products
     // Both are stored in the same table in PostgreSQL
-    for (const obsolete of [false, true]) {
+    const collections = {
+      normal: {
+        obsolete: false,
+        count: 0,
+      },
+      obsolete: {
+        obsolete: true,
+        count: 0,
+      },
+    };
+    for (const collection of Object.values(collections)) {
+      const obsolete = collection.obsolete;
       const products = db.collection(`products${obsolete ? '_obsolete' : ''}`);
       const cursor = products.find(filter, { projection });
       let i = 0;
@@ -110,12 +121,12 @@ export class ImportService {
           this.em.clear();
         }
         if (!(i % this.importLogInterval)) {
-          this.logger.log(`Updated ${i}`);
+          this.logger.debug(`Updated ${i}`);
         }
       }
       await this.em.flush();
-      this.logger.log(`${i}${obsolete ? ' Obsolete' : ''} Products imported`);
       await cursor.close();
+      collection.count = i;
     }
 
     await client.close();
@@ -126,7 +137,10 @@ export class ImportService {
     if (fullImport) {
       await this.deleteOtherProducts(updateId);
     }
-    this.logger.log('Finished');
+
+    this.logger.log(
+      `Imported ${collections.normal.count} Products and ${collections.obsolete.count} Obsolete Products from ${source}`,
+    );
 
     return latestModified;
   }
@@ -223,7 +237,7 @@ export class ImportService {
    * This was found to be quicker than using ORM functionality
    */
   async updateTags(updateId: string, fullImport = false) {
-    this.logger.log(`Updating tags for updateId: ${updateId}`);
+    this.logger.debug(`Updating tags for updateId: ${updateId}`);
 
     const connection = this.em.getConnection();
 
@@ -316,7 +330,7 @@ export class ImportService {
       logText += ` > ${affectedRows}`;
     }
     await connection.execute('commit');
-    this.logger.log(logText + ' rows');
+    this.logger.debug(logText + ' rows');
 
     for (const [tag, entity] of Object.entries(ProductTagMap.MAPPED_TAGS)) {
       let logText = `Updated ${tag}`;
@@ -355,7 +369,7 @@ export class ImportService {
 
       logText += ` inserted ${results['affectedRows']} rows`;
 
-      this.logger.log(logText);
+      this.logger.debug(logText);
     }
   }
 
@@ -363,7 +377,7 @@ export class ImportService {
     const deleted = await this.em.nativeDelete(Product, {
       $or: [{ lastUpdateId: { $ne: updateId } }, { lastUpdateId: null }],
     });
-    this.logger.log(`${deleted} Products deleted`);
+    this.logger.debug(`${deleted} Products deleted`);
   }
 
   async scheduledImportFromMongo() {
@@ -413,7 +427,7 @@ export class ImportService {
         // XREAD can read from multiple streams, starting at a
         // different ID for each...
         {
-          key: 'product_update',
+          key: 'product_updates_off',
           id: lastMessageId,
         },
       ],
@@ -465,7 +479,7 @@ export class ImportService {
       if (lastModified < fromTime) {
         skip++;
         if (!(skip % this.importLogInterval)) {
-          this.logger.log(`Skippped ${skip}`);
+          this.logger.debug(`Skippped ${skip}`);
         }
         continue;
       }
@@ -486,18 +500,18 @@ export class ImportService {
           this.em.clear();
         }
         if (!(i % this.importLogInterval)) {
-          this.logger.log(`Updated ${i}`);
+          this.logger.debug(`Updated ${i}`);
         }
       } catch (e) {
-        this.logger.log(e.message + ': ' + line);
+        this.logger.debug(e.message + ': ' + line);
       }
     }
     await this.em.flush();
-    this.logger.log(`${i} Products imported`);
+    this.logger.debug(`${i} Products imported`);
     await this.updateTags(updateId, !from);
     if (!from) {
       await this.deleteOtherProducts(updateId);
     }
-    this.logger.log('Finished');
+    this.logger.debug('Finished');
   }
 }
diff --git a/test/global-setup.ts b/test/global-setup.ts
index cb5e269..91ee64b 100644
--- a/test/global-setup.ts
+++ b/test/global-setup.ts
@@ -13,6 +13,9 @@ export default async function () {
   process.env.POSTGRES_PASSWORD = container.getPassword();
   globalThis.__PGCONTAINER__ = container;
 
+  // We don't use redis in the tests
+  process.env.REDIS_URL = '';
+
   // Tried running migrations with the API but doesn't work because
   // of the way Jest mocks things. Even importing MikroORM is enough to break things.
   // https://github.com/mikro-orm/mikro-orm/discussions/3795
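Finally, setting `REDIS_URL` to an empty string in test/global-setup.ts suggests the service treats a falsy URL as "Redis disabled", so Jest runs never open a stream connection. A guard along these lines would give that behaviour (`startRedisListener` is a hypothetical name; the actual check in the service may differ):

```ts
import { createClient } from 'redis';

// Hypothetical guard: with REDIS_URL unset or '', the listener is a no-op,
// which is what test/global-setup.ts relies on.
export async function startRedisListener(): Promise<void> {
  if (!process.env.REDIS_URL) {
    return; // Redis is disabled (e.g. in Jest runs)
  }
  const client = createClient({ url: process.env.REDIS_URL });
  await client.connect();
  // ...start XREADing product_updates_off here, as sketched earlier
}
```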