diff --git a/package-lock.json b/package-lock.json index 3519ddc..b00c721 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,8 @@ "ejs": "^3.1.8", "express": "^4.18.1", "jskos-tools": "^1.0.26", - "nodemon": "^2.0.19" + "nodemon": "^2.0.19", + "pg": "^8.8.0" }, "devDependencies": { "eslint": "^8.19.0", @@ -409,6 +410,14 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -2421,6 +2430,11 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -2482,6 +2496,80 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, + "node_modules/pg": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", + "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.2", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + 
"engines": { + "node": ">= 8.0.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", + "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, 
"node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2519,6 +2607,41 @@ "node": ">=4" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/pre-commit": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/pre-commit/-/pre-commit-1.2.2.tgz", @@ -3257,6 +3380,14 @@ "os-shim": "^0.1.2" } }, + "node_modules/split2": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/standard-readme": { "version": "2.0.4", "resolved": 
"https://registry.npmjs.org/standard-readme/-/standard-readme-2.0.4.tgz", @@ -3960,6 +4091,14 @@ "node": ">=12" } }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -4279,6 +4418,11 @@ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", "dev": true }, + "buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" + }, "bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -5744,6 +5888,11 @@ "aggregate-error": "^3.0.0" } }, + "packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -5789,6 +5938,61 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, + "pg": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", + "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", + "requires": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.2", + "pg-protocol": 
"^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + } + }, + "pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" + }, + "pg-pool": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", + "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", + "requires": {} + }, + "pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "requires": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + } + }, + "pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "requires": { + "split2": "^4.1.0" + } + }, "picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -5811,6 +6015,29 @@ "util-deprecate": "^1.0.2" } }, + "postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + 
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==" + }, + "postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==" + }, + "postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "requires": { + "xtend": "^4.0.0" + } + }, "pre-commit": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/pre-commit/-/pre-commit-1.2.2.tgz", @@ -6360,6 +6587,11 @@ "os-shim": "^0.1.2" } }, + "split2": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==" + }, "standard-readme": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/standard-readme/-/standard-readme-2.0.4.tgz", @@ -6919,6 +7151,11 @@ "dev": true, "peer": true }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index ad7e15d..f8f9224 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,8 @@ "ejs": "^3.1.8", "express": "^4.18.1", "jskos-tools": "^1.0.26", - "nodemon": 
"^2.0.19" + "nodemon": "^2.0.19", + "pg": "^8.8.0" }, "devDependencies": { "eslint": "^8.19.0", diff --git a/src/backend/postgres.js b/src/backend/postgres.js new file mode 100644 index 0000000..d9144ec --- /dev/null +++ b/src/backend/postgres.js @@ -0,0 +1,213 @@ + +import pg from "pg" +const { Pool } = pg + +export default class PostgreSQLBackend { + + // Establish connection to backend or throw error + constructor(config) { + this.db = new Pool({ + user: "stefan" || config.user, + password: "" || config.password, + host: "localhost" || config.host, + database: "subjects" || config.database, + port: 5432 || config.port, + idleTimeoutMillis: 0, + connectionTimeoutMillis: 0, + }) + + ;(async () => { + const client = await this.db.connect() + try { + const res = await client.query("SELECT * FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';") + if (res.rowCount === 0) { + await client.query(` + CREATE TABLE subjects ( + ppn TEXT NOT NULL, + voc TEXT NOT NULL, + notation TEXT NOT NULL + ); + + CREATE INDEX idx_notation on subjects (notation); + CREATE INDEX idx_ppn on subjects (ppn); + + CREATE TABLE metadata ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ); + `) + } + } finally { + client.release() + } + })() + this.name = `PostgreSQL database ${config.database} (port ${config.port})` + } + + async disconnect() { + await this.db.end() + } + + async occurrences({scheme, notation}) { + const client = await this.db.connect() + try { + const results = await client.query("SELECT count(*) AS freq FROM subjects WHERE voc = $1 and notation = $2", [scheme.VOC, notation]) + return results.rows + } catch (error) { + console.log(error) + return [] + } finally { + client.release() + } + } + + async coOccurrences({scheme, notation, otherScheme, threshold}) { + const client = await this.db.connect() + try { + const results = await client.query(`SELECT b.voc, b.notation, count(*) AS freq FROM subjects AS b JOIN (SELECT ppn FROM 
subjects WHERE voc = $1 AND notation = $2) a ON a.ppn = b.ppn WHERE b.voc ${otherScheme ? "=" : "!="} $3 GROUP BY b.voc, b.notation HAVING count(*) >= $4 ORDER BY freq DESC LIMIT 10;`, [scheme.VOC, notation, otherScheme ? otherScheme.VOC : scheme.VOC, threshold]) + return results.rows + } catch (error) { + console.log(error) + return [] + } finally { + client.release() + } + } + + async updateRecord(ppn, rows=[]) { + const deleteAllQuery = "DELETE FROM subjects WHERE ppn = $1" + const deleteOneQuery = "DELETE FROM subjects WHERE ppn = $1 AND voc = $2" + const insertQuery = "INSERT INTO subjects (ppn, voc, notation) VALUES ($1, $2, $3)" + + // Sort rows (deletion first) + rows.sort((a, b) => { + if (!a.notation && b.notation || !a.voc && b.voc) { + return -1 + } + return 1 + }) + + const client = await this.db.connect() + try { + await client.query("BEGIN") + + for (const row of rows) { + if (!row.voc) { + await client.query(deleteAllQuery, [ppn]) + } else if (!row.notation) { + await client.query(deleteOneQuery, [ppn, row.voc]) + } else { + await client.query(insertQuery, [ppn, row.voc, row.notation]) + } + } + + await client.query("COMMIT") + } catch (e) { + await client.query("ROLLBACK") + console.log(e) + } finally { + client.release() + } + } + + async batchImport(data) { + const client = await this.db.connect() + + try { + + // Drop indexes to recreate later + console.time("drop indexes/data") + await client.query("DROP INDEX IF EXISTS idx_notation;") + await client.query("DROP INDEX IF EXISTS idx_ppn;") + await client.query("TRUNCATE subjects;") + console.timeEnd("drop indexes/data") + // await client.query("BEGIN") + + const bulkInsert = async (rows) => { + const keys = Object.keys(rows[0]) + let valueStr = "" + let valueArray = [] + let valueIndex = 1 + for (let row of rows) { + if (valueStr) { + valueStr += "," + } + valueStr += "(" + keys.map((value, index) => `$${valueIndex + index}`) + ")" + valueArray = valueArray.concat(keys.map((value) => 
row[value])) + valueIndex += keys.length + } + await client.query(`INSERT INTO subjects (${keys.join(",")}) VALUES ${valueStr}`, valueArray) + } + + let rows = [] + let inserted = 0 + console.time("insert") + + for await (const row of data) { + rows.push(row) + if (rows.length === 2000) { + inserted += rows.length + await bulkInsert(rows) + rows = [] + if (inserted % 1000000 === 0) { + // await client.query("COMMIT") + console.timeEnd("insert") + console.log(inserted) + console.time("insert") + // await client.query("BEGIN") + } + } + } + + inserted += rows.length + await bulkInsert(rows) + // await client.query("COMMIT") + console.timeEnd("insert") + console.log(inserted) + // Recreate indexes + console.time("recreate indexes") + await client.query("CREATE INDEX idx_notation on subjects (notation);") + await client.query("CREATE INDEX idx_ppn on subjects (ppn);") + console.timeEnd("recreate indexes") + + + } catch (error) { + console.log(error) + // await client.query("ROLLBACK") + } finally { + client.release() + } + } + + async metadata() { + const client = await this.db.connect() + try { + const { occcount } = (await client.query("SELECT COUNT(*) AS occCount FROM subjects")).rows[0] + const { reccount } = (await client.query("SELECT COUNT(DISTINCT ppn) AS recCount FROM subjects")).rows[0] + const { voccount } = (await client.query("SELECT COUNT(DISTINCT voc) AS vocCount FROM subjects")).rows[0] + return { occCount: occcount, recCount: reccount, vocCount: voccount } + } catch (error) { + console.log(error) + return [] + } finally { + client.release() + } + } + + async updateMetadata(data) { + if (!Array.isArray(data)) { + data = [data] + } + const client = await this.db.connect() + try { + const updateQuery = "INSERT INTO metadata VALUES ($1, $2) ON CONFLICT (key) DO UPDATE SET value = $2" + for (const row of data) { + await client.query(updateQuery, [row.key, row.value]) + } + } catch (error) { + console.log(error) + } finally { + client.release() + } + } +} 
diff --git a/src/config.js b/src/config.js
index 7988be7..a6a3ab3 100644
--- a/src/config.js
+++ b/src/config.js
@@ -13,8 +13,17 @@ import fs from "fs"
 export const schemes = JSON.parse(fs.readFileSync(config.schemesFile)).map(scheme => new jskos.ConceptScheme(scheme))
 export const links = JSON.parse(fs.readFileSync(config.linksFile))
+const backendName = process.env.BACKEND || "SQLiteBackend" // backend chosen by class name, e.g. BACKEND=PostgreSQLBackend — NOTE(review): relies on Function.name, breaks if classes are renamed/minified
 import SQLiteBackend from "./backend/sqlite.js"
-export const backend = new SQLiteBackend(config)
+import PostgreSQLBackend from "./backend/postgres.js"
+const backends = [SQLiteBackend, PostgreSQLBackend] // registry of selectable backend classes
+const backendClass = backends.find(b => b.name === backendName)
-console.log(`Configured ${schemes.length} vocabularies from ${config.schemesFile}. Using ${backend.name}.`)
+if (!backendClass) { // unknown BACKEND value is a fatal configuration error
+  console.error(`Backend ${backendName} not found.`)
+  process.exit(1)
+}
+export const backend = new backendClass(config)
+
+console.log(`Configured ${schemes.length} vocabularies from ${config.schemesFile}. Using ${backend.name}.`)