diff --git a/packages/itmat-apis/src/graphql/resolvers/fileResolvers.ts b/packages/itmat-apis/src/graphql/resolvers/fileResolvers.ts
index a0cdfcecf..7cb2efb1e 100644
--- a/packages/itmat-apis/src/graphql/resolvers/fileResolvers.ts
+++ b/packages/itmat-apis/src/graphql/resolvers/fileResolvers.ts
@@ -50,7 +50,12 @@ export class FileResolvers {
                 );
             } else {
                 try {
-                    const res = await this.dataCore.uploadFileData(context.req.user, args.studyId, await args.file, targetFieldId, args.description);
+                    // We need to rename the properties to fit for the new V3 naming
+                    const description = {
+                        ...parsedDescription,
+                        subjectId: parsedDescription['participantId']
+                    };
+                    const res = await this.dataCore.uploadFileData(context.req.user, args.studyId, await args.file, targetFieldId, JSON.stringify(description));
                     const fileEntry = await this.db.collections.files_collection.findOne({ id: res.id });
                     if (args.fileLength) {
                         if (args.fileLength.toString() !== fileEntry?.fileSize.toString()) {
@@ -105,4 +110,4 @@ export class FileResolvers {
             }
         };
     }
-}
\ No newline at end of file
+}
diff --git a/packages/itmat-apis/src/trpc/dataProcedure.ts b/packages/itmat-apis/src/trpc/dataProcedure.ts
index 4aa33b897..f430bef7e 100644
--- a/packages/itmat-apis/src/trpc/dataProcedure.ts
+++ b/packages/itmat-apis/src/trpc/dataProcedure.ts
@@ -304,6 +304,33 @@ export class DataRouter {
                     opts.input.forceUpdate
                 );
             }),
+            /**
+             * Get the latest files of a study.
+             *
+             * @param studyId - The id of the study.
+             * @param versionId - The id of the data version. By default not specified for the latest version.
+             * @param fieldIds - The list of fields to return.
+             *
+             * @return IFile[] - The list of objects of IFile.
+             */
+            getFilesLatest: this.baseProcedure.input(z.object({
+                studyId: z.string(),
+                versionId: z.optional(z.string()),
+                fieldIds: z.optional(z.array(z.string())),
+                readable: z.optional(z.boolean()),
+                useCache: z.optional(z.boolean()),
+                forceUpdate: z.optional(z.boolean())
+            })).query(async (opts) => {
+                return await this.dataCore.getStudyFilesLatest(
+                    opts.ctx.req.user,
+                    opts.input.studyId,
+                    opts.input.fieldIds,
+                    opts.input.versionId,
+                    opts.input.readable,
+                    opts.input.useCache,
+                    opts.input.forceUpdate
+                );
+            }),
             /**
              * Get the file of a study.
             *
diff --git a/packages/itmat-cores/src/coreFunc/dataCore.ts b/packages/itmat-cores/src/coreFunc/dataCore.ts
index 27df16f3a..0e9c9d028 100644
--- a/packages/itmat-cores/src/coreFunc/dataCore.ts
+++ b/packages/itmat-cores/src/coreFunc/dataCore.ts
@@ -137,7 +137,7 @@ export class DataCore {
             $replaceRoot: {
                 newRoot: '$doc'
             }
-        }]).toArray();
+        }], { allowDiskUse: true }).toArray();
         return fields.filter(el => el.life.deletedTime === null);
     }
     /**
@@ -882,23 +882,9 @@ export class DataCore {
         }
         const fieldIds: string[] | undefined = selectedFieldIds;
-
-        // copied from getDataByRoles
         const matchFilter: Filter<IData> = { studyId: studyId };
-        if (fieldIds) {
-            matchFilter.fieldId = { $in: fieldIds };
-
-        }
-        const groupKeys: Record<string, string> = {};
-        for (const key of (config.properties as IStudyConfig).defaultVersioningKeys) {
-            let usedKey: string = key;
-            if (key.startsWith('properties.')) {
-                usedKey = key.split('.')[1];
-            }
-            groupKeys[usedKey] = `$${key}`;
-        }
         const roleArr: Filter<IData>[] = [];
         for (const role of roles) {
@@ -907,14 +893,17 @@ export class DataCore {
                 if (role.dataPermissions[i].fields.length === 0) {
                     continue;
                 }
-                const obj: Filter<IData> = {
+                const obj = {
                     fieldId: { $in: role.dataPermissions[i].fields.map(el => new RegExp(el)) }
                 };
                 if (role.dataPermissions[i].dataProperties) {
                     for (const key of Object.keys(role.dataPermissions[i].dataProperties)) {
-                        obj[`properties.${key}`] = { $in: role.dataPermissions[i].dataProperties[key].map((el: string | RegExp) => new RegExp(el)) };
+                        obj[`properties.${key}`] = { $in: role.dataPermissions[i].dataProperties[key].map(el => new RegExp(el)) };
                     }
                 }
+                if (!role.dataPermissions[i].includeUnVersioned) {
+                    obj['dataVersion'] = { $ne: null };
+                }
                 permissionArr.push(obj);
             }
             if (permissionArr.length === 0) {
@@ -922,15 +911,200 @@ export class DataCore {
             }
             roleArr.push({ $or: permissionArr });
         }
-        return await this.db.collections.colddata_collection.aggregate([{
-            $match: { ...matchFilter }
-        }, {
-            $match: { $or: roleArr }
-        }, {
-            $project: {
-                _id: 0
+        const dataVersions: Array<string | null> = study.dataVersions.map(el => el.id);
+        dataVersions.push(null);
+        // we need to query each field based on its properties
+        const availableFields = (await this.getStudyFields(requester, studyId, dataVersions)).reduce((a, c) => {
+            a[c.fieldId] = c;
+            return a;
+        }, {});
+        const availableFieldIds = Object.keys(availableFields);
+        const refactoredFieldIds = fieldIds ?? Object.keys(availableFields);
+        let res: IData[] = [];
+
+        const queries = refactoredFieldIds.map(async (fieldId) => {
+            if (availableFieldIds.includes(fieldId) || availableFieldIds.some(el => new RegExp(el).test(fieldId))) {
+                const propertyFilter: Record<string, string> = {};
+                if (availableFields[fieldId].properties) {
+                    for (const property of availableFields[fieldId].properties) {
+                        propertyFilter[`${property.name}`] = `$properties.${property.name}`;
+                    }
+                }
+
+                const data = await this.db.collections.colddata_collection.aggregate([{
+                    $match: { ...matchFilter, fieldId: fieldId }
+                }, {
+                    $match: { $or: roleArr }
+                }, {
+                    $sort: {
+                        'life.createdTime': -1
+                    }
+                }, {
+                    $group: {
+                        _id: {
+                            ...propertyFilter
+                        },
+                        latestDocument: { $first: '$$ROOT' }
+                    }
+                }, {
+                    $replaceRoot: { newRoot: '$latestDocument' }
+                }, {
+                    $project: {
+                        _id: 0,
+                        id: 0,
+                        dataVersion: 0,
+                        life: 0,
+                        metadata: 0
+                    }
+                }], { allowDiskUse: true }).toArray();
+
+                return data;
+            } else {
+                return [];
             }
-        }], { allowDiskUse: true }).toArray();
+        });
+
+        // Execute all queries in parallel and wait for the results
+        const results = await Promise.all(queries);
+
+        // Flatten the array of results and concatenate it into the final `res` array
+        res = results.flat();
+        return res;
+
+    }
+
+    /**
+     * Get the latest files of a study. Note in this case the file will have a version. This function reuse the getData function.
+     *
+     * @param requester - The requester.
+     * @param studyId - The id of the study.
+     * @param selectedFieldIds - The list of regular expressions of fields to return.
+     * @param dataVersion - The list of data versions to return.
+     * @returns IFile[] - The list of objects of IFile
+     */
+    public async getStudyFilesLatest(requester: IUserWithoutToken | undefined, studyId: string, selectedFieldIds?: string[], dataVersion?: string | null | Array<string | null>, readable?: boolean, useCache?: boolean, forceUpdate?: boolean) {
+        if (!requester) {
+            throw new CoreError(
+                enumCoreErrors.NOT_LOGGED_IN,
+                enumCoreErrors.NOT_LOGGED_IN
+            );
+        }
+        const roles = (await this.permissionCore.getRolesOfUser(requester, requester.id, studyId));
+        if (roles.length === 0) {
+            throw new CoreError(
+                enumCoreErrors.NO_PERMISSION_ERROR,
+                enumCoreErrors.NO_PERMISSION_ERROR
+            );
+        }
+
+        const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null });
+        if (!study) {
+            throw new CoreError(
+                enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY,
+                'Study does not exist.'
+            );
+        }
+
+        const config = await this.db.collections.configs_collection.findOne({ type: enumConfigType.STUDYCONFIG, key: studyId });
+        if (!config) {
+            throw new CoreError(
+                enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY,
+                'Study config not found.'
+            );
+        }
+
+        const readFiles = async () => {
+            let fieldIds: string[] | undefined = selectedFieldIds;
+            let availableDataVersions: Array<string | null> = [];
+            if (dataVersion === null) {
+                availableDataVersions.push(null);
+            } else if (typeof dataVersion === 'string') {
+                availableDataVersions.push(dataVersion);
+            } else if (Array.isArray(dataVersion)) {
+                availableDataVersions.push(...dataVersion);
+            } else {
+                availableDataVersions = (study.currentDataVersion === -1 ? [] : study.dataVersions.filter((__unused__el, index) => index <= study.currentDataVersion)).map(el => el.id);
+                availableDataVersions.push(null);
+            }
+            if (!fieldIds) {
+                fieldIds = (await this.getStudyFields(requester, studyId, availableDataVersions)).filter(el => el.dataType === enumDataTypes.FILE).map(el => el.fieldId);
+            } else {
+                const fields = await this.db.collections.field_dictionary_collection.find({ studyId: studyId, fieldId: { $in: fieldIds } }).toArray();
+                fieldIds = fields.filter(el => el.dataType === enumDataTypes.FILE).map(el => el.fieldId);
+            }
+            if (fieldIds.length === 0) {
+                return [];
+            }
+            const fileDataRecords: IData[] = (await this.getDataLatest(
+                requester,
+                studyId,
+                fieldIds
+            )) as unknown as IData[];
+            if (!Array.isArray(fileDataRecords)) {
+                return [];
+            }
+            const batchSize = 10000; // Define a suitable batch size
+            const promises: Promise<IFile[]>[] = [];
+
+            for (let i = 0; i < fileDataRecords.length; i += batchSize) {
+                const batchIds = fileDataRecords.slice(i, i + batchSize).map(el => String(el.value));
+                const promise = this.db.collections.files_collection.find({ id: { $in: batchIds } }, { allowDiskUse: true }).toArray();
+                promises.push(promise);
+            }
+
+            const files = (await Promise.all(promises)).flat(); // Flatten the array of arrays
+            if (readable) {
+                const users = await this.db.collections.users_collection.find({}).toArray();
+                const edited = [...files];
+                for (const file of edited) {
+                    const user = users.find(el => el.id === file.life.createdUser);
+                    file.life.createdUser = user ? `${user.firstname} ${user.lastname}` : file.life.createdUser;
+                }
+                return edited;
+            } else {
+                return files;
+            }
+        };
+
+        if (useCache) {
+            const hash = this.utilsCore.computeHash({
+                query: 'getStudyFiles',
+                studyId: studyId,
+                roles: roles,
+                fieldIds: selectedFieldIds
+            });
+            const hashedInfo = await this.db.collections.cache_collection.find({ 'keyHash': hash, 'life.deletedTime': null, 'status': enumCacheStatus.INUSE }).sort({ 'life.createdTime': -1 }).limit(1).toArray();
+            // if hash is not found, generate the new summary and cache it
+            if (forceUpdate || !hashedInfo || hashedInfo.length === 0) {
+                const newFiles = await readFiles();
+                const info = await convertToBufferAndUpload(this.fileCore, requester, { files: newFiles });
+                await this.db.collections.cache_collection.insertOne({
+                    id: uuid(),
+                    keyHash: hash,
+                    uri: info.uri,
+                    status: enumCacheStatus.INUSE,
+                    keys: {
+                        query: 'getStudyFiles',
+                        studyId: studyId,
+                        roles: roles,
+                        fieldIds: selectedFieldIds
+                    },
+                    type: enumCacheType.API,
+                    life: {
+                        createdTime: Date.now(),
+                        createdUser: requester.id,
+                        deletedTime: null,
+                        deletedUser: null
+                    },
+                    metadata: {}
+                });
+                return newFiles;
+            } else {
+                return ((await getJsonFileContents(this.objStore, 'cache', hashedInfo[0].uri)) as unknown as { files: IFile[] }).files;
+            }
+        } else {
+            return await readFiles();
+        }
     }
@@ -1007,7 +1181,16 @@ export class DataCore {
             if (!Array.isArray(fileDataRecords)) {
                 return [];
             }
-            const files = await this.db.collections.files_collection.find({ id: { $in: fileDataRecords.map(el => String(el.value)) } }).toArray();
+            const batchSize = 10000; // Define a suitable batch size
+            const promises: Promise<IFile[]>[] = [];
+
+            for (let i = 0; i < fileDataRecords.length; i += batchSize) {
+                const batchIds = fileDataRecords.slice(i, i + batchSize).map(el => String(el.value));
+                const promise = this.db.collections.files_collection.find({ id: { $in: batchIds } }, { allowDiskUse: true }).toArray();
+                promises.push(promise);
+            }
+
+            const files = (await Promise.all(promises)).flat(); // Flatten the array of arrays
             if (readable) {
                 const users = await this.db.collections.users_collection.find({}).toArray();
                 const edited = [...files];
@@ -1032,7 +1215,7 @@ export class DataCore {
             // if hash is not found, generate the new summary and cache it
             if (forceUpdate || !hashedInfo || hashedInfo.length === 0) {
                 const newFiles = await readFiles();
-                const info = await convertToBufferAndUpload(this.fileCore, requester, newFiles);
+                const info = await convertToBufferAndUpload(this.fileCore, requester, { files: newFiles });
                 await this.db.collections.cache_collection.insertOne({
                     id: uuid(),
                     keyHash: hash,
@@ -1055,7 +1238,7 @@ export class DataCore {
                 });
                 return newFiles;
             } else {
-                return (await getJsonFileContents(this.objStore, 'cache', hashedInfo[0].uri)) as unknown as IFile[];
+                return ((await getJsonFileContents(this.objStore, 'cache', hashedInfo[0].uri)) as unknown as { files: IFile[] }).files;
             }
         } else {
             return await readFiles();
@@ -1077,7 +1260,7 @@ export class DataCore {
                 if (role.dataPermissions[i].fields.length === 0) {
                     continue;
                 }
-                const obj = {
+                const obj: Filter<IData> = {
                     fieldId: { $in: role.dataPermissions[i].fields.map(el => new RegExp(el)) }
                 };
                 if (role.dataPermissions[i].dataProperties) {
                     for (const key of Object.keys(role.dataPermissions[i].dataProperties)) {
@@ -1096,22 +1279,23 @@ export class DataCore {
             roleArr.push({ $or: permissionArr });
         }
-
         // we need to query each field based on its properties
         const availableFields = (await this.getStudyFields(requester, studyId, dataVersions)).reduce((a, c) => {
             a[c.fieldId] = c;
             return a;
         }, {});
+
         const availableFieldIds = Object.keys(availableFields);
         const refactoredFieldIds = fieldIds ?? Object.keys(availableFields);
-        let res: IData[] = [];
-        for (const fieldId of refactoredFieldIds) {
+
+        const queryField = async (fieldId: string) => {
             if (availableFieldIds.includes(fieldId) || availableFieldIds.some(el => new RegExp(el).test(fieldId))) {
-                const propertyFilter = {};
+                const propertyFilter: Record<string, string> = {};
                 if (availableFields[fieldId].properties) {
                     for (const property of availableFields[fieldId].properties) {
                         propertyFilter[`${property.name}`] = `$properties.${property.name}`;
                     }
                 }
+
                 const data = await this.db.collections.data_collection.aggregate([{
                     $match: { ...matchFilter, fieldId: fieldId }
                 }, {
@@ -1129,10 +1313,32 @@ export class DataCore {
                     }
                 }, {
                     $replaceRoot: { newRoot: '$latestDocument' }
+                }, {
+                    $project: {
+                        _id: 0, // Exclude the _id field
+                        id: 0, // Exclude the id field
+                        life: 0, // Exclude the life field
+                        metadata: 0 // Exclude the metadata field
+                    }
                 }], { allowDiskUse: true }).toArray();
-                res = res.concat(data);
+
+                return data;
             }
-        }
+
+            return [];
+        };
+
+        const timedQueryField = async (fieldId: string) => {
+            const result = await queryField(fieldId);
+            return result; // Return the original result without modification
+        };
+
+        let res: IData[] = [];
+        const promises = refactoredFieldIds.map(async (fieldId) => timedQueryField(fieldId));
+        const results = await Promise.all(promises);
+
+        // Optionally flatten the results if necessary
+        res = results.flat();
         return res;
     }
@@ -1364,7 +1570,6 @@ export class DataCore {
                 }
             }
         ], { allowDiskUse: true }).toArray();
-        console.log('dataByUploaders done');
         const events = ['GET_DATA_RECORDS', 'GET_STUDY_FIELDS', 'GET_STUDY', 'data.getStudyFields', 'data.getStudyData', 'data.getStudyDataLatest', 'data.getFiles'
         ];
@@ -1392,7 +1597,6 @@ export class DataCore {
                 }
            }
         ], { allowDiskUse: true }).toArray();
-        console.log('dataByUsers done');
         return {
             numberOfDataRecords: numberOfDataRecords,
             numberOfDataAdds: numberOfDataAdds,
diff --git a/packages/itmat-interface/test/GraphQLTests/file.test.ts b/packages/itmat-interface/test/GraphQLTests/file.test.ts
index 7e58e2d66..ad2c81ecb 100644
--- a/packages/itmat-interface/test/GraphQLTests/file.test.ts
+++ b/packages/itmat-interface/test/GraphQLTests/file.test.ts
@@ -232,13 +232,14 @@ if (global.hasMinio) {
                 studyId: createdStudy.id,
                 projectId: null,
                 fileSize: '21',
-                description: JSON.stringify({ participantId: 'I7N3G6G', deviceId: 'MMM7N3G6G', startDate: 1593827200000, endDate: 1595296000000 }),
+                description: JSON.stringify({ participantId: 'I7N3G6G', deviceId: 'MMM7N3G6G', startDate: 1593827200000, endDate: 1595296000000, subjectId: 'I7N3G6G' }),
                 uploadedBy: authorisedUserProfile.id,
                 hash: 'b0dc2ae76cdea04dcf4be7fcfbe36e2ce8d864fe70a1895c993ce695274ba7a0',
                 metadata: {
                     deviceId: 'MMM7N3G6G',
                     endDate: 1595296000000,
                     participantId: 'I7N3G6G',
+                    subjectId: 'I7N3G6G',
                     startDate: 1593827200000
                 }
             });
@@ -318,13 +319,14 @@ if (global.hasMinio) {
                 studyId: createdStudy.id,
                 projectId: null,
                 fileSize: '0',
-                description: JSON.stringify({ participantId: 'IR6R4AR', deviceId: 'AX6VJH6F6', startDate: 1590976000000, endDate: 1593740800000 }),
+                description: JSON.stringify({ participantId: 'IR6R4AR', deviceId: 'AX6VJH6F6', startDate: 1590976000000, endDate: 1593740800000, subjectId: 'IR6R4AR' }),
                 uploadedBy: authorisedUserProfile.id,
                 hash: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
                 metadata: {
                     deviceId: 'AX6VJH6F6',
                     endDate: 1593740800000,
                     participantId: 'IR6R4AR',
+                    subjectId: 'IR6R4AR',
                     startDate: 1590976000000
                 }
             });
diff --git a/packages/itmat-ui-react/src/components/profile/keys.tsx b/packages/itmat-ui-react/src/components/profile/keys.tsx
index fd3f229aa..9198fdae6 100644
--- a/packages/itmat-ui-react/src/components/profile/keys.tsx
+++ b/packages/itmat-ui-react/src/components/profile/keys.tsx
@@ -1,13 +1,12 @@
 import React, { FunctionComponent, useState } from 'react';
 import LoadSpinner from '../reusable/loadSpinner';
 // import { ProjectSection } from '../users/projectSection';
-import { Form, Input, Button, List, Table, message, Modal, Upload, Popconfirm } from 'antd';
-import { CopyOutlined, UploadOutlined } from '@ant-design/icons';
+import { Button, List, Table, message, Modal, Popconfirm } from 'antd';
+import { CopyOutlined } from '@ant-design/icons';
 import css from './profile.module.css';
 import { trpc } from '../../utils/trpc';
 import copy from 'copy-to-clipboard';
 import { Key } from '../../utils/dmpCrypto/dmp.key';
-const { TextArea } = Input;

 export const MyKeys: FunctionComponent = () => {
     const whoAmI = trpc.user.whoAmI.useQuery();
@@ -68,13 +67,6 @@ export const MyKeys: FunctionComponent = () => {
             render: (__unused__value, record) => {
                 return (new Date(record.life.createdTime)).toDateString();
             }
-        }, {
-            title: 'Token Generation',
-            dataIndex: 'tokenGeneration',
-            key: 'value',
-            render: (_, record) => {
-                return ;
-            }
         }, {
             title: '',
             dataIndex: 'delete',
@@ -152,8 +144,16 @@ const KeyGeneration: React.FunctionComponent<{ userId: string }> = ({ userId })
             open={isKeyGenOpen}
             width={'80%'}
             onOk={() => {
+                void (async () => {
+                    await registerPubkey.mutate({
+                        pubkey: exportedKeyPair.publicKey,
+                        signature: signature,
+                        associatedUserId: userId
+                    });
+                })();
                 setIsKeyGenOpen(false);
             }}
+            okText={completedKeypairGen ? 'Register this key pair' : 'Ok'}
             onCancel={() => {
                 setIsKeyGenOpen(false);
             }}
@@ -170,39 +170,21 @@ const KeyGeneration: React.FunctionComponent<{ userId: string }> = ({ userId })
                 })();
             }}>
                 Do not have public/private keypair? Generate one (In-browser)!
-
+

             {completedKeypairGen ?
-                Public key:
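
Usage sketch (not part of the diff): the snippet below shows how a client component could call the new getFilesLatest procedure through the same tRPC React hooks that keys.tsx already uses. It assumes the DataRouter is mounted under the data namespace (as the existing 'data.getFiles' audit-event names in dataCore.ts suggest) and that the returned IFile objects expose id and fileName; those names are illustrative assumptions, not confirmed by this diff.

import React, { FunctionComponent } from 'react';
import { trpc } from '../../utils/trpc';

// Hypothetical consumer of the new endpoint; `data.getFilesLatest` and the
// IFile field names used below are assumptions for illustration only.
export const LatestStudyFiles: FunctionComponent<{ studyId: string }> = ({ studyId }) => {
    // versionId and fieldIds are optional; omitting them returns the latest
    // file for every FILE-typed field the requester is permitted to read.
    const files = trpc.data.getFilesLatest.useQuery({
        studyId: studyId,
        readable: true, // resolve createdUser ids to "firstname lastname"
        useCache: true  // reuse a cache_collection entry when one exists
    });

    if (files.isLoading) {
        return <p>Loading files...</p>;
    }
    if (files.isError) {
        return <p>Failed to load files.</p>;
    }
    return (
        <ul>
            {(files.data ?? []).map((file) => (
                <li key={file.id}>{file.fileName}</li>
            ))}
        </ul>
    );
};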