diff --git a/packages/itmat-cores/config/config.sample.json b/packages/itmat-cores/config/config.sample.json index 622ca4650..284f61f7b 100644 --- a/packages/itmat-cores/config/config.sample.json +++ b/packages/itmat-cores/config/config.sample.json @@ -17,7 +17,9 @@ "files_collection": "FILES_COLLECTION", "sessions_collection": "SESSIONS_COLLECTION", "pubkeys_collection": "PUBKEY_COLLECTION", - "standardizations_collection": "STANDARDIZATION_COLLECTION" + "standardizations_collection": "STANDARDIZATION_COLLECTION", + "colddata_collection": "COLDDATA_COLLECTION", + "cache_collection": "CACHE_COLLECTION" } }, "server": { diff --git a/packages/itmat-cores/src/GraphQLCore/studyCore.ts b/packages/itmat-cores/src/GraphQLCore/studyCore.ts index ad197e194..d81f542d2 100644 --- a/packages/itmat-cores/src/GraphQLCore/studyCore.ts +++ b/packages/itmat-cores/src/GraphQLCore/studyCore.ts @@ -1184,7 +1184,7 @@ export class StudyCore { error = `Field ${dataClip.fieldId}: Cannot parse as decimal.`; break; } - if (!/^\d+(.\d+)?$/.test(dataClip.value)) { + if (!/^-?\d+(\.\d+)?$/.test(dataClip.value)) { error = `Field ${dataClip.fieldId}: Cannot parse as decimal.`; break; } diff --git a/packages/itmat-cores/src/database/database.ts b/packages/itmat-cores/src/database/database.ts index 272edb861..a9da52b1f 100644 --- a/packages/itmat-cores/src/database/database.ts +++ b/packages/itmat-cores/src/database/database.ts @@ -1,4 +1,4 @@ -import type { IField, IFile, IJobEntry, ILogEntry, IOrganisation, IProject, IPubkey, IQueryEntry, IRole, IStudy, IUser, IStandardization, IConfig, IData, IDrive } from '@itmat-broker/itmat-types'; +import type { IField, IFile, IJobEntry, ILogEntry, IOrganisation, IProject, IPubkey, IQueryEntry, IRole, IStudy, IUser, IStandardization, IConfig, IData, IDrive, ICache } from '@itmat-broker/itmat-types'; import { Database as DatabaseBase, IDatabaseBaseConfig } from '@itmat-broker/itmat-commons'; import type { Collection } from 'mongodb'; @@ -18,7 +18,9 @@ export 
interface IDatabaseConfig extends IDatabaseBaseConfig { data_collection: string, standardizations_collection: string, configs_collection: string, - drives_collection: string + drives_collection: string, + colddata_collection: string, + cache_collection: string }; } @@ -37,6 +39,8 @@ export interface IDatabaseCollectionConfig { data_collection: Collection, standardizations_collection: Collection, configs_collection: Collection, - drives_collection: Collection + drives_collection: Collection, + colddata_collection: Collection, + cache_collection: Collection } export type DBType = DatabaseBase; diff --git a/packages/itmat-cores/src/index.ts b/packages/itmat-cores/src/index.ts index a67431da3..b2f54f81e 100644 --- a/packages/itmat-cores/src/index.ts +++ b/packages/itmat-cores/src/index.ts @@ -15,6 +15,8 @@ export * from './trpcCore/driveCore'; export * from './trpcCore/fileCore'; export * from './trpcCore/studyCore'; export * from './trpcCore/userCore'; +export * from './trpcCore/dataCore'; +export * from './trpcCore/transformationCore'; export * from './trpcCore/permissionCore'; export * from './rest/fileDownload'; export * from './authentication/pubkeyAuthentication'; diff --git a/packages/itmat-cores/src/trpcCore/dataCore.ts b/packages/itmat-cores/src/trpcCore/dataCore.ts new file mode 100644 index 000000000..61d8830c4 --- /dev/null +++ b/packages/itmat-cores/src/trpcCore/dataCore.ts @@ -0,0 +1,1311 @@ +import { IField, enumDataTypes, ICategoricalOption, IValueVerifier, IGenericResponse, enumConfigType, defaultSettings, IAST, enumConditionOps, enumFileTypes, enumFileCategories, IFieldProperty, IFile, IData, enumASTNodeTypes, IRole, IStudyConfig, enumUserTypes, enumCoreErrors, IUserWithoutToken, CoreError, enumDataAtomicPermissions, enumDataTransformationOperation, enumCacheStatus, enumCacheType, FileUpload } from '@itmat-broker/itmat-types'; +import { v4 as uuid } from 'uuid'; +import { DBType } from '../database/database'; +import { TRPCFileCore } from 
'./fileCore'; +import { TRPCPermissionCore } from './permissionCore'; +import { makeGenericReponse } from '../utils'; +import { TRPCUtilsCore } from './utilsCore'; +import { Filter } from 'mongodb'; +import { TRPCDataTransformationCore } from './transformationCore'; +import { Readable } from 'stream'; + +type IDataTransformationClip = Record; + +type IDataTransformationClipArray = IDataTransformationClip[]; + +interface IDataInput { + fieldId: string; + value: string; // null for deleted data + properties?: Record; +} + +interface ValueVerifierInput { + formula: IAST; + condition: enumConditionOps; + value: string | number; + parameters: Record; +} + +interface CategoticalOptionInput { + code: string; + description: string; +} + +interface CreateFieldInput { + studyId: string; + fieldName: string; + fieldId: string; + description?: string; + dataType: enumDataTypes; + categoricalOptions?: CategoticalOptionInput[]; + unit?: string; + comments?: string; + verifier?: ValueVerifierInput[][]; + properties?: IFieldProperty[]; +} + +type EditFieldInput = CreateFieldInput; + +export class TRPCDataCore { + db: DBType; + fileCore: TRPCFileCore; + permissionCore: TRPCPermissionCore; + utilsCore: TRPCUtilsCore; + dataTransformationCore: TRPCDataTransformationCore; + constructor(db: DBType, fileCore: TRPCFileCore, permissionCore: TRPCPermissionCore, utilsCore: TRPCUtilsCore, dataTransformationCore: TRPCDataTransformationCore) { + this.db = db; + this.fileCore = fileCore; + this.permissionCore = permissionCore; + this.utilsCore = utilsCore; + this.dataTransformationCore = dataTransformationCore; + } + + /** + * Get the list of fields of a study. Note, duplicate fields will be joined and only remain the latest one. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param dataVersions - The data version; if not specified, use the latest one (include all previous ones) by default. + * @param selectedFields - The list of ids of fields to return. 
+ * + * @return IField[] - The list of objects of IField. + */ + public async getStudyFields(requester: IUserWithoutToken | undefined, studyId: string, dataVersion?: string | null | Array, selectedFields?: string[]) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + const roles = await this.permissionCore.getRolesOfUser(requester, studyId); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + const regularExpressions: string[] = []; + for (const role of roles) { + for (const permission of role.dataPermissions) { + for (const fieldRE of permission.fields) { + if ((permission.permission & 4) === 4) { + regularExpressions.push(fieldRE); + } + } + } + } + let availableDataVersions: Array = []; + if (dataVersion === null) { + availableDataVersions.push(null); + } else if (typeof dataVersion === 'string') { + availableDataVersions.push(dataVersion); + } else if (Array.isArray(dataVersion)) { + availableDataVersions.push(...dataVersion); + } else { + availableDataVersions = (study.currentDataVersion === -1 ? [] : study.dataVersions.filter((__unused__el, index) => index <= study.currentDataVersion)).map(el => el.id); + } + + const fields = await this.db.collections.field_dictionary_collection.aggregate([{ + $match: { + $and: [ + { studyId: studyId, dataVersion: { $in: availableDataVersions } }, + { fieldId: selectedFields ? 
{ $in: selectedFields } : { $in: regularExpressions.map(el => new RegExp(el)) } }, + { fieldId: { $in: [new RegExp('^.*$')] } } + ] + } + }, { + $sort: { + 'life.createdTime': -1 + } + }, { + $group: { + _id: '$fieldId', + doc: { $first: '$$ROOT' } + } + }, { + $replaceRoot: { + newRoot: '$doc' + } + }]).toArray(); + return fields.filter(el => el.life.deletedTime === null); + } + /** + * Create a field of a study. To adjust to data versioning, create an existing field wil not throw an error. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param fieldName - The name of the field. + * @param fieldId - The value of the id of the field. Should be unique. + * @param description - The description of the field. + * @param dataType - The dataType of the field. + * @param categoricalOptions - The options of the field if the field is a categorical field. + * @param unit - The unit of the field. + * @param comments - The comments of the field. + * @param verifier - The verifier of the field. + * @param properties - The properties of the field. + * + * @return IField + */ + public async createField(requester: IUserWithoutToken | undefined, fieldInput: CreateFieldInput): Promise { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const roles = await this.permissionCore.getRolesOfUser(requester, fieldInput.studyId); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + let hasPermission = false; + for (const role of roles) { + for (const permission of role.dataPermissions) { + for (const fieldRE of permission.fields) { + if (new RegExp(fieldRE).test(fieldInput.fieldId) && (permission.permission & 2) === 2) { + hasPermission = true; + } + } + } + } + + if (!hasPermission) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + 'No permission to create this field.' 
+ ); + } + + const errors = this.validateFieldEntry(fieldInput); + if (errors.length > 0) { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + errors[0] + ); + } + + // add id and life for verifier; + const verifierWithId: IValueVerifier[][] = []; + if (fieldInput.verifier) { + for (let i = 0; i < fieldInput.verifier.length; i++) { + verifierWithId.push([]); + for (let j = 0; j < fieldInput.verifier[i].length; j++) { + verifierWithId[verifierWithId.length - 1].push({ + ...fieldInput.verifier[i][j] + }); + } + } + } + + let categoricalOptionsWithId: ICategoricalOption[] | undefined = undefined; + if (fieldInput.categoricalOptions) { + categoricalOptionsWithId = []; + for (let i = 0; i < fieldInput.categoricalOptions.length; i++) { + categoricalOptionsWithId.push({ + ...fieldInput.categoricalOptions[i], + id: uuid(), + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + } + } + + const fieldEntry: IField = { + id: uuid(), + studyId: fieldInput.studyId, + fieldId: fieldInput.fieldId, + fieldName: fieldInput.fieldName, + description: fieldInput.description, + dataType: fieldInput.dataType, + categoricalOptions: categoricalOptionsWithId, + unit: fieldInput.unit, + comments: fieldInput.comments, + dataVersion: null, + verifier: verifierWithId, + properties: fieldInput.properties, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + + await this.db.collections.field_dictionary_collection.insertOne(fieldEntry); + + return fieldEntry; + } + /** + * Edit a field of a study. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param fieldName - The name of the field. + * @param fieldId - The value of the id of the field. Should be unique. + * @param description - The description of the field. + * @param dataType - The dataType of the field. 
+ * @param categoricalOptions - The options of the field if the field is a categorical field. + * @param unit - The unit of the field. + * @param comments - The comments of the field. + * @param verifier - The verifier of the field. + * @param properties - The properties of the field. + * + * @return IField + */ + public async editField(requester: IUserWithoutToken | undefined, fieldInput: EditFieldInput): Promise { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const roles = await this.permissionCore.getRolesOfUser(requester, fieldInput.studyId); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + let hasPermission = false; + for (const role of roles) { + for (const permission of role.dataPermissions) { + for (const fieldRE of permission.fields) { + if (new RegExp(fieldRE).test(fieldInput.fieldId) && (permission.permission & 2) === 2) { + hasPermission = true; + } + } + } + } + + if (!hasPermission) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + 'No permission to create this field.' + ); + } + + const field = await this.db.collections.field_dictionary_collection.findOne({ 'studyId': fieldInput.studyId, 'fieldId': fieldInput.fieldId, 'life.deletedTime': null }); + if (!field) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Field does not exist.' 
+ ); + } + + const errors = this.validateFieldEntry(fieldInput); + + if (errors.length > 0) { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + errors[0] + ); + } + + const verifierWithId: IValueVerifier[][] = []; + if (fieldInput.verifier) { + for (let i = 0; i < fieldInput.verifier.length; i++) { + verifierWithId.push([]); + for (let j = 0; j < fieldInput.verifier[i].length; j++) { + verifierWithId[verifierWithId.length - 1].push({ + ...fieldInput.verifier[i][j] + }); + } + } + } + + let categoricalOptionsWithId: ICategoricalOption[] | undefined = field.categoricalOptions; + if (fieldInput.categoricalOptions) { + categoricalOptionsWithId = []; + for (let i = 0; i < fieldInput.categoricalOptions.length; i++) { + categoricalOptionsWithId.push({ + ...fieldInput.categoricalOptions[i], + id: uuid(), + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + } + } + + // insert directly + await this.db.collections.field_dictionary_collection.insertOne({ + id: uuid(), + studyId: fieldInput.studyId, + fieldId: fieldInput.fieldId, + fieldName: fieldInput.fieldName ?? field.fieldName, + description: fieldInput.description ?? field.description, + dataType: fieldInput.dataType ?? field.dataType, + categoricalOptions: categoricalOptionsWithId, + unit: fieldInput.unit ?? field.unit, + comments: fieldInput.comments ?? field.comments, + dataVersion: null, + verifier: fieldInput.verifier ? verifierWithId : field.verifier, + properties: fieldInput.properties ?? field.properties, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + + return makeGenericReponse(fieldInput.fieldId, true, undefined, `Field ${fieldInput.fieldId} has been edited.`); + } + + /** + * Delete a field of a study. + * + * @param requester - The requester. + * @param studyId - The id of the stduy. + * @param fieldId - The id of the field. 
+ * + * @return IGenericResponse + */ + public async deleteField(requester: IUserWithoutToken | undefined, studyId: string, fieldId: string) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const roles = await this.permissionCore.getRolesOfUser(requester, studyId); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + let hasPermission = false; + for (const role of roles) { + for (const permission of role.dataPermissions) { + for (const fieldRE of permission.fields) { + if (new RegExp(fieldRE).test(fieldId) && (permission.permission & 1) === 1) { + hasPermission = true; + } + } + } + } + + if (!hasPermission) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + 'No permission to delete this field.' + ); + } + + const field = (await this.db.collections.field_dictionary_collection.find({ studyId: studyId, fieldId: fieldId }).sort({ 'life.createdTime': -1 }).limit(1).toArray())[0]; + if (!field || field.life.deletedTime) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Field does not exist.' + ); + } + + await this.db.collections.field_dictionary_collection.insertOne({ + id: uuid(), + studyId: studyId, + fieldId: fieldId, + fieldName: field.fieldName, + description: field.description, + dataType: field.dataType, + categoricalOptions: field.categoricalOptions, + unit: field.unit, + comments: field.comments, + dataVersion: null, + verifier: field.verifier, + properties: field.properties, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: Date.now(), + deletedUser: requester.id + }, + metadata: {} + }); + return makeGenericReponse(fieldId, true, undefined, `Field ${fieldId} has been deleted.`); + } + + /** + * Validate field entry. This function only checks the input parameters without interacting with the database. 
+ * + * @param fieldInput - The field input object. + * + * @return array[] - The error array, empty for null errors. + */ + public validateFieldEntry(fieldInput: CreateFieldInput): string[] { + const errors: string[] = []; + // check missing field + const complusoryField: Array = [ + 'fieldId', + 'fieldName', + 'dataType' + ]; + for (const key of complusoryField) { + if (fieldInput[key] === undefined && fieldInput[key] === null) { + errors.push(`${key} should not be empty.`); + return errors; + } + } + + // only english letters, numbers and _ are allowed in fieldIds + if (!/^[a-zA-Z0-9_]*$/.test(fieldInput.fieldId || '')) { + errors.push('FieldId should contain letters, numbers and _ only.'); + return errors; + } + // data types + if (!Object.values(enumDataTypes).includes(fieldInput.dataType)) { + errors.push(`Data type shouldn't be ${fieldInput.dataType}: use 'INTEGER' for integer, 'DECIMAL' for decimal, 'STRING' for string, 'BOOLEAN' for boolean, 'DATETIME' for datetime, 'FILE' for file, 'JSON' for json and 'CATEGORICAL' for categorical.`); + return errors; + } + // check possiblevalues to be not-empty if datatype is categorical + if (fieldInput.dataType === enumDataTypes.CATEGORICAL) { + if (fieldInput.categoricalOptions !== undefined && fieldInput.categoricalOptions !== null) { + if (fieldInput.categoricalOptions.length === 0) { + errors.push(`${fieldInput.fieldId}-${fieldInput.fieldName}: possible values can't be empty if data type is categorical.`); + return errors; + } + for (let i = 0; i < fieldInput.categoricalOptions.length; i++) { + fieldInput.categoricalOptions[i]['id'] = uuid(); + } + } else { + errors.push(`${fieldInput.fieldId}-${fieldInput.fieldName}: possible values can't be empty if data type is categorical.`); + return errors; + } + } + return errors; + } + + /** + * Upload data clips to a study. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param data - The list of data clips. 
+ * + * @return IGenericResponse - The list of objects of IGenericResponse + */ + public async uploadData(requester: IUserWithoutToken | undefined, studyId: string, data: IDataInput[]) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + const availableDataVersions: Array = (study.currentDataVersion === -1 ? [] : study.dataVersions.filter((__unused__el, index) => index <= study.currentDataVersion)).map(el => el.id); + availableDataVersions.push(null); + const availableFields = await this.getStudyFields(requester, studyId, availableDataVersions); + const availableFieldsMapping: Record = availableFields.reduce((acc: Record, el: IField) => { + acc[el.fieldId] = el; + return acc; + }, {}); + + + const studyConfig = ((await this.db.collections.configs_collection.findOne({ type: enumConfigType.STUDYCONFIG, key: studyId }))?.properties ?? 
defaultSettings.studyConfig) as IStudyConfig; + + const response: IGenericResponse[] = []; + let bulk = this.db.collections.data_collection.initializeUnorderedBulkOp(); + let counter = -1; // index of the data + for (const dataClip of data) { + counter++; + const hasPermission = await this.permissionCore.checkFieldOrDataPermission(requester, studyId, dataClip, enumDataAtomicPermissions.WRITE); + if (!hasPermission) { + response.push(makeGenericReponse(counter.toString(), false, enumCoreErrors.NO_PERMISSION_ERROR, enumCoreErrors.NO_PERMISSION_ERROR)); + continue; + } + + if (!(dataClip.fieldId in availableFieldsMapping)) { + response.push(makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, `Field ${dataClip.fieldId}: Field not found`)); + continue; + } + + /* Check value is value */ + let error: IGenericResponse | undefined = undefined; + let parsedValue: unknown; + if (dataClip.value.toString() === studyConfig.defaultRepresentationForMissingValue) { + parsedValue = studyConfig.defaultRepresentationForMissingValue; + } else { + if (!(dataClip.fieldId in availableFieldsMapping)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, `Field ${dataClip.fieldId}: Field not found`); + response.push(error); + continue; + } + const field = availableFieldsMapping[dataClip.fieldId]; + switch (field.dataType) { + case enumDataTypes.DECIMAL: {// decimal + if (typeof (dataClip.value) !== 'string') { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as decimal.`); + break; + } + if (!/^-?\d+(\.\d+)?$/.test(dataClip.value)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as decimal.`); + break; + } + parsedValue = parseFloat(dataClip.value); + break; + } + case enumDataTypes.INTEGER: {// integer + if (typeof 
(dataClip.value) !== 'string') { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as integer.`); + break; + } + if (!/^-?\d+$/.test(dataClip.value)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as integer.`); + break; + } + parsedValue = parseInt(dataClip.value, 10); + break; + } + case enumDataTypes.BOOLEAN: {// boolean + if (typeof (dataClip.value) !== 'string') { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as boolean.`); + break; + } + if (dataClip.value.toString().toLowerCase() === 'true' || dataClip.value.toString().toLowerCase() === 'false') { + parsedValue = dataClip.value.toLowerCase() === 'true'; + } else { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as boolean.`); + break; + } + break; + } + case enumDataTypes.STRING: { + if (typeof (dataClip.value) !== 'string') { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as string.`); + break; + } + parsedValue = dataClip.value.toString(); + break; + } + case enumDataTypes.DATETIME: { + if (typeof (dataClip.value) !== 'string') { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as date. Value for date type must be in ISO format.`); + break; + } + const matcher = /^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?/; + if (!dataClip.value.match(matcher)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as date. 
Value for date type must be in ISO format.`); + break; + } + parsedValue = dataClip.value.toString(); + break; + } + case enumDataTypes.JSON: { + parsedValue = JSON.parse(dataClip.value); + break; + } + case enumDataTypes.FILE: { + parsedValue = dataClip.value; + break; + } + case enumDataTypes.CATEGORICAL: { + if (!(availableFieldsMapping[dataClip.fieldId].categoricalOptions)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as categorical, possible values not defined.`); + break; + } + if (!((availableFieldsMapping[dataClip.fieldId].categoricalOptions as ICategoricalOption[]).map((el) => el.code).includes(dataClip.value?.toString()))) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Cannot parse as categorical, value not in value list.`); + break; + } else { + parsedValue = dataClip.value?.toString(); + } + break; + } + default: { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Invalid data Type.`); + break; + } + } + const verifier = availableFieldsMapping[dataClip.fieldId].verifier; + if (verifier && verifier.length) { + const resEach: boolean[] = []; + for (let i = 0; i < verifier.length; i++) { + resEach.push(true); + for (let j = 0; j < verifier[i].length; j++) { + if ((typeof parsedValue !== 'string' && typeof parsedValue !== 'number') || !this.utilsCore.validValueWithVerifier(parsedValue, verifier[i][j])) { + resEach[resEach.length - 1] = false; + break; + } + } + } + if (resEach.every(el => !el)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId} value ${parsedValue}: Failed to pass the verifier.`); + } + } + if (field.properties) { + for (const property of field.properties) { + if (property.required && (!dataClip.properties || 
!dataClip.properties[property.name])) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId}: Property ${property.name} is required.`); + break; + } + if (property.verifier && dataClip.properties) { + const resEach: boolean[] = []; + for (let i = 0; i < property.verifier.length; i++) { + resEach.push(true); + for (let j = 0; j < property.verifier[i].length; j++) { + if ((typeof dataClip.properties[property.name] !== 'string' && typeof dataClip.properties[property.name] !== 'number') || + !this.utilsCore.validValueWithVerifier(dataClip.properties[property.name] as string | number, property.verifier[i][j])) { + resEach[resEach.length - 1] = false; + break; + } + } + } + if (resEach.every(el => !el)) { + error = makeGenericReponse(counter.toString(), false, enumCoreErrors.CLIENT_MALFORMED_INPUT, `Field ${dataClip.fieldId} value ${dataClip.properties[property.name]}: Property ${property.name} failed to pass the verifier.`); + } + } + } + } + } + if (error) { + response.push(error); + continue; + } else { + response.push(makeGenericReponse(counter.toString(), true, undefined, `Field ${dataClip.fieldId} value ${dataClip.value} successfully uploaded.`)); + } + + bulk.insert({ + id: uuid(), + studyId: study.id, + fieldId: dataClip.fieldId, + dataVersion: null, + value: parsedValue, + properties: dataClip.properties, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + + if (bulk.batches.length > 999) { + await bulk.execute(); + bulk = this.db.collections.data_collection.initializeUnorderedBulkOp(); + } + } + bulk.batches.length !== 0 && await bulk.execute(); + return response; + } + + /** + * Get the data of a study. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param fieldIds - The list of regular expressions of fields to return. 
+ * @param dataVersions - The list of data versions to return. + * @param aggregation - The pipeline of the data aggregation. + * @param useCache - Whether to use the cached data. + * @param forceUpdate - Whether to force update the cache. + * + * @return Partial[] - The list of objects of Partial + */ + public async getData(requester: IUserWithoutToken | undefined, studyId: string, selectedFieldIds?: string[], dataVersion?: string | null | Array, aggregation?: Record }>>, useCache?: boolean, forceUpdate?: boolean) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + const roles = (await this.permissionCore.getRolesOfUser(requester, studyId)); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + const config = await this.db.collections.configs_collection.findOne({ type: enumConfigType.STUDYCONFIG, key: studyId }); + if (!config) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study config not found.' + ); + } + + let fieldIds: string[] | undefined = selectedFieldIds; + let availableDataVersions: Array = []; + if (dataVersion === null) { + availableDataVersions.push(null); + } else if (typeof dataVersion === 'string') { + availableDataVersions.push(dataVersion); + } else if (Array.isArray(dataVersion)) { + availableDataVersions.push(...dataVersion); + } else { + availableDataVersions = (study.currentDataVersion === -1 ? 
[] : study.dataVersions.filter((__unused__el, index) => index <= study.currentDataVersion)).map(el => el.id); + } + if (!fieldIds) { + fieldIds = (await this.getStudyFields(requester, studyId, availableDataVersions)).map(el => el.fieldId); + } + + /** Check hash first */ + let hash: string; + if (useCache) { + hash = this.utilsCore.computeHash({ + query: 'getData', + requester: requester.id, + studyId: studyId, + fieldIds: fieldIds, + dataVersion: dataVersion, + aggregation: aggregation + }); + const hashedInfo = await this.db.collections.cache_collection.find({ 'keyHash': hash, 'life.deletedTime': null, 'status': enumCacheStatus.INUSE }).sort({ 'life.createdTime': -1 }).limit(1).toArray(); + if (hashedInfo.length === 1 && !forceUpdate) { + return hashedInfo[0]; + } else { + // raw data by the permission + const data = await this.getDataByRoles(roles, studyId, availableDataVersions, fieldIds); + // data versioning + const filteredData = this.dataTransformationCore.transformationAggregate(data, { raw: this.genVersioningAggregation((config.properties as IStudyConfig).defaultVersioningKeys, availableDataVersions.includes(null)) }); + if (!Array.isArray(filteredData['raw']) || (filteredData['raw'].length > 0 && Array.isArray(filteredData['raw'][0]))) { + throw new Error('Input data must be of type IDataTransformationClipArray (A[]) and not A[][]'); + } + // data transformation if aggregation is provided + const transformed = aggregation ? 
this.dataTransformationCore.transformationAggregate(filteredData['raw'] as IDataTransformationClipArray, aggregation) : filteredData; + // write to minio and cache collection + const info = await this.convertToBufferAndUpload(transformed, uuid() + '.json', requester); + const newHashInfo = { + id: uuid(), + keyHash: hash, + uri: info.uri, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + status: enumCacheStatus.INUSE, + keys: { + query: 'getData', + requester: requester, + studyId: studyId, + fieldIds: fieldIds, + dataVersions: availableDataVersions, + aggregation: aggregation + }, + type: enumCacheType.API, + metadata: {} + }; + await this.db.collections.cache_collection.insertOne(newHashInfo); + return newHashInfo; + } + } else { + // raw data by the permission + const data = await this.getDataByRoles(roles, studyId, availableDataVersions, fieldIds); + // data versioning + const filteredData = this.dataTransformationCore.transformationAggregate(data, { raw: this.genVersioningAggregation((config.properties as IStudyConfig).defaultVersioningKeys, availableDataVersions.includes(null)) }); + if (!Array.isArray(filteredData['raw']) || (filteredData['raw'].length > 0 && Array.isArray(filteredData['raw'][0]))) { + throw new Error('Input data must be of type IDataTransformationClipArray (A[]) and not A[][]'); + } + // data transformation if aggregation is provided + const transformed = aggregation ? this.dataTransformationCore.transformationAggregate(filteredData['raw'] as IDataTransformationClipArray, aggregation) : filteredData; + return transformed; + } + } + + /** + * This is a shortcut function to get the latest data of a study. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param fieldIds - The list of regular expressions of fields to return. 
+ * @returns + */ + public async getDataLatest(requester: IUserWithoutToken | undefined, studyId: string, selectedFieldIds?: string[]) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + const roles = (await this.permissionCore.getRolesOfUser(requester, studyId)); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + const config = await this.db.collections.configs_collection.findOne({ type: enumConfigType.STUDYCONFIG, key: studyId }); + if (!config) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study config not found.' + ); + } + + const fieldIds: string[] | undefined = selectedFieldIds; + + // copied from getDataByRoles + const matchFilter: Filter = { + studyId: studyId + }; + if (fieldIds) { + matchFilter.fieldId = { $in: fieldIds }; + + } + const groupKeys: Record = {}; + for (const key of (config.properties as IStudyConfig).defaultVersioningKeys) { + let usedKey: string = key; + if (key.startsWith('properties.')) { + usedKey = key.split('.')[1]; + } + groupKeys[usedKey] = `$${key}`; + } + + const roleArr: Filter[] = []; + for (const role of roles) { + const permissionArr: Filter[] = []; + for (let i = 0; i < role.dataPermissions.length; i++) { + if (role.dataPermissions[i].fields.length === 0) { + continue; + } + const obj: Filter = { + fieldId: { $in: role.dataPermissions[i].fields.map(el => new RegExp(el)) } + }; + if (role.dataPermissions[i].dataProperties) { + for (const key of Object.keys(role.dataPermissions[i].dataProperties)) { + obj[`properties.${key}`] = { $in: role.dataPermissions[i].dataProperties[key].map((el: string | 
RegExp) => new RegExp(el)) }; + } + } + permissionArr.push(obj); + } + if (permissionArr.length === 0) { + return []; + } + roleArr.push({ $or: permissionArr }); + } + return await this.db.collections.colddata_collection.aggregate([{ + $match: { ...matchFilter } + }, { + $match: { $or: roleArr } + }, { + $project: { + _id: 0 + } + }], { allowDiskUse: true }).toArray(); + + } + + /** + * Get the files of a study. This function reuse the getData function. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param selectedFieldIds - The list of regular expressions of fields to return. + * @param dataVersion - The list of data versions to return. + * @returns IFile[] - The list of objects of IFile + */ + public async getStudyFiles(requester: IUserWithoutToken | undefined, studyId: string, selectedFieldIds?: string[], dataVersion?: string | null | Array) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + const roles = (await this.permissionCore.getRolesOfUser(requester, studyId)); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + const config = await this.db.collections.configs_collection.findOne({ type: enumConfigType.STUDYCONFIG, key: studyId }); + if (!config) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study config not found.' 
+ ); + } + + let fieldIds: string[] | undefined = selectedFieldIds; + let availableDataVersions: Array = []; + if (dataVersion === null) { + availableDataVersions.push(null); + } else if (typeof dataVersion === 'string') { + availableDataVersions.push(dataVersion); + } else if (Array.isArray(dataVersion)) { + availableDataVersions.push(...dataVersion); + } else { + availableDataVersions = (study.currentDataVersion === -1 ? [] : study.dataVersions.filter((__unused__el, index) => index <= study.currentDataVersion)).map(el => el.id); + } + if (!fieldIds) { + fieldIds = (await this.getStudyFields(requester, studyId, availableDataVersions)).filter(el => el.dataType === enumDataTypes.FILE).map(el => el.fieldId); + } else { + const fields = await this.db.collections.field_dictionary_collection.find({ studyId: studyId, fieldId: { $in: fieldIds } }).toArray(); + fieldIds = fields.filter(el => el.dataType === enumDataTypes.FILE).map(el => el.fieldId); + } + + const fileDataRecords = await this.getData( + requester, + studyId, + fieldIds, + availableDataVersions + ); + + if (!Array.isArray(fileDataRecords)) { + return []; + } + return await this.db.collections.files_collection.find({ id: { $in: fileDataRecords.map(el => el.value) } }).toArray(); + } + + + public async getDataByRoles(roles: IRole[], studyId: string, dataVersions: Array, fieldIds?: string[]) { + const matchFilter: Filter = { + studyId: studyId, + dataVersion: { $in: dataVersions } + }; + if (fieldIds && fieldIds[0]) { + // we ask that for regular expressions, ^ and $ must be used + if (fieldIds[0][0] === '^' && fieldIds[0][fieldIds[0].length - 1] === '$') { + matchFilter.fieldId = { $in: fieldIds.map(el => new RegExp(el)) }; + } else { + matchFilter.fieldId = { $in: fieldIds }; + } + } + const roleArr: Filter[] = []; + for (const role of roles) { + const permissionArr: Filter[] = []; + for (let i = 0; i < role.dataPermissions.length; i++) { + if (role.dataPermissions[i].fields.length === 0) { + continue; + } + 
const obj = { + fieldId: { $in: role.dataPermissions[i].fields.map(el => new RegExp(el)) } + }; + if (role.dataPermissions[i].dataProperties) { + for (const key of Object.keys(role.dataPermissions[i].dataProperties)) { + obj[`properties.${key}`] = { $in: role.dataPermissions[i].dataProperties[key].map(el => new RegExp(el)) }; + } + } + if (!role.dataPermissions[i].includeUnVersioned) { + obj['dataVersion'] = { $ne: null }; + } + permissionArr.push(obj); + } + if (permissionArr.length === 0) { + return []; + } + roleArr.push({ $or: permissionArr }); + } + const res = await this.db.collections.data_collection.aggregate([{ + $match: { ...matchFilter } + }, { + $match: { $or: roleArr } + }], { allowDiskUse: true }).toArray(); + return res; + } + + public genVersioningAggregation(keys: string[], hasVersioning: boolean) { + const aggregation: Array<{ operationName: enumDataTransformationOperation, params: unknown }> = []; + if (!hasVersioning) { + aggregation.push({ + operationName: enumDataTransformationOperation.FILTER, params: { + filters: { + deleted: [{ + formula: { + type: enumASTNodeTypes.VARIABLE, + operation: null, + value: 'dataVersion', + parameter: {}, + children: null + }, + condition: enumConditionOps.GENERALISNOTNULL, + value: '', + parameters: {} + }] + } + } + }); + } + aggregation.push({ operationName: enumDataTransformationOperation.GROUP, params: { keys: keys, skipUnmatch: false } }); + aggregation.push({ operationName: enumDataTransformationOperation.LEAVEONE, params: { scoreFormula: { type: enumASTNodeTypes.VARIABLE, operator: null, value: 'life.createdTime', parameters: {}, children: null }, isDescend: true } }); + aggregation.push({ + operationName: enumDataTransformationOperation.FILTER, params: { + filters: { + deleted: [{ + formula: { + type: enumASTNodeTypes.VARIABLE, + operation: null, + value: 'life.deletedTime', + parameter: {}, + children: null + }, + condition: enumConditionOps.GENERALISNULL, + value: '', + parameters: {} + }] + } + } + 
}); + return aggregation; + } + + /** + * Delete data of a study. We add a deleted document in the database. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param fieldId - The id of the field. + * @param properties - The properties. + * + * @return IGenericResponse - The object of IGenericResponse. + */ + public async deleteData(requester: IUserWithoutToken | undefined, studyId: string, fieldId: string, properties: Record): Promise { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + await this.permissionCore.checkFieldOrDataPermission(requester, studyId, { fieldId: fieldId, properties: properties }, enumDataAtomicPermissions.DELETE); + await this.db.collections.data_collection.insertOne({ + id: uuid(), + studyId: studyId, + fieldId: fieldId, + dataVersion: null, + value: null, + properties: properties, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + return makeGenericReponse(undefined, undefined, undefined, 'Data deleted.'); + } + + /** + * Upload a file data. + * + * @param requester - The requester. + * @param studyId - The id of the study. + * @param file - The file to upload. + * @param fieldId - The id of the field. + * @param properties - The properties of the file. Need to match field properties if defined. 
+ * + * @return IData + */ + public async uploadFileData(requester: IUserWithoutToken | undefined, studyId: string, file: FileUpload, fieldId: string, properties?: string) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const roles = await this.permissionCore.getRolesOfUser(requester, studyId); + if (roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + const study = await this.db.collections.studies_collection.findOne({ 'id': studyId, 'life.deletedTime': null }); + if (!study) { + throw new CoreError( + enumCoreErrors.CLIENT_ACTION_ON_NON_EXISTENT_ENTRY, + 'Study does not exist.' + ); + } + + const fileType = (file.filename.split('.').pop() as string).toUpperCase(); + if (!Object.keys(enumFileTypes).includes(fileType)) { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + `File type ${fileType} not supported.` + ); + } + const fileEntry = await this.fileCore.uploadFile( + requester, studyId, null, file, enumFileTypes[fileType], enumFileCategories.STUDY_DATA_FILE, undefined, properties ? JSON.parse(properties) : undefined); + const dataInput: IDataInput[] = [{ + fieldId: fieldId, + value: fileEntry.id, + properties: properties ? JSON.parse(properties) : undefined + }]; + const res = await this.uploadData(requester, studyId, dataInput); + if (!res[0].successful) { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + res[0].description ?? 'Failed to upload file.' + ); + } + return makeGenericReponse(fileEntry.id, true, undefined, 'File uploaded.'); + } + + /** + * Get the summary of a study. + * Admins can study managers can access this function. + * + * @param studyId - The id of the study. 
+ * + * @return Record - The object of Record + */ + public async getStudySummary(requester: IUserWithoutToken | undefined, studyId: string) { + if (!requester) { + throw new CoreError( + enumCoreErrors.NOT_LOGGED_IN, + enumCoreErrors.NOT_LOGGED_IN + ); + } + + const roles = await this.permissionCore.getRolesOfUser(requester, studyId); + if (requester.type !== enumUserTypes.ADMIN && roles.length === 0) { + throw new CoreError( + enumCoreErrors.NO_PERMISSION_ERROR, + enumCoreErrors.NO_PERMISSION_ERROR + ); + } + + const numberOfDataLogs: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, dataVersion: { $ne: null } }); + const numberOfAdds: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, value: { $ne: null }, dataVersion: { $ne: null } }); + const numberOfDeletes: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, value: null, dataVersion: { $ne: null } }); + + const numberOfVersionedLogs: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, dataVersion: { $ne: null } }); + const numberOfUnversionedLogs: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, dataVersion: null }); + + const numberOfUnversionedAdds: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, dataVersion: null, value: { $ne: null } }); + const numberOfUnversionedDeletes: number = await this.db.collections.data_collection.countDocuments({ studyId: studyId, dataVersion: null, value: null }); + + const numberOfSubjects: number = (await this.db.collections.data_collection.distinct('subjectId', { studyId: studyId, dataVersion: { $ne: null } })).length; + const numberOfVisits: number = (await this.db.collections.data_collection.distinct('visitId', { studyId: studyId, dataVersion: { $ne: null } })).length; + const numberOfFields: number = (await 
this.db.collections.field_dictionary_collection.distinct('fieldId', { studyId: studyId })).length; + + return { + numberOfDataLogs: numberOfDataLogs, + numberOfAdds: numberOfAdds, + numberOfDeletes: numberOfDeletes, + numberOfVersionedLogs: numberOfVersionedLogs, + numberOfUnversionedLogs: numberOfUnversionedLogs, + numberOfUnversionedAdds: numberOfUnversionedAdds, + numberOfUnversionedDeletes: numberOfUnversionedDeletes, + numberOfSubjects: numberOfSubjects, + numberOfVisits: numberOfVisits, + numberOfFields: numberOfFields + }; + } + + /* TODO: Data Transformation */ + /* TODO: This is a placeholder in case required. */ + // public async dataTransform(fields: IField[], data: IData[], rules: any) { + // } + + /** + * Upload a json object as a file to minio. + * + * @param jsonObject - The json object. + * @param fileName - The name of the file. + * @param requester - The requester. + * @returns + */ + public async convertToBufferAndUpload( + jsonObject: Record, + fileName: string, + requester: IUserWithoutToken + ): Promise { + // Convert the JSON object to a buffer + const buffer = Buffer.from(JSON.stringify(jsonObject)); + + // Create a readable stream from the buffer + const createReadStream = (): Readable => { + const stream = new Readable(); + stream.push(buffer); + stream.push(null); // No more data + return stream; + }; + + // Prepare the file data for upload + const fileUpload: FileUpload = { + createReadStream: createReadStream, + filename: fileName, + mimetype: 'application/json', + encoding: 'utf-8' + }; + + // Upload the file using the provided upload function + return this.fileCore.uploadFile( + requester, + null, // studyId + null, // userId + fileUpload, // fileUpload + enumFileTypes.JSON, + enumFileCategories.CACHE, + undefined // description + ); + } +} \ No newline at end of file diff --git a/packages/itmat-cores/src/trpcCore/driveCore.ts b/packages/itmat-cores/src/trpcCore/driveCore.ts index 44b0acb83..4e87bb2b3 100644 --- 
a/packages/itmat-cores/src/trpcCore/driveCore.ts +++ b/packages/itmat-cores/src/trpcCore/driveCore.ts @@ -110,7 +110,7 @@ export class TRPCDriveCore { * * @return IDrive */ - public async createDriveFile(requester: IUserWithoutToken | undefined, parentId: string | null, description: string | undefined, fileType: enumFileTypes, file: FileUpload | null) { + public async createDriveFile(requester: IUserWithoutToken | undefined, parentId: string | null, description: string | undefined, fileType: enumFileTypes, file: FileUpload) { if (!requester) { throw new CoreError( enumCoreErrors.NOT_LOGGED_IN, diff --git a/packages/itmat-cores/src/trpcCore/permissionCore.ts b/packages/itmat-cores/src/trpcCore/permissionCore.ts index 455227323..48a771c60 100644 --- a/packages/itmat-cores/src/trpcCore/permissionCore.ts +++ b/packages/itmat-cores/src/trpcCore/permissionCore.ts @@ -1,4 +1,4 @@ -import { IUserWithoutToken } from '@itmat-broker/itmat-types'; +import { IData, IField, IUserWithoutToken, enumDataAtomicPermissions, permissionString } from '@itmat-broker/itmat-types'; import { DBType } from '../database/database'; export class TRPCPermissionCore { @@ -18,4 +18,43 @@ export class TRPCPermissionCore { return studyId ? await this.db.collections.roles_collection.find({ 'studyId': studyId, 'users': user.id, 'life.deletedTime': null }).toArray() : await this.db.collections.roles_collection.find({ 'users': user.id, 'life.deletedTime': null }).toArray(); } + + /** + * Check if a user has a certain permission of a field or data. + * + * @param user - The user to check permission for. + * @param studyId - The id of the study. + * @param entry - The field or data to check permission for. + * @param permission - The permission to check. 
+ * + * @returns boolean + */ + public async checkFieldOrDataPermission(user: IUserWithoutToken, studyId: string, entry: Partial | Partial, permission: enumDataAtomicPermissions) { + const roles = await this.getRolesOfUser(user, studyId); + for (const role of roles) { + const dataPermissions = role.dataPermissions; + for (const dataPermission of dataPermissions) { + for (const field of dataPermission.fields) { + if (!(new RegExp(field).test(String(entry.fieldId)))) { + return false; + } + } + if ('value' in entry && dataPermission.dataProperties) { + if (entry.properties) { + for (const property in dataPermission.dataProperties) { + for (const prop of dataPermission.dataProperties[property]) { + if (!(new RegExp(prop).test(String(entry.properties[property])))) { + return false; + } + } + } + } + } + if (!permissionString[permission].includes(dataPermission.permission)) { + return false; + } + } + } + return true; + } } \ No newline at end of file diff --git a/packages/itmat-cores/src/trpcCore/studyCore.ts b/packages/itmat-cores/src/trpcCore/studyCore.ts index a3ad665e5..ab82ded4f 100644 --- a/packages/itmat-cores/src/trpcCore/studyCore.ts +++ b/packages/itmat-cores/src/trpcCore/studyCore.ts @@ -1,6 +1,6 @@ import { ObjectStore } from '@itmat-broker/itmat-commons'; import { DBType } from '../database/database'; -import { CoreError, FileUpload, IFile, IStudy, IStudyDataVersion, IUserWithoutToken, enumConfigType, enumCoreErrors, enumFileCategories, enumFileTypes, enumStudyRoles, enumUserTypes } from '@itmat-broker/itmat-types'; +import { CoreError, FileUpload, IData, IFile, IStudy, IStudyDataVersion, IUserWithoutToken, enumCacheStatus, enumConfigType, enumCoreErrors, enumFileCategories, enumFileTypes, enumStudyRoles, enumUserTypes } from '@itmat-broker/itmat-types'; import { Filter, UpdateFilter } from 'mongodb'; import { TRPCPermissionCore } from './permissionCore'; import { v4 as uuid } from 'uuid'; @@ -360,19 +360,48 @@ export class TRPCStudyCore { } }); - // TODO: 
invalidate hash - // await this.db.collections.cache_collection.updateMany({ - // 'keys.studyId': studyId - // }, { - // $set: { - // status: enumCacheStatus.OUTDATED - // } - // }); + // TODO: invalidate cache + await this.db.collections.cache_collection.updateMany({ + 'keys.studyId': studyId + }, { + $set: { + status: enumCacheStatus.OUTDATED + } + }); // TODO: update stds, ontologies // TODO: update cold storage - + const unversionedDocuments = await this.db.collections.data_collection.find({ studyId: studyId, dataVersion: newDataVersionId }).toArray(); + let bulkOperation = this.db.collections.colddata_collection.initializeUnorderedBulkOp(); + for (const doc of unversionedDocuments) { + const filters: Filter = { + studyId: studyId, + fieldId: doc.fieldId, + properties: doc.properties + }; + bulkOperation.find(filters).upsert().update({ + $set: { + id: uuid(), + value: doc.value, + dataVersion: newDataVersionId, + life: { + createdTime: Date.now(), + createdUser: requester.id, + deletedTime: null, + deletedUser: null + }, + metadata: {} + } + }); + if (bulkOperation.batches.length > 999) { + await bulkOperation.execute(); + bulkOperation = this.db.collections.colddata_collection.initializeUnorderedBulkOp(); + } + } + if (bulkOperation.batches.length > 0) { + await bulkOperation.execute(); + } const newDataVersion: IStudyDataVersion = { id: newDataVersionId, @@ -397,6 +426,8 @@ export class TRPCStudyCore { enumCoreErrors.UNQUALIFIED_ERROR, 'Nothing to update.' 
); + } else { + return newDataVersion; } } diff --git a/packages/itmat-cores/src/trpcCore/transformationCore.ts b/packages/itmat-cores/src/trpcCore/transformationCore.ts new file mode 100644 index 000000000..8c320a105 --- /dev/null +++ b/packages/itmat-cores/src/trpcCore/transformationCore.ts @@ -0,0 +1,769 @@ +import { IAST, IValueVerifier, enumDataTransformationOperation } from '@itmat-broker/itmat-types'; +import { TRPCUtilsCore } from './utilsCore'; + +type IDataTransformationClip = Record; + +type IDataTransformationClipArray = IDataTransformationClip[]; + +type IDataTransformationType = IDataTransformationClipArray | IDataTransformationClipArray[]; + +abstract class DataTransformation { + abstract transform(data: IDataTransformationType): IDataTransformationType; +} + +/** + * Group data by keys. + * + * @input A[] + * @OUTPUT A[][] + * + * @param keys - The keys to group by. + * @param skipUnmatch - Whether to skip the ungrouped data. + */ +class tGrouping extends DataTransformation { + protected keys: string[]; + protected skipUnmatch: boolean; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { keys: string[], skipUnmatch: boolean }, utilsCore: TRPCUtilsCore) { + super(); + this.keys = params.keys; + this.skipUnmatch = params.skipUnmatch; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationType { + // Check input type + if (!Array.isArray(data) || (data.length > 0 && Array.isArray(data[0]))) { + throw new Error('Input data must be of type IDataTransformationClipArray (A[]) and not A[][]'); + } + + // Type assertion: after this check, `data` is `IDataTransformationClipArray` + const clipsArray = data as IDataTransformationClipArray; + + // We'll use a map to group the records. The key will be a string representation of the values of the keys. 
+ const groupsMap: Record = {}; + const unmatched: IDataTransformationClipArray[] = []; // This will hold arrays + for (const item of clipsArray) { + // Check if all specified keys exist in the data item + const allKeysExist = this.keys.every(key => this.getValueFromNestedKey(item, key) !== undefined); + + if (!allKeysExist) { + if (this.skipUnmatch) { + continue; // skip this item + } else { + unmatched.push([item]); // wrap the unmatched item in an array + continue; + } + } + + // For each record, we'll generate a key by concatenating the values of the specified keys + const groupKey = this.keys.map(key => String(this.getValueFromNestedKey(item, key))).join('|'); + // If this group doesn't exist yet, we'll create it + if (!groupsMap[groupKey]) { + groupsMap[groupKey] = []; + } + + // We'll add the current item to its group + groupsMap[groupKey].push(item); + } + // Now, combine matched groups and unmatched items + const result = [...Object.values(groupsMap), ...unmatched]; + // Otherwise, return the array of groups + return result as IDataTransformationClipArray[]; + } + + getValueFromNestedKey(obj: Record, key: string): unknown { + const keys = key.split('.'); + return this.findValue(obj, keys); + } + + findValue(obj: Record | unknown, keys: string[]): unknown { + if (keys.length === 0 || obj === undefined || obj === null) { + return obj; + } + + const [firstKey, ...remainingKeys] = keys; + + if (typeof obj === 'object' && obj !== null && firstKey in obj) { + return this.findValue((obj as Record)[firstKey], remainingKeys); + } else { + return undefined; + } + } +} + +/** + * Convert or delete each value of the data. Note, by default, the execution order is: adding keys -> affine -> remove keys + * + * @input A[] + * @output A[] + * + * @param removedKeys - Keys to remove. + * @param addedKeyRules - Keys to add. + * @param rules - Rules to convert the values. 
+ */ +class tAffine extends DataTransformation { + protected removedKeys?: string[]; + protected addedKeyRules?: Array<{ key: IAST, value: IAST }>; + protected rules?: Record; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { removedKeys: string[], rules: Record, addedKeyRules: Array<{ key: IAST, value: IAST }> }, utilsCore: TRPCUtilsCore) { + super(); + this.removedKeys = params.removedKeys; + this.addedKeyRules = params.addedKeyRules; + this.rules = params.rules; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationClipArray { + if (!Array.isArray(data) || (data.length > 0 && Array.isArray(data[0]))) { + throw new Error('Input data must be of type IDataTransformationClipArray (A[]) and not A[][]'); + } + + const affinedData: IDataTransformationClipArray = []; + for (const item of data as IDataTransformationClipArray) { + // Add keys + if (this.addedKeyRules) { + for (const pair of this.addedKeyRules) { + const newKey = this.utilsCore.IASTHelper(pair.key, item) as string; + const newValue = this.utilsCore.IASTHelper(pair.value, item); + item[newKey] = newValue; + } + } + + // Apply rules for affine transformations + if (this.rules) { + for (const key of Object.keys(item)) { + if (this.rules[key]) { + item[key] = this.utilsCore.IASTHelper(this.rules[key], item[key] as number | string | Record); + } + } + } + + // Remove specified keys + if (this.removedKeys) { + for (const key of this.removedKeys) { + delete item[key]; + } + } + + // Add transformed item to the result if it has keys left + if (Object.keys(item).length > 0) { + affinedData.push(item); + } + } + + return affinedData; + } +} + +/** + * Leave one data from a group. + * + * @input A[][] + * @output A[] + * + * @param scoreFormula - The formula to give rank of the data. + * @param isDescend - Whether to rank in descend order. 
+ */ +class tLeaveOne extends DataTransformation { + protected scoreFormula: IAST; + protected isDescend: boolean; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { scoreFormula: IAST, isDescend: boolean }, utilsCore: TRPCUtilsCore) { + super(); + this.scoreFormula = params.scoreFormula; + this.isDescend = params.isDescend; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationType { + if (!Array.isArray(data) || (data.length > 0 && !Array.isArray(data[0]))) { + throw new Error('Input data must be of type A[][] (array of arrays) and not A[]'); + } + + const mergedData: IDataTransformationClipArray = []; + + for (const items of data as IDataTransformationClipArray[]) { + const scores: number[] = []; + for (const item of items) { + scores.push(this.utilsCore.IASTHelper(this.scoreFormula, item)); + } + + const index = this.isDescend ? scores.indexOf(Math.max(...scores)) : scores.indexOf(Math.min(...scores)); + mergedData.push(items[index]); + } + return mergedData; + } +} + +class tJoin extends DataTransformation { + protected reservedKeys: string[]; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { reservedKeys: string[] }, utilsCore: TRPCUtilsCore) { + super(); + this.reservedKeys = params.reservedKeys; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationClipArray { + if (!Array.isArray(data) || (data.length > 0 && !Array.isArray(data[0]))) { + throw new Error('Input data must be of type A[][] (array of arrays) and not A[]'); + } + + const joinedData: IDataTransformationClipArray = []; + for (const items of data as IDataTransformationClipArray[]) { + let obj: IDataTransformationClip = {}; + for (const item of items) { + obj = { + ...obj, + ...item + }; + } + joinedData.push(obj); + } + return joinedData; + } +} + +class tConcat extends DataTransformation { + protected concatKeys: string[]; + protected utilsCore: TRPCUtilsCore; + + 
constructor(params: { concatKeys: string[] }, utilsCore: TRPCUtilsCore) { + super(); + this.concatKeys = params.concatKeys; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationClipArray { + if (!Array.isArray(data) || (data.length > 0 && !Array.isArray(data[0]))) { + throw new Error('Input data must be of type A[][] (array of arrays) and not A[]'); + } + + const results: IDataTransformationClipArray = []; + data.forEach(array => { + const result: IDataTransformationClip = {}; + + array.forEach((item: IDataTransformationClip) => { + Object.keys(item).forEach(key => { + if (this.concatKeys.includes(key)) { + if (!result[key]) { + result[key] = []; + } + (result[key] as unknown[]).push(item[key]); + } else { + if (!result[key]) { + result[key] = item[key]; + } + } + }); + }); + + results.push(result); + }); + + return results; + } +} + +/** + * Deconcat values into an array. + * + * @input A[] + * @output A[][] + */ +class tDeconcat extends DataTransformation { + protected deconcatKeys: string[]; + protected matchMode: 'combinations' | 'sequential'; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { deconcatKeys: string[], matchMode?: 'combinations' | 'sequential' }, utilsCore: TRPCUtilsCore) { + super(); + this.deconcatKeys = params.deconcatKeys; + this.matchMode = params.matchMode || 'combinations'; + this.utilsCore = utilsCore; + } + + private cartesianProduct(arr: unknown[][]): unknown[][] { + return arr.reduce((a: unknown[][], b: unknown[]): unknown[][] => { + return a.flatMap((x: unknown[]): unknown[][] => + b.map((y: unknown): unknown[] => x.concat([y])) + ); + }, [[]]); + } + + transform(data: IDataTransformationClipArray): IDataTransformationClipArray[] { + const results: IDataTransformationClipArray[] = []; + + data.forEach((item: IDataTransformationClip) => { + const subResults: IDataTransformationClip[] = []; + + if (this.matchMode === 'combinations') { + const arraysToDeconcat: unknown[][] = 
this.deconcatKeys.map(key => item[key] as unknown[] || []); + + const product = this.cartesianProduct(arraysToDeconcat); + + product.forEach(combination => { + const newObj = this.createDeconcatObject(item, combination); + subResults.push(newObj); + }); + } else if (this.matchMode === 'sequential') { + const maxLength = Math.max(...this.deconcatKeys.map(key => (item[key] as unknown[])?.length || 0)); + + for (let i = 0; i < maxLength; i++) { + const sequentialValues = this.deconcatKeys.map(key => (item[key] as unknown[])?.[i]); + const newObj = this.createDeconcatObject(item, sequentialValues); + subResults.push(newObj); + } + } + + results.push(subResults); + }); + + return results; + } + + private createDeconcatObject(item: IDataTransformationClip, values: unknown[]): IDataTransformationClip { + const newObj: IDataTransformationClip = {}; + + this.deconcatKeys.forEach((key, index) => { + newObj[key] = values[index]; + }); + + Object.keys(item).forEach((key: string) => { + if (!this.deconcatKeys.includes(key)) { + newObj[key] = item[key]; + } + }); + + return newObj; + } +} +/** + * Filter the data. 
+ * + * @input A[] | A[][] + * @output A[] | A[][] + */ +class tFilter extends DataTransformation { + protected filters: Record; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { filters: Record }, utilsCore: TRPCUtilsCore) { + super(); + this.filters = params.filters; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationType { + if (Array.isArray(data) && data.length && Array.isArray(data[0])) { + return (data as IDataTransformationClipArray[]).map(subArray => + subArray.filter(item => this.isValidItem(item)) + ); + } else { + return (data as IDataTransformationClipArray).filter(item => this.isValidItem(item)); + } + } + + private isValidItem(data: IDataTransformationClip): boolean { + const x = Object.keys(this.filters).some(key => { + return this.filters[key].every(el => { + return (this.utilsCore.validValueWithVerifier(data as number | string | IDataTransformationClip, el)); + }); + }); + return x; + } +} + +/** + * Count the data. This should be used after grouping. 
+ * + * @input A[][] + * @output { count: ..., ...}[] + */ +class tCount extends DataTransformation { + protected addedKeyRules?: Array<{ key: IAST, value: IAST }>; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { addedKeyRules: Array<{ key: IAST, value: IAST }> }, utilsCore: TRPCUtilsCore) { + super(); + this.addedKeyRules = params.addedKeyRules; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationClipArray { + if (!Array.isArray(data) || (data.length > 0 && !Array.isArray(data[0]))) { + throw new Error('Input data must be of type A[][] (array of arrays) and not A[]'); + } + const mergerArrays: IDataTransformationClipArray = []; + for (const group of data as IDataTransformationClipArray[]) { + const item: IDataTransformationClip = { count: group.length }; + if (this.addedKeyRules) { + for (const pair of this.addedKeyRules) { + item[this.utilsCore.IASTHelper(pair.key, group[0]) as string] = this.utilsCore.IASTHelper(pair.value, group[0]); + } + } + mergerArrays.push(item); + } + return mergerArrays; + } +} + +/** + * Split a data into multiple data. + * + * @input A[] + * @output A[][] + * + * @param sharedKeys - The keys to keep in the new data. + * @param targetKeyGroups - The keys to add with the shared keys. 
+ */ +class tDegroup extends DataTransformation { + protected sharedKeys: string[]; + protected targetKeyGroups: string[][]; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { sharedKeys: string[], targetKeyGroups: string[][] }, utilsCore: TRPCUtilsCore) { + super(); + this.sharedKeys = params.sharedKeys; + this.targetKeyGroups = params.targetKeyGroups; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationClipArray[] { + if (!Array.isArray(data) || (data.length > 0 && Array.isArray(data[0]))) { + throw new Error('Input data must be of type IDataTransformationClipArray (A[]) and not A[][]'); + } + + const splitData: IDataTransformationClipArray[] = []; + for (const item of data as IDataTransformationClipArray) { + const saved: IDataTransformationClipArray = []; + for (let i = 0; i < this.targetKeyGroups.length; i++) { + const obj: IDataTransformationClip = {}; + for (const key of this.sharedKeys) { + obj[key] = item[key]; + } + for (const key of this.targetKeyGroups[i]) { + obj[key] = item[key]; + } + saved.push(obj); + } + splitData.push(saved); + } + return splitData; + } +} +/** + * Flatten an object. Keys within the object will be keys in the data clip. + * + * @input A[] | A[][] + * @output A[] | A[][] + * + * @param keepFlattened - Whether to keep the values from the object if conflicts. + * @param flattenedKey - The key to flatten. + * @param keepFlattenedKey - Whether to keep the flattened key. 
+ */ +class tFlatten extends DataTransformation { + protected keepFlattened: boolean; + protected flattenedKey: string; + protected keepFlattenedKey: boolean; + protected utilsCore: TRPCUtilsCore; + + constructor(params: { keepFlattened: boolean, flattenedKey: string, keepFlattenedKey: boolean }, utilsCore: TRPCUtilsCore) { + super(); + this.keepFlattened = params.keepFlattened; + this.flattenedKey = params.flattenedKey; + this.keepFlattenedKey = params.keepFlattenedKey; + this.utilsCore = utilsCore; + } + + transform(data: IDataTransformationType): IDataTransformationType { + if (Array.isArray(data) && data.length > 0 && Array.isArray(data[0])) { + return (data as IDataTransformationClipArray[]).map(group => group.map(item => this.flattenItem(item))); + } else if (Array.isArray(data)) { + return (data as IDataTransformationClipArray).map(item => this.flattenItem(item)); + } else { + throw new Error('Invalid input format for tFlatten transform.'); + } + } + + private flattenItem(item: IDataTransformationClip): IDataTransformationClip { + const objectToFlatten = item[this.flattenedKey]; + if (typeof objectToFlatten !== 'object' || objectToFlatten === null) { + return item; + } + + const flattenedItem: IDataTransformationClip = { ...item }; + for (const key in objectToFlatten) { + if (!this.keepFlattened || !(key in item)) { + flattenedItem[key] = objectToFlatten[key]; + } + } + + if (!this.keepFlattenedKey) { + delete flattenedItem[this.flattenedKey]; + } + + return flattenedItem; + } +} + + + + +export class TRPCDataTransformationCore { + protected utilsCore: TRPCUtilsCore; + constructor(utilsCore: TRPCUtilsCore) { + this.utilsCore = utilsCore; + } + + public transformationRegistry = { + GROUP: tGrouping, + AFFINE: tAffine, + LEAVEONE: tLeaveOne, + CONCAT: tConcat, + DECONCAT: tDeconcat, + JOIN: tJoin, + DEGROUP: tDegroup, + FILTER: tFilter, + FLATTEN: tFlatten, + COUNT: tCount + }; + + public transformationCompose(data: IDataTransformationClipArray, 
transformations: Array<{ operationName: string, params: unknown }>): IDataTransformationType { + return transformations.reduce((currentData, transformation) => { + const TransformationClass = this.transformationRegistry[transformation.operationName]; + + if (!TransformationClass) { + throw new Error(`Transformation ${transformation.operationName} is not registered.`); + } + + const instance = new TransformationClass(transformation.params, this.utilsCore); + return instance.transform(currentData); + }, data); + } + + public transformationAggregate( + data: IDataTransformationClipArray, + transformationsAgg: Record> + ): Record { + if (!transformationsAgg || Object.keys(transformationsAgg).length === 0) { + return { raw: data }; + } + + const aggregation: Record = {}; + for (const key of Object.keys(transformationsAgg)) { + const transformedData = this.transformationCompose(data, transformationsAgg[key]); + + // Ensure the final result is always a flat array + // aggregation[key] = this.flattenArray(transformedData); + aggregation[key] = transformedData; + } + return aggregation; + } + + private flattenArray(data: IDataTransformationType): IDataTransformationClipArray { + if (Array.isArray(data) && Array.isArray(data[0])) { + return (data as IDataTransformationClipArray[]).flat(); + } + return data as IDataTransformationClipArray; + } +} + +/** + * Examples of data transformation + 1. 
Generate a simplified version of data: + { + "aggregation": { + "device": [ + { + "params": { + "keys": [ + "properties.Device Type", + "properties.Participant ID", + "properties.Device ID", + "properties.End Date", + "properties.Start Date" + ], + "skipUnmatch": true + }, + "operationName": "Group" + }, + { + "params": { + "scoreFormula": { + "operator": null, + "type": "VARIABLE", + "value": "life.createdTime", + "children": null, + "parameters": {} + }, + "isDescend": true + }, + "operationName": "LeaveOne" + }, + { + "operationName": "Filter", + "params": { + "filters": { + "deleted": { + "formula": { + "value": "life.deletedTime", + "operation": null, + "type": "VARIABLE", + "parameter": {}, + "children": null + }, + "value": "", + "condition": "general:=null", + "parameters": {} + } + } + } + }, + { + "operationName": "Affine", + "params": { + "removedKeys": [ + "_id", + "id", + "studyId", + "dataVersion", + "life", + "metadata" + ], + "addedKeyRules": [], + "rules": {} + } + } + ], + "clinical": [ + { + "operationName": "Group", + "params": { + "keys": [ + "properties.Visit ID", + "properties.Participant ID", + "fieldId" + ], + "skipUnmatch": true + } + }, + { + "operationName": "LeaveOne", + "params": { + "scoreFormula": { + "operator": null, + "children": null, + "type": "VARIABLE", + "value": "life.createdTime", + "parameters": {} + }, + "isDescend": true + } + }, + { + "params": { + "filters": { + "deleted": { + "parameters": {}, + "value": "", + "condition": "general:=null", + "formula": { + "parameter": {}, + "value": "life.deletedTime", + "operation": null, + "children": null, + "type": "VARIABLE" + } + } + } + }, + "operationName": "Filter" + }, + { + "operationName": "Affine", + "params": { + "removedKeys": [ + "_id", + "id", + "studyId", + "dataVersion", + "life", + "metadata" + ], + "addedKeyRules": [], + "rules": {} + } + }, + { + "operationName": "Flatten", + "params": { + "keepFlattened": true, + "flattenedKey": "properties", + 
"keepFlattenedKey": false + } + }, + { + "operationName": "Affine", + "params": { + "removedKeys": [ + "fieldId", + "value" + ], + "addedKeyRules": [ + { + "key": { + "type": "VARIABLE", + "operator": null, + "value": "fieldId", + "parameters": {}, + "children": null + }, + "value": { + "type": "VARIABLE", + "operator": null, + "value": "value", + "parameters": {}, + "children": null + } + } + ], + "rules": {} + } + }, + { + "operationName": "Group", + "params": { + "keys": [ + "Participant ID", + "Visit ID" + ], + "skipUnMatch": false + } + }, + { + "operationName": "Join", + "params": { + "reservedKeys": [ + "Participant ID", + "Visit ID" + ] + } + } + ] + }, + "useCache": false, + "studyId": "96f17282-e0a3-43d3-8f38-326949b786ef", + "versionId": null, + "forceUpdate": false +} + + 2. Generate data standardization pipeline: + + */ \ No newline at end of file diff --git a/packages/itmat-cores/src/trpcCore/utilsCore.ts b/packages/itmat-cores/src/trpcCore/utilsCore.ts new file mode 100644 index 000000000..a15adb2f1 --- /dev/null +++ b/packages/itmat-cores/src/trpcCore/utilsCore.ts @@ -0,0 +1,157 @@ +import { IValueVerifier, IAST, enumASTNodeTypes, enumMathOps, enumConditionOps, CoreError, enumCoreErrors } from '@itmat-broker/itmat-types'; +import crypto from 'crypto'; + +type IDataTransformationClip = Record; + +export class TRPCUtilsCore { + /** + * This function checks scalar data only. + * + * @param value - The input value. + * @param verifier - The input IAST. + * @returns If the value pass the verifier. 
+ */ + public validValueWithVerifier(value: number | string | IDataTransformationClip, verifier: IValueVerifier): boolean { + const calculatedValue = this.IASTHelper(verifier.formula, value); + if (verifier.condition === enumConditionOps.NUMERICALEQUAL) { + return calculatedValue === this.parseInputToNumber(verifier.value); + } else if (verifier.condition === enumConditionOps.NUMERICALNOTEQUAL) { + return calculatedValue !== verifier.value; + } else if (verifier.condition === enumConditionOps.NUMERICALLESSTHAN) { + return calculatedValue < verifier.value; + } else if (verifier.condition === enumConditionOps.NUMERICALGREATERTHAN) { + return calculatedValue > verifier.value; + } else if (verifier.condition === enumConditionOps.NUMERICALNOTLESSTHAN) { + return calculatedValue >= verifier.value; + } else if (verifier.condition === enumConditionOps.NUMERICALNOTGREATERTHAN) { + return calculatedValue <= verifier.value; + } else if (verifier.condition === enumConditionOps.STRINGREGEXMATCH) { + return new RegExp(verifier.value.toString()).test(calculatedValue.toString()); + } else if (verifier.condition === enumConditionOps.STRINGEQUAL) { + return calculatedValue === verifier.value; + } else if (verifier.condition === enumConditionOps.GENERALISNOTNULL) { + return calculatedValue !== null; + } else if (verifier.condition === enumConditionOps.GENERALISNULL) { + return calculatedValue === null; + } + return false; + } + + public parseInputToNumber(input: number | string): number { + // If the input is already a number, return it + if (typeof input === 'number') { + return input; + } + + // If the input is a string + if (typeof input === 'string') { + // If the string contains a decimal point, use parseFloat + if (input.includes('.')) { + return parseFloat(input); + } + // Otherwise, use parseInt + return parseInt(input, 10); + } + + // If the input is neither a number nor a string, throw an error + throw new Error('Input must be a number or a string'); + } + + public 
IASTHelper(root: IAST, data: number | string | IDataTransformationClip) { + if (root.type === enumASTNodeTypes.VALUE) { + return root.value; + } + if (root.type === enumASTNodeTypes.SELF) { + return data; + } + if (typeof data === 'string' && root.type === enumASTNodeTypes.MAP) { + return root.parameters[data] ?? data; + } + // in this case, the data should be a json + if (root.type === enumASTNodeTypes.VARIABLE) { + if (root.value) { + const keys = (root.value as string).split('.'); + let current = data; + + for (const k of keys) { + if (current[k] !== undefined) { + current = current[k]; + } else { + return undefined; + } + } + + return current; + } + } + if ((typeof data === 'number' || typeof data === 'string') && root.type === enumASTNodeTypes.OPERATION) { + if (!root.operator) { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + 'OPEARTION node must have an operator' + ); + } + if (root.operator === enumMathOps.NUMERICALADD && root.children && root.children.length === 2) { + return this.IASTHelper(root.children[0], this.parseInputToNumber(data)) + this.IASTHelper(root.children[1], this.parseInputToNumber(data)); + } else if (root.operator === enumMathOps.NUMERICALMINUS && root.children && root.children.length === 2) { + return this.IASTHelper(root.children[0], this.parseInputToNumber(data)) - this.IASTHelper(root.children[1], this.parseInputToNumber(data)); + } else if (root.operator === enumMathOps.NUMERICALMULTIPLY && root.children && root.children.length === 2) { + return this.IASTHelper(root.children[0], this.parseInputToNumber(data)) * this.IASTHelper(root.children[1], this.parseInputToNumber(data)); + } else if (root.operator === enumMathOps.NUMERICALDIVIDE && root.children && root.children.length === 2) { + return this.IASTHelper(root.children[0], this.parseInputToNumber(data)) / this.IASTHelper(root.children[1], this.parseInputToNumber(data)); + } else if (root.operator === enumMathOps.NUMERICALPOW && root.children && 
root.children.length === 2) { + return Math.pow(this.IASTHelper(root.children[0], this.parseInputToNumber(data)), this.IASTHelper(root.children[1], this.parseInputToNumber(data))); + } else if (root.operator === enumMathOps.STRINGCONCAT && root.children && root.children.length) { + return root.children.reduce((a, c) => { + return a + this.IASTHelper(c, data).toString(); + }, ''); + } else if (root.operator === enumMathOps.STRINGSUBSTR && root.children && root.children.length === 3) { + return (this.IASTHelper(root.children[0], data).toString()).substr(this.IASTHelper(root.children[1], data), this.IASTHelper(root.children[2], data.toString())); + } else if (root.operator === enumMathOps.TYPECONVERSION && root.children && root.children.length === 2) { + const newType = this.IASTHelper(root.children[0], data); + if (newType === 'INT') { + return Math.floor(Number(this.IASTHelper(root.children[0], data))); + } else if (newType === 'FLOAT') { + return parseFloat((this.IASTHelper(root.children[0], data) as string | number).toString()); + } else if (newType === 'STRING') { + return this.IASTHelper(root.children[0], data).toString(); + } else { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + 'Type converstion only supports INT, FLOAT and STRING.' + ); + } + } else { + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + 'Operator and children does not match.' 
+ ); + } + } + throw new CoreError( + enumCoreErrors.CLIENT_MALFORMED_INPUT, + 'Node type must be OPERATION,, SELF or VALUE' + ); + } + + public normalize(obj) { + if (Array.isArray(obj)) { + return obj.map(this.normalize.bind(this)).sort(); + } else if (typeof obj === 'object' && obj !== null) { + const sortedObj: { [key: string]: unknown } = {}; + Object.keys(obj).sort().forEach(key => { + sortedObj[key] = this.normalize.bind(this)(obj[key]); + }); + return sortedObj; + } else { + return obj; + } + } + + public computeHash(inputObject): string { + const normalizedObj = this.normalize.bind(this)(inputObject); + const str = JSON.stringify(normalizedObj); + return crypto.createHash('sha256').update(str).digest('hex'); + } + +} diff --git a/packages/itmat-docker/config/config.sample.json b/packages/itmat-docker/config/config.sample.json index a9bfc6b4f..ecb8c7a21 100644 --- a/packages/itmat-docker/config/config.sample.json +++ b/packages/itmat-docker/config/config.sample.json @@ -19,7 +19,9 @@ "pubkeys_collection": "PUBKEY_COLLECTION", "standardizations_collection": "STANDARDIZATION_COLLECTION", "configs_collection": "CONFIG_COLLECTION", - "drives_collection": "DRIVE_COLLECTION" + "drives_collection": "DRIVE_COLLECTION", + "colddata_collection": "COLDDATA_COLLECTION", + "cache_collection": "CACHE_COLLECTION" } }, "server": { diff --git a/packages/itmat-interface/config/config.sample.json b/packages/itmat-interface/config/config.sample.json index 994e641bf..4f119bef1 100644 --- a/packages/itmat-interface/config/config.sample.json +++ b/packages/itmat-interface/config/config.sample.json @@ -19,7 +19,9 @@ "pubkeys_collection": "PUBKEY_COLLECTION", "standardizations_collection": "STANDARDIZATION_COLLECTION", "configs_collection": "CONFIG_COLLECTION", - "drives_collection": "DRIVE_COLLECTION" + "drives_collection": "DRIVE_COLLECTION", + "colddata_collection": "COLDDATA_COLLECTION", + "cache_collection": "CACHE_COLLECTION" } }, "server": { diff --git 
a/packages/itmat-interface/src/server/router.ts b/packages/itmat-interface/src/server/router.ts index 3e08ad313..df3b41eca 100644 --- a/packages/itmat-interface/src/server/router.ts +++ b/packages/itmat-interface/src/server/router.ts @@ -279,7 +279,8 @@ export class Router { createReadStream: () => fileStream, filename: file.originalname, mimetype: file.mimetype, - encoding: file.encoding + encoding: file.encoding, + fieldName: file.fieldname }); } req.body.files = transformedFiles; // Attach the transformed files to the request body for later use diff --git a/packages/itmat-interface/src/trpc/dataProcedure.ts b/packages/itmat-interface/src/trpc/dataProcedure.ts new file mode 100644 index 000000000..845b04f9d --- /dev/null +++ b/packages/itmat-interface/src/trpc/dataProcedure.ts @@ -0,0 +1,353 @@ +import { FileUploadSchema, IAST, enumASTNodeTypes, enumConditionOps, enumDataTransformationOperation, enumDataTypes, enumMathOps } from '@itmat-broker/itmat-types'; +import { z } from 'zod'; +import { baseProcedure, router } from './trpc'; +import { TRPCDataCore, TRPCDataTransformationCore, TRPCFileCore, TRPCPermissionCore } from '@itmat-broker/itmat-cores'; +import { db } from '../database/database'; +import { objStore } from '../objStore/objStore'; +import { TRPCUtilsCore } from 'packages/itmat-cores/src/trpcCore/utilsCore'; + + +const ZAST: z.ZodType = z.lazy(() => z.object({ + type: z.nativeEnum(enumASTNodeTypes), + operator: z.union([z.nativeEnum(enumMathOps), z.null()]), + value: z.union([z.number(), z.string(), z.null()]), + parameters: z.record(z.string(), z.unknown()), + children: z.union([z.array(ZAST), z.null()]) // null for lead node; OPERATION type should not be a lead node. 
+})); + +const ZValueVerifier = z.object({ + formula: ZAST, + condition: z.nativeEnum(enumConditionOps), + value: z.union([z.string(), z.number()]), + parameters: z.record(z.string(), z.unknown()) +}); + +const ZFieldProperty = z.object({ + name: z.string(), + verifier: z.optional(z.array(z.array(ZValueVerifier))), + description: z.optional(z.string()), + required: z.boolean() +}); + +const ZCategoricalOption = z.object({ + code: z.string(), + description: z.string() +}); + +const CreateFieldInputSchema = z.object({ + studyId: z.string(), + fieldName: z.string(), + fieldId: z.string(), + description: z.optional(z.string()), + dataType: z.nativeEnum(enumDataTypes), + categoricalOptions: z.optional(z.array(ZCategoricalOption)), + unit: z.optional(z.string()), + comments: z.optional(z.string()), + verifier: z.optional(z.array(z.array(ZValueVerifier))), // Ensure this matches your interface + properties: z.optional(z.array(ZFieldProperty)) // Ensure this matches your interface +}); + +const EditFieldInputSchema = CreateFieldInputSchema; + +const ZDataClipInput = z.object({ + fieldId: z.string(), + value: z.string(), + timestamps: z.optional(z.number()), + properties: z.optional(z.any()) +}); + +const utilsCore = new TRPCUtilsCore(); +const dataCore = new TRPCDataCore(db, new TRPCFileCore(db, objStore), new TRPCPermissionCore(db), utilsCore, new TRPCDataTransformationCore(utilsCore)); + +export const dataRouter = router({ + /** + * Get the list of fields of a study. + * + * @param studyId - The id of the study. + * @param projectId - The id of the project. + * @param versionId - The id of the version. By default, we will return data until this version. If not specificed, will return the latest versioned data. + * + * @return IField - The list of objects of IField. 
+ */ + getStudyFields: baseProcedure.input(z.object({ + studyId: z.string(), + versionId: z.optional(z.union([z.string(), z.null(), z.array(z.union([z.string(), z.null()]))])), + fieldIds: z.optional(z.array(z.string())) + })).query(async (opts) => { + return await dataCore.getStudyFields(opts.ctx.user, opts.input.studyId, opts.input.versionId, opts.input.fieldIds); + }), + /** + * Create a field of a study. To adjust to data versioning, creating an existing field will not throw an error. + * + * @param studyId - The id of the study. + * @param fieldName - The name of the field. + * @param fieldId - The value of the id of the field. Should be unique. + * @param description - The description of the field. + * @param dataType - The dataType of the field. + * @param categoricalOptions - The options of the field if the field is a categorical field. + * @param unit - The unit of the field. + * @param comments - The comments of the field. + * @param verifier - The verifier of the field. + * @param properties - The properties of the field. + * + * @return IField + */ + createStudyField: baseProcedure.input(CreateFieldInputSchema).mutation(async (opts) => { + return await dataCore.createField(opts.ctx.user, { + studyId: opts.input.studyId, + fieldName: opts.input.fieldName, + fieldId: opts.input.fieldId, + description: opts.input.description, + dataType: opts.input.dataType, + categoricalOptions: opts.input.categoricalOptions, + unit: opts.input.unit, + comments: opts.input.comments, + verifier: opts.input.verifier, + properties: opts.input.properties + }); + }), + /** + * Edit a field of a study. + * + * @param studyId - The id of the study. + * @param fieldName - The name of the field. + * @param fieldId - The value of the id of the field. Should be unique. + * @param description - The description of the field. + * @param dataType - The dataType of the field. + * @param categoricalOptions - The options of the field if the field is a categorical field. 
+ * @param unit - The unit of the field. + * @param comments - The comments of the field. + * @param verifier - The verifier of the field. + * @param properties - The properties of the field. + * + * @return IField + */ + editStudyField: baseProcedure.input(EditFieldInputSchema).mutation(async (opts) => { + return await dataCore.editField(opts.ctx.user, { + studyId: opts.input.studyId, + fieldName: opts.input.fieldName, + fieldId: opts.input.fieldId, + description: opts.input.description, + dataType: opts.input.dataType, + categoricalOptions: opts.input.categoricalOptions, + unit: opts.input.unit, + comments: opts.input.comments, + verifier: opts.input.verifier, + properties: opts.input.properties + }); + }), + /** + * Delete a field of a study. + * + * @param studyId - The id of the study. + * @param fieldId - The id of the field. + * + * @return IGenericResponse + */ + deleteStudyField: baseProcedure.input(z.object({ + studyId: z.string(), + fieldId: z.string() + })).mutation(async (opts) => { + return await dataCore.deleteField(opts.ctx.user, opts.input.studyId, opts.input.fieldId); + }), + /** + * Upload data clips to a study. + * + * @param requester - The id of the requester. + * @param studyId - The id of the study. + * @param data - The list of data clips. + * + * @return IGenericResponse - The list of objects of IGenericResponse + */ + uploadStudyData: baseProcedure.input(z.object({ + studyId: z.string(), + data: z.array(ZDataClipInput) + })).mutation(async (opts) => { + return await dataCore.uploadData( + opts.ctx.req.user, + opts.input.studyId, + opts.input.data + ); + }), + /** + * Get the data of a study. + * + * @param studyId - The id of the study. + * @param versionId - The id of the data version. By default not specified for the latest version. + * @param aggregation - The aggregation pipeline. Used for data post-processing. + * @param fieldIds - The list of fields to return. + * @param useCache - Whether to fetch the data from cache. 
+ * @param forceUpdate - Whether to update the cache with the results from this call. + * + * @return Partial[] - The list of objects of Partial + */ + getStudyData: baseProcedure.input(z.object({ + studyId: z.string(), + versionId: z.optional(z.union([z.string(), z.null(), z.array(z.union([z.string(), z.null()]))])), + aggregation: z.optional(z.record(z.string(), z.array(z.object({ + operationName: z.nativeEnum(enumDataTransformationOperation), + params: z.record(z.string(), z.unknown()) + })))), + fieldIds: z.optional(z.array(z.string())), + useCache: z.optional(z.boolean()), + forceUpdate: z.optional(z.boolean()), + formatted: z.optional(z.string()) + })).query(async (opts) => { + return await dataCore.getData( + opts.ctx.req.user, + opts.input.studyId, + opts.input.fieldIds, + opts.input.versionId, + opts.input.aggregation, + opts.input.useCache, + opts.input.forceUpdate + ); + }), + /** + * Get the data of a study filtered by dataVersion. This is a simplified version of the getData function. + * + * @param studyId - The id of the study. + * @param versionId - The id of the data version. By default not specified for the latest version. + * @param aggregation - The aggregation pipeline. Used for data post preocessing. + * @param fieldIds - The list of fields to return. + * @param useCache - Whether to use fetch the data from cache. + * @param forceUpdate - Whether to update the cache with the results from this call. + * + * @return Partial[] - The list of objects of Partial + */ + getStudyDataLatest: baseProcedure.input(z.object({ + studyId: z.string(), + fieldIds: z.optional(z.array(z.string())) + })).query(async (opts) => { + return await dataCore.getDataLatest( + opts.ctx.req.user, + opts.input.studyId, + opts.input.fieldIds + ); + }), + /** + * Delete data of a study. We add a deleted document in the database. + * + * @param requester - The id of the requester. + * @param studyId - The id of the study. + * @param documentId - The id of the mongo document. 
+ * + * @return IGenreicResponse - The object of IGenericResponse. + */ + deleteStudyData: baseProcedure.input(z.object({ + studyId: z.string(), + fieldId: z.string(), + properties: z.optional(z.any()) + })).mutation(async (opts) => { + return await dataCore.deleteData( + opts.ctx.req.user, + opts.input.studyId, + opts.input.fieldId, + opts.input.properties + ); + }), + /** + * Upload a data file. + * + * @param studyId - The id of the study. + * @param file - The file to upload. + * @param properties - The properties of the file. Need to match field properties if defined. + * @param fieldId - The id of the field. + * + * @return IData + */ + uploadStudyFileData: baseProcedure.input(z.object({ + studyId: z.string(), + files: z.object({ + file: z.array(FileUploadSchema) + }), + properties: z.optional(z.string()), // pass the json string of the properties + fieldId: z.string() + })).mutation(async (opts) => { + return await dataCore.uploadFileData( + opts.ctx.req.user, + opts.input.studyId, + opts.input.files.file?.[0], + opts.input.fieldId, + opts.input.properties + ); + }), + /** + * Get the files of a study. + * + * @param studyId - The id of the study. + * @param versionId - The id of the data version. By default not specified for the latest version. + * @param fieldIds - The list of fields to return. + * @param aggregation - The aggregation pipeline. Used for data post preocessing. + * @param useCache - Whether to use fetch the data from cache. + * @param forceUpdate - Whether to update the cache with the results from this call. + * @param readable - Whether to return the readable stream. + * + * @return IFile[] - The list of objects of IFile. 
+ */ + getFiles: baseProcedure.input(z.object({ + studyId: z.string(), + versionId: z.optional(z.string()), + fieldIds: z.optional(z.array(z.string())) + })).query(async (opts) => { + return await dataCore.getStudyFiles( + opts.ctx.req.user, + opts.input.studyId, + opts.input.fieldIds, + opts.input.versionId + ); + }) +}); + + +/** Example of data versioning aggregation */ +/** +{ + clinical: [ + { operationName: 'Group', params: { keys: ['fieldId', 'properties.Participant ID', 'properties.Visit ID'], skipUnmatch: true } }, + { operationName: 'LeaveOne', params: { scoreFormula: { type: enumASTNodeTypes.VARIABLE, operator: null, value: 'life.createdTime', parameters: {}, children: null }, isDescend: true } }, + { + operationName: 'Filter', params: { + filters: { + deleted: { + formula: { + type: enumASTNodeTypes.VARIABLE, + operation: null, + value: 'life.deletedTime', + parameter: {}, + children: null + }, + condition: enumConditionOps.GENERALISNULL, + value: '', + parameters: {} + } + } + } + } + ], + device: [ + { operationName: 'Group', params: { keys: ['properties.Participant ID', 'properties.Device Type', 'properties.Device ID', 'properties.Start Date', 'properties.End Date'], skipUnmatch: true } }, + { operationName: 'LeaveOne', params: { scoreFormula: { type: enumASTNodeTypes.VARIABLE, operator: null, value: 'life.createdTime', parameters: {}, children: null }, isDescend: true } }, + { + operationName: 'Filter', params: { + filters: { + deleted: { + formula: { + type: enumASTNodeTypes.VARIABLE, + operation: null, + value: 'life.deletedTime', + parameter: {}, + children: null + }, + condition: enumConditionOps.GENERALISNULL, + value: '', + parameters: {} + } + } + } + } + // { operationName: 'Concat', params: { concatKeys: ['properties', 'life'] } } + ] + } + +*/ \ No newline at end of file diff --git a/packages/itmat-interface/src/trpc/tRPCRouter.ts b/packages/itmat-interface/src/trpc/tRPCRouter.ts index 32e890945..146f7ae34 100644 --- 
a/packages/itmat-interface/src/trpc/tRPCRouter.ts +++ b/packages/itmat-interface/src/trpc/tRPCRouter.ts @@ -1,3 +1,4 @@ +import { dataRouter } from './dataProcedure'; import { driveRouter } from './driveProcedure'; import { studyRouter } from './studyProcedure'; import { router } from './trpc'; @@ -6,7 +7,8 @@ import { userRouter } from './userProcedure'; export const tRPCRouter = router({ user: userRouter, drive: driveRouter, - study: studyRouter + study: studyRouter, + data: dataRouter }); export type APPTRPCRouter = typeof tRPCRouter; \ No newline at end of file diff --git a/packages/itmat-interface/test/GraphQLTests/study.test.ts b/packages/itmat-interface/test/GraphQLTests/study.test.ts index 801652751..1dc6eddf1 100644 --- a/packages/itmat-interface/test/GraphQLTests/study.test.ts +++ b/packages/itmat-interface/test/GraphQLTests/study.test.ts @@ -33,7 +33,6 @@ import { enumUserTypes, studyType, enumDataTypes, - IDataEntry, IUser, IFile, IField, @@ -1815,7 +1814,7 @@ if (global.hasMinio) { expect(res.body.errors).toBeUndefined(); // check both data collection and file collection const fileFirst = await db.collections.files_collection.findOne({ 'studyId': createdStudy.id, 'life.deletedTime': null }); - const dataFirst = await db.collections.data_collection.findOne({ 'studyId': createdStudy.id, 'properties.m_visitId': '1', 'fieldId': '33' }); + const dataFirst = await db.collections.data_collection.findOne({ 'studyId': createdStudy.id, 'properties.m_visitId': '1', 'fieldId': '33' }); expect(dataFirst?.value).toBe(fileFirst.id); expect(dataFirst?.life.deletedTime).toBe(null); }); diff --git a/packages/itmat-interface/test/filesForTests/RandomFile.random b/packages/itmat-interface/test/filesForTests/RandomFile.random new file mode 100644 index 000000000..41bc2ceff --- /dev/null +++ b/packages/itmat-interface/test/filesForTests/RandomFile.random @@ -0,0 +1 @@ +just testing Random. 
\ No newline at end of file diff --git a/packages/itmat-interface/test/trpcTests/data.test.ts b/packages/itmat-interface/test/trpcTests/data.test.ts new file mode 100644 index 000000000..9cb23b95a --- /dev/null +++ b/packages/itmat-interface/test/trpcTests/data.test.ts @@ -0,0 +1,2896 @@ +/** + * @with Minio + */ +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-nocheck +import { MongoMemoryServer } from 'mongodb-memory-server'; +import { db } from '../../src/database/database'; +import { Express } from 'express'; +import { objStore } from '../../src/objStore/objStore'; +import request from 'supertest'; +import { connectAdmin, connectUser, connectAgent } from './_loginHelper'; +import { Router } from '../../src/server/router'; +import { MongoClient } from 'mongodb'; +import { setupDatabase } from '@itmat-broker/itmat-setup'; +import config from '../../config/config.sample.json'; +import { v4 as uuid } from 'uuid'; +import { enumUserTypes, enumStudyRoles, enumDataTypes, enumASTNodeTypes, enumConditionOps, enumConfigType, enumCoreErrors } from '@itmat-broker/itmat-types'; +import { encodeQueryParams } from './helper'; +import path from 'path'; + +if (global.hasMinio) { + let app: Express; + let mongodb: MongoMemoryServer; + let admin: request.SuperTest; + let user: request.SuperTest; + let authorisedUser: request.SuperTest; + let mongoConnection: MongoClient; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + let mongoClient: Db; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + let adminProfile; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + let userProfile; + let authorisedUserProfile; + let study; + let fullPermissionRole: IRole; + + afterAll(async () => { + await db.closeConnection(); + await mongoConnection?.close(); + await mongodb.stop(); + + /* claer all mocks */ + jest.clearAllMocks(); + }); + + beforeAll(async () => { // eslint-disable-line no-undef + /* Creating a in-memory MongoDB 
instance for testing */ + const dbName = uuid(); + mongodb = await MongoMemoryServer.create({ instance: { dbName } }); + const connectionString = mongodb.getUri(); + await setupDatabase(connectionString, dbName); + /* Wiring up the backend server */ + config.objectStore.port = (global as any).minioContainerPort; + config.database.mongo_url = connectionString; + config.database.database = dbName; + await db.connect(config.database, MongoClient); + await objStore.connect(config.objectStore); + const router = new Router(config); + await router.init(); + /* Connect mongo client (for test setup later / retrieve info later) */ + mongoConnection = await MongoClient.connect(connectionString); + mongoClient = mongoConnection.db(dbName); + + /* Connecting clients for testing later */ + app = router.getApp(); + admin = request.agent(app); + user = request.agent(app); + await connectAdmin(admin); + await connectUser(user); + + // add the root node for each user + const users = await db.collections.users_collection.find({}).toArray(); + adminProfile = users.filter(el => el.type === enumUserTypes.ADMIN)[0]; + userProfile = users.filter(el => el.type === enumUserTypes.STANDARD)[0]; + + const username = uuid(); + authorisedUserProfile = { + username, + type: enumUserTypes.STANDARD, + firstname: `${username}_firstname`, + lastname: `${username}_lastname`, + password: '$2b$04$j0aSK.Dyq7Q9N.r6d0uIaOGrOe7sI4rGUn0JNcaXcPCv.49Otjwpi', + otpSecret: 'H6BNKKO27DPLCATGEJAZNWQV4LWOTMRA', + email: `${username}@example.com`, + description: 'I am a new user.', + emailNotificationsActivated: true, + organisation: 'organisation_system', + id: `new_user_id_${username}`, + emailNotificationsStatus: { expiringNotification: false }, + resetPasswordRequests: [], + expiredAt: 1991134065000, + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await 
mongoClient.collection(config.database.collections.users_collection).insertOne(authorisedUserProfile); + authorisedUser = request.agent(app); + await connectAgent(authorisedUser, username, 'admin', authorisedUserProfile.otpSecret); + }); + + beforeEach(async () => { + study = { + id: uuid(), + name: 'Test Study', + currentDataVersion: -1, // index; dataVersions[currentDataVersion] gives current version; // -1 if no data + dataVersions: [], + description: null, + profile: null, + webLayout: [], + life: { + createdTime: 0, + createdUser: enumUserTypes.SYSTEM, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.configs_collection.insertOne({ + id: uuid(), + type: enumConfigType.STUDYCONFIG, + key: study.id, + properties: { + id: uuid(), + life: { + createdTime: 1648060684215, + createdUser: '8a51bda7-64b8-46af-b087-43caad743a81', + deletedTime: null, + deletedUser: null + }, + metadata: { + + }, + defaultStudyProfile: null, + defaultMaximumFileSize: 8589934592, + defaultMaximumProfileSize: 10485760, + defaultRepresentationForMissingValue: '99999', + defaultFileColumns: [ + { + title: 'Participant ID', + type: 'string' + }, + { + title: 'Device ID', + type: 'string' + }, + { + title: 'Device Type', + type: 'string' + }, + { + title: 'Start Date', + type: 'UNIX timestamps' + }, + { + title: 'End Date', + type: 'UNIX timestamps' + } + ], + defaultFileColumnsPropertyColor: 'orange', + defaultFileDirectoryStructure: { + pathLabels: [ + 'SubjectId', + 'VisitId' + ], + description: null + }, + defaultVersioningKeys: ['fieldId', 'properties.SubjectId', 'properties.VisitId'] + } + }); + await db.collections.studies_collection.insertOne(study); + fullPermissionRole = { + id: 'full_permission_role_id', + studyId: study.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('111', 2), + includeUnVersioned: true + }], + 
studyRole: enumStudyRoles.STUDY_USER, + users: [authorisedUserProfile.id], + groups: [], + life: { + createdTime: 0, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + }); + + afterEach(async () => { + await db.collections.studies_collection.deleteMany({}); + await db.collections.field_dictionary_collection.deleteMany({}); + await db.collections.data_collection.deleteMany({}); + await db.collections.roles_collection.deleteMany({}); + await db.collections.files_collection.deleteMany({}); + await db.collections.cache_collection.deleteMany({}); + await db.collections.configs_collection.deleteMany({}); + }); + + describe('tRPC data APIs', () => { + test('Create a field', async () => { + const response = await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALGREATERTHAN, + value: 10, + parameters: {} + }, { + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALLESSTHAN, + value: 50, + parameters: {} + }]], + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + expect(response.status).toBe(200); + expect(response.body.result.data.fieldId).toBe('1'); + const field = await db.collections.field_dictionary_collection.findOne({ 'studyId': study.id, 'fieldId': '1', 'life.deletedTime': null }); + expect(field).toBeDefined(); + 
expect(field?.fieldId).toBe('1'); + }); + test('Create a field (study does not exist)', async () => { + const response = await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: 'random', + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + const field = await db.collections.field_dictionary_collection.findOne({ 'studyId': study.id, 'fieldId': '1', 'life.deletedTime': null }); + expect(field).toBe(null); + }); + test('Create a field (no permission)', async () => { + const response = await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '2', + description: '', + dataType: enumDataTypes.INTEGER + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('No permission to create this field.'); + const field = await db.collections.field_dictionary_collection.findOne({ 'studyId': study.id, 'fieldId': '1', 'life.deletedTime': null }); + expect(field).toBe(null); + }); + test('Create a field (multiple times)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const response = await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + expect(response.status).toBe(200); + const field = await db.collections.field_dictionary_collection.find({ 'studyId': study.id, 'fieldId': '1', 'life.deletedTime': null }).toArray(); + expect(field).toHaveLength(2); + }); + test('Create a field (undefined categorical options for categorical field)', async () => { + const response = await 
authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.CATEGORICAL + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('1-Test Field 1: possible values can\'t be empty if data type is categorical.'); + const field = await db.collections.field_dictionary_collection.find({ 'studyId': study.id, 'fieldId': '1', 'life.deletedTime': null }).toArray(); + expect(field).toHaveLength(0); + }); + test('Create a field (unauthorised user)', async () => { + const response = await user.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.CATEGORICAL + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Create a field (authorised user but no data permission)', async () => { + const response = await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '2', + description: '', + dataType: enumDataTypes.CATEGORICAL + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('No permission to create this field.'); + }); + test('Get fields (unversioned)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const paramteres = { + studyId: study.id, + versionId: null + }; + const response = await authorisedUser.get('/trpc/data.getStudyFields?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data).toHaveLength(1); + expect(response.body.result.data[0].fieldId).toBe('1'); + }); + test('Get fields (unauthorised user)', async () => { + await 
authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const paramteres = { + studyId: study.id, + versionId: null + }; + const response = await user.get('/trpc/data.getStudyFields?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Get fields (authorised user on unauthorised fields)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await db.collections.field_dictionary_collection.insertOne({ + studyId: study.id, + fieldId: '2', + fieldName: 'Test Field 2', + description: '', + dataType: enumDataTypes.INTEGER, + life: { + createdTime: 0, + createdUser: enumUserTypes.SYSTEM, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + const paramteres = { + studyId: study.id, + versionId: null + }; + const response = await authorisedUser.get('/trpc/data.getStudyFields?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data).toHaveLength(1); + expect(response.body.result.data[0].fieldId).toBe('1'); + }); + test('Get fields (versioned)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await db.collections.roles_collection.updateMany({}, { + $set: { + studyRole: enumStudyRoles.STUDY_MANAGER + } + }); + await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: study.id, + dataVersion: '1.0', + tag: '1.0' + }); + const paramteres = { + studyId: study.id + }; + const response = await 
authorisedUser.get('/trpc/data.getStudyFields?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data).toHaveLength(1); + expect(response.body.result.data[0].fieldId).toBe('1'); + }); + test('Delete a field', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const response = await authorisedUser.post('/trpc/data.deleteStudyField') + .send({ + studyId: study.id, + fieldId: '1' + }); + expect(response.status).toBe(200); + expect(response.body.result.data.id).toBe('1'); + const field = await db.collections.field_dictionary_collection.find({ studyId: study.id, fieldId: '1' }).toArray(); + expect(field).toHaveLength(2); + const paramteres = { + studyId: study.id, + versionId: null + }; + const response2 = await authorisedUser.get('/trpc/data.getStudyFields?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response2.body.result.data).toHaveLength(0); + }); + test('Delete a field (unauthorised user)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const response = await user.post('/trpc/data.deleteStudyField') + .send({ + studyId: study.id, + fieldId: '1' + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Delete a field (authorised user but unauthorised field)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await db.collections.field_dictionary_collection.insertOne({ + studyId: study.id, + fieldId: '2', + fieldName: 'Test 
Field 2', + description: '', + dataType: enumDataTypes.INTEGER, + life: { + createdTime: 0, + createdUser: enumUserTypes.SYSTEM, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + const response = await authorisedUser.post('/trpc/data.deleteStudyField') + .send({ + studyId: study.id, + fieldId: '2' + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('No permission to delete this field.'); + }); + test('Delete a field (study does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const response = await authorisedUser.post('/trpc/data.deleteStudyField') + .send({ + studyId: 'random', + fieldId: '1' + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + const fields = await db.collections.field_dictionary_collection.find({ fieldId: '1' }).toArray(); + expect(fields).toHaveLength(1); + }); + test('Delete a field (field does not exist)', async () => { + const response = await authorisedUser.post('/trpc/data.deleteStudyField') + .send({ + studyId: study.id, + fieldId: '1' + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('Field does not exist.'); + const fields = await db.collections.field_dictionary_collection.find({ fieldId: '1' }).toArray(); + expect(fields).toHaveLength(0); + }); + test('Upload a data clip (integer)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + expect(respone.status).toBe(200); + 
expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + expect(data[0].fieldId).toBe('1'); + }); + test('Upload a data clip (decimal)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.DECIMAL + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1.1' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + expect(data[0].fieldId).toBe('1'); + }); + test('Upload a data clip (boolean)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.BOOLEAN + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: 'true' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + expect(data[0].fieldId).toBe('1'); + }); + test('Upload a data clip (date)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.DATETIME + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '2023-12-06T17:28:32.397Z' + }] + }); + 
expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + expect(data[0].fieldId).toBe('1'); + }); + test('Upload a data clip (categorical)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.CATEGORICAL, + categoricalOptions: [ + { code: '1', description: '1' }, + { code: '2', description: '2' } + ] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + expect(data[0].fieldId).toBe('1'); + }); + test('Upload a data clip (study does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: 'random', + data: [{ + fieldId: '1', + value: '10' + }] + }); + expect(respone.status).toBe(400); + expect(respone.body.error.message).toBe('Study does not exist.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (unauthorised user)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const respone = await 
user.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '2', + value: '10' + }] + }); + expect(respone.status).toBe(400); + expect(respone.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Upload a data clip (authorised user with unauthorised field)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await db.collections.field_dictionary_collection.insertOne({ + studyId: study.id, + fieldId: '2', + fieldName: 'Test Field 2', + description: '', + dataType: enumDataTypes.INTEGER, + life: { + createdTime: 0, + createdUser: enumUserTypes.SYSTEM, + deletedTime: null, + deletedUser: null + }, + metadata: {} + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '2', + value: '10' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].description).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Upload a data clip (field does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '11', + value: '10' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 11: Field not found'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (value is invalid as integer)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + 
.send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: 'random' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1: Cannot parse as integer.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (value is invalid as decimal)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.DECIMAL + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: 'random' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1: Cannot parse as decimal.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (value is invalid as boolean)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.BOOLEAN + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: 'random' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1: Cannot parse as boolean.'); + const data = 
await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (value is invalid as date)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.DATETIME + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: 'random' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1: Cannot parse as date. Value for date type must be in ISO format.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (value is invalid as categorical)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.CATEGORICAL, + categoricalOptions: [ + { code: '1', description: '1' }, + { code: '2', description: '2' } + ] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: 'random' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1: Cannot parse as categorical, value not in value list.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (duplicate data clips)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: 
'', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1' + }] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(2); + expect(data[0].fieldId).toBe(data[1].fieldId); + }); + test('Upload a data clip (pass the verifier)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALGREATERTHAN, + value: 10, + parameters: {} + }, { + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALLESSTHAN, + value: 50, + parameters: {} + }]] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '25' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + }); + test('Upload a data clip (failed the verifier)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + 
operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALGREATERTHAN, + value: 10, + parameters: {} + }, { + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALLESSTHAN, + value: 50, + parameters: {} + }]] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '5' + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1 value 5: Failed to pass the verifier.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Upload a data clip (pass the property verifier)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALGREATERTHAN, + value: 10, + parameters: {} + }, { + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALLESSTHAN, + value: 50, + parameters: {} + }]], + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '25', + properties: { + SubjectId: 
'I7' + } + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(1); + }); + test('Upload a data clip (failed the property verifier, required not satisfy)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALGREATERTHAN, + value: 10, + parameters: {} + }, { + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALLESSTHAN, + value: 50, + parameters: {} + }]], + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '25' + }], + properties: { + random: 'random' + } + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1: Property SubjectId is required.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + //Field 1 value 25: Property SubjectId failed to pass the verifier. 
+ }); + test('Upload a data clip (failed the property verifier, value not satisfy)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALGREATERTHAN, + value: 10, + parameters: {} + }, { + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.NUMERICALLESSTHAN, + value: 50, + parameters: {} + }]], + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + const respone = await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '25', + properties: { + SubjectId: 'K7' + } + }] + }); + expect(respone.status).toBe(200); + expect(respone.body.result.data[0].successful).toBe(false); + expect(respone.body.result.data[0].description).toBe('Field 1 value K7: Property SubjectId failed to pass the verifier.'); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(0); + }); + test('Delete a data clip', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await authorisedUser.post('/trpc/data.deleteStudyData') + 
.send({ + studyId: study.id, + fieldId: '1' + }); + expect(response2.status).toBe(200); + expect(response2.body.result.data.successful).toBe(true); + const data = await db.collections.data_collection.find({ studyId: study.id }).toArray(); + expect(data).toHaveLength(2); + expect(data[0].life.deletedTime).toBeNull(); + expect(data[1].life.deletedTime).toBeNull(); + }); + test('Delete a data clip (study does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await authorisedUser.post('/trpc/data.deleteStudyData') + .send({ + studyId: 'random', + fieldId: '1' + }); + expect(response2.status).toBe(400); + expect(response2.body.error.message).toBe('Study does not exist.'); + }); + test('Delete a data clip (delete twice)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + await authorisedUser.post('/trpc/data.deleteStudyData') + .send({ + studyId: study.id, + fieldId: '1' + }); + const response2 = await authorisedUser.post('/trpc/data.deleteStudyData') + .send({ + studyId: study.id, + fieldId: '1' + }); + expect(response2.status).toBe(200); + const data = await db.collections.data_collection.find({ studyId: study.id, fieldId: '1' }, { sort: { 'life.createdTime': -1 } }).toArray(); + expect(data).toHaveLength(3); + expect(data[2].value).toBe(10); + expect(data[0].value).toBeNull(); + expect(data[1].value).toBeNull(); + }); + test('Upload a file data', 
async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE + }); + const filePath = path.join(__dirname, '../filesForTests/I7N3G6G-MMM7N3G6G-20200704-20200721.txt'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + .field('studyId', study.id) + .field('fieldId', '1') + .field('properties', JSON.stringify({ + SubjectId: '1A', + VisitId: '1' + })); + const response = await request; + expect(response.status).toBe(200); + const fileObj = await db.collections.files_collection.findOne({}); + const dataObj = await db.collections.data_collection.findOne({}); + expect(response.body.result.data.id).toBe(fileObj.id); + expect(dataObj?.value).toBe(fileObj.id); + + }); + test('Upload a file data (study does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE + }); + const filePath = path.join(__dirname, '../filesForTests/I7N3G6G-MMM7N3G6G-20200704-20200721.txt'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + .field('studyId', 'random') + .field('fieldId', '1'); + const response = await request; + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Upload a file data (field does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE + }); + const filePath = path.join(__dirname, '../filesForTests/I7N3G6G-MMM7N3G6G-20200704-20200721.txt'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + 
.field('studyId', study.id) + .field('fieldId', 'random'); + const response = await request; + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Upload a file data (file format not supported)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE + }); + const filePath = path.join(__dirname, '../filesForTests/RandomFile.random'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + .field('studyId', study.id) + .field('fieldId', '1'); + const response = await request; + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('File type RANDOM not supported.'); + }); + test('Upload a file data (with properties)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE, + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + const filePath = path.join(__dirname, '../filesForTests/I7N3G6G-MMM7N3G6G-20200704-20200721.txt'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + .field('studyId', study.id) + .field('fieldId', '1') + .field('properties', JSON.stringify({ + SubjectId: 'I7' + })); + const response = await request; + expect(response.status).toBe(200); + const fileObj = await db.collections.files_collection.findOne({}); + const dataObj = await db.collections.data_collection.findOne({}); + expect(response.body.result.data.id).toBe(fileObj.id); + 
expect(dataObj?.value).toBe(fileObj.id); + }); + test('Upload a file data (missing properties)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE, + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + const filePath = path.join(__dirname, '../filesForTests/I7N3G6G-MMM7N3G6G-20200704-20200721.txt'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + .field('studyId', study.id) + .field('fieldId', '1'); + const response = await request; + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('Field 1: Property SubjectId is required.'); + }); + test('Upload a file data (incorrect properties)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.FILE, + properties: [{ + name: 'SubjectId', + verifier: [[{ + formula: { + type: enumASTNodeTypes.SELF, + operator: null, + value: null, + parameters: {}, + children: [] + }, + condition: enumConditionOps.STRINGREGEXMATCH, + value: '^I.*$', + parameters: {} + }]], + required: true + }] + }); + const filePath = path.join(__dirname, '../filesForTests/I7N3G6G-MMM7N3G6G-20200704-20200721.txt'); + const request = authorisedUser.post('/trpc/data.uploadStudyFileData') + .attach('file', filePath) + .field('studyId', study.id) + .field('fieldId', '1') + .field('properties', JSON.stringify({ + SubjectId: 'K7' + })); + const response = await request; + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('Field 1 value K7: 
Property SubjectId failed to pass the verifier.'); + }); + test('Get data', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '10', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: undefined, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response2 = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response2.status).toBe(200); + expect(response2.body.result.data.raw).toHaveLength(1); + expect(response2.body.result.data.raw[0].fieldId).toBe('1'); + }); + test('Get data (aggregation with group)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + 
properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'GROUP', + params: { + keys: [ + 'properties.SubjectId' + ], + skipUnmatch: false + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + expect(response.body.result.data.clinical[0]).toHaveLength(2); + expect(response.body.result.data.clinical[1]).toHaveLength(2); + expect(response.body.result.data.clinical[0][0].properties.SubjectId).toBe(response.body.result.data.clinical[0][1].properties.SubjectId); + expect(response.body.result.data.clinical[1][0].properties.SubjectId).toBe(response.body.result.data.clinical[1][1].properties.SubjectId); + }); + test('Get data (aggregation with affine)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + 
VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'AFFINE', + params: { + removedKeys: [ + '_id', + 'id', + 'studyId', + 'dataVersion', + 'life', + 'metadata' + ], + addedKeyRules: [{ + key: { + type: enumASTNodeTypes.VALUE, + operator: null, + value: 'randomKey', + parameters: {}, + children: null + }, + value: { + type: enumASTNodeTypes.VALUE, + operator: null, + value: 'randomValue', + parameters: {}, + children: null + } + }], + rules: {} + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(Object.keys(response.body.result.data.clinical[0])).toHaveLength(4); + expect(Object.keys(response.body.result.data.clinical[1])).toHaveLength(4); + expect(Object.keys(response.body.result.data.clinical[2])).toHaveLength(4); + expect(Object.keys(response.body.result.data.clinical[3])).toHaveLength(4); + expect(response.body.result.data.clinical[0].randomKey).toBe('randomValue'); + expect(response.body.result.data.clinical[1].randomKey).toBe('randomValue'); + expect(response.body.result.data.clinical[2].randomKey).toBe('randomValue'); + expect(response.body.result.data.clinical[3].randomKey).toBe('randomValue'); + }); + test('Get data (aggregation with leaveOne)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + 
fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'GROUP', + params: { + keys: [ + 'properties.SubjectId' + ], + skipUnmatch: false + } + }, { + operationName: 'LEAVEONE', + params: { + scoreFormula: { + operator: null, + type: 'VARIABLE', + value: 'life.createdTime', + children: null, + parameters: {} + }, + isDescend: true + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + }); + test('Get data (aggregation with concat)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: 
true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'GROUP', + params: { + keys: [ + 'properties.SubjectId' + ], + skipUnmatch: false + } + }, { + operationName: 'CONCAT', + params: { + concatKeys: ['value'] + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + expect(response.body.result.data.clinical[0].value).toHaveLength(2); + expect(response.body.result.data.clinical[0].value).toHaveLength(2); + }); + test('Get data (aggregation with deconcat)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.JSON, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: JSON.stringify(['1', '2', '3']), + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'DECONCAT', + params: { + deconcatKeys: [ + 'value' + ], + matchMode: 'combinations' + } + 
}] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(1); + expect(response.body.result.data.clinical[0]).toHaveLength(3); + expect(response.body.result.data.clinical[0][0].value).toBe('1'); + expect(response.body.result.data.clinical[0][1].value).toBe('2'); + expect(response.body.result.data.clinical[0][2].value).toBe('3'); + }); + test('Get data (aggregation with join)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'AFFINE', + params: { + removedKeys: [ + '_id', + 'id', + 'studyId', + 'dataVersion', + 'life', + 'metadata', + 'properties', + 'fieldId', + 'value' + ], + addedKeyRules: [{ + 
key: { + type: enumASTNodeTypes.VALUE, + operator: null, + value: 'SubjectId', + parameters: {}, + children: null + }, + value: { + type: enumASTNodeTypes.VARIABLE, + operator: null, + value: 'properties.SubjectId', + parameters: {}, + children: null + } + }, { + key: { + type: enumASTNodeTypes.VARIABLE, + operator: null, + value: 'fieldId', + parameters: {}, + children: null + }, + value: { + type: enumASTNodeTypes.VARIABLE, + operator: null, + value: 'value', + parameters: {}, + children: null + } + }], + rules: {} + } + }, { + operationName: 'GROUP', + params: { + keys: [ + 'SubjectId' + ], + skipUnmatch: false + } + }, { + operationName: 'JOIN', + params: { + reservedKeys: [ + 'SubjectId' + ] + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + expect(response.body.result.data.clinical[0]).toMatchObject({ + SubjectId: '1A', + 1: 1, + 11: 2 + }); + expect(response.body.result.data.clinical[1]).toMatchObject({ + SubjectId: '2B', + 1: 11, + 11: 22 + }); + }); + test('Get data (aggregation with degroup)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + 
fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'AFFINE', + params: { + removedKeys: [ + '_id', + 'id', + 'studyId', + 'dataVersion', + 'life', + 'metadata', + 'properties', + 'fieldId', + 'value' + ], + addedKeyRules: [{ + key: { + type: enumASTNodeTypes.VALUE, + operator: null, + value: 'SubjectId', + parameters: {}, + children: null + }, + value: { + type: enumASTNodeTypes.VARIABLE, + operator: null, + value: 'properties.SubjectId', + parameters: {}, + children: null + } + }, { + key: { + type: enumASTNodeTypes.VARIABLE, + operator: null, + value: 'fieldId', + parameters: {}, + children: null + }, + value: { + type: enumASTNodeTypes.VARIABLE, + operator: null, + value: 'value', + parameters: {}, + children: null + } + }], + rules: {} + } + }, { + operationName: 'GROUP', + params: { + keys: [ + 'SubjectId' + ], + skipUnmatch: false + } + }, { + operationName: 'JOIN', + params: { + reservedKeys: [ + 'SubjectId' + ] + } + }, { + operationName: 'DEGROUP', + params: { + sharedKeys: ['SubjectId'], + targetKeyGroups: [['1'], ['11']] + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + expect(response.body.result.data.clinical[0]).toHaveLength(2); + expect(response.body.result.data.clinical[1]).toHaveLength(2); + expect(response.body.result.data.clinical[0][0]).toMatchObject({ + 1: 1, + 
SubjectId: '1A' + }); + expect(response.body.result.data.clinical[0][1]).toMatchObject({ + 11: 2, + SubjectId: '1A' + }); + expect(response.body.result.data.clinical[1][0]).toMatchObject({ + 1: 11, + SubjectId: '2B' + }); + expect(response.body.result.data.clinical[1][1]).toMatchObject({ + 11: 22, + SubjectId: '2B' + }); + }); + test('Get data (aggregation with filter)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'FILTER', + params: { + filters: { + tag: [{ + formula: { + value: 'fieldId', + operation: null, + type: 'VARIABLE', + parameter: {}, + children: null + }, + value: '11', + condition: enumConditionOps.STRINGEQUAL, + parameters: {} + }] + } + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + 
encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + expect(response.body.result.data.clinical[0].fieldId).toBe('11'); + expect(response.body.result.data.clinical[1].fieldId).toBe('11'); + }); + test('Get data (aggregation with flatten)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'FLATTEN', + params: { + keepFlattened: true, + flattenedKey: 'properties', + keepFlattenedKey: false + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(4); + 
expect(response.body.result.data.clinical[0].SubjectId).toBeDefined(); + expect(response.body.result.data.clinical[1].SubjectId).toBeDefined(); + expect(response.body.result.data.clinical[2].SubjectId).toBeDefined(); + expect(response.body.result.data.clinical[3].SubjectId).toBeDefined(); + }); + test('Get data (aggregation with count)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: { + clinical: [{ + operationName: 'GROUP', + params: { + keys: [ + 'properties.SubjectId' + ], + skipUnmatch: false + } + }, { + operationName: 'COUNT', + params: { + + } + }] + }, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.clinical).toHaveLength(2); + 
expect(response.body.result.data.clinical[0].count).toBe(2); + expect(response.body.result.data.clinical[1].count).toBe(2); + }); + test('Get data (with fieldIds)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 11', + fieldId: '11', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '11', + value: '2', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }, { + fieldId: '1', + value: '11', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }, { + fieldId: '11', + value: '22', + properties: { + SubjectId: '2B', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: undefined, + fieldIds: ['1'], + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.raw).toHaveLength(2); + expect(response.body.result.data.raw[0].fieldId).toBe('1'); + expect(response.body.result.data.raw[1].fieldId).toBe('1'); + }); + test('Get data (cache initialized)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, 
+ properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: undefined, + fieldIds: undefined, + useCache: true, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + const cache = await db.collections.cache_collection.findOne({}); + const file = await db.collections.files_collection.findOne({}); + expect(response.status).toBe(200); + expect(response.body.result.data.uri).toBe(cache?.uri); + expect(cache?.uri).toBe(file?.uri); + }); + test('Get data (cache existing)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + description: null, + required: true + }, { + name: 'VisitId', + description: null, + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: undefined, + fieldIds: undefined, + useCache: true, + forceUpdate: undefined + }; + await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + const cache = await db.collections.cache_collection.find({}).toArray(); + const file = await db.collections.files_collection.find({}).toArray(); + 
expect(cache).toHaveLength(1); + expect(file).toHaveLength(1); + expect(response.status).toBe(200); + expect(response.body.result.data.uri).toBe(cache[0]?.uri); + expect(cache[0]?.uri).toBe(file[0]?.uri); + }); + test('Get data (cache existing but force update)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + required: true + }, { + name: 'VisitId', + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: undefined, + fieldIds: undefined, + useCache: true, + forceUpdate: true + }; + await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + const cache1 = await db.collections.cache_collection.find({}).toArray(); + expect(cache1).toHaveLength(1); + const file1 = await db.collections.files_collection.find({}).toArray(); + expect(file1).toHaveLength(1); + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + const cache = await db.collections.cache_collection.find({}).toArray(); + const file = await db.collections.files_collection.find({}).toArray(); + expect(cache).toHaveLength(2); + expect(file).toHaveLength(2); + expect(response.status).toBe(200); + expect(response.body.result.data.uri).toBe(cache[1]?.uri); + expect(cache[0]?.uri).not.toBe(cache[1]?.uri); + }); + test('Get data (study does not exist)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ 
+ name: 'SubjectId', + description: null, + required: true + }, { + name: 'VisitId', + description: null, + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: 'random', + versionId: null, + aggregation: undefined, + fieldIds: undefined, + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe(enumCoreErrors.NO_PERMISSION_ERROR); + }); + test('Get data (no permission)', async () => { + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: study.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER, + properties: [{ + name: 'SubjectId', + description: null, + required: true + }, { + name: 'VisitId', + description: null, + required: true + }] + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: study.id, + data: [{ + fieldId: '1', + value: '1', + properties: { + SubjectId: '1A', + VisitId: '1' + } + }] + }); + const paramteres = { + studyId: study.id, + versionId: null, + aggregation: undefined, + fieldIds: ['2'], + useCache: undefined, + forceUpdate: undefined + }; + const response = await authorisedUser.get('/trpc/data.getStudyData?input=' + encodeQueryParams(paramteres)) + .query({}); + expect(response.status).toBe(200); + expect(response.body.result.data.raw).toHaveLength(0); + }); + }); +} else { + test(`${__filename.split(/[\\/]/).pop()} skipped because it requires Minio on Docker`, () => { + expect(true).toBe(true); + }); +} \ No newline at end of file diff --git a/packages/itmat-interface/test/trpcTests/study.test.ts 
b/packages/itmat-interface/test/trpcTests/study.test.ts index 5cd5e4eeb..58c3197ed 100644 --- a/packages/itmat-interface/test/trpcTests/study.test.ts +++ b/packages/itmat-interface/test/trpcTests/study.test.ts @@ -14,7 +14,7 @@ import { Db, MongoClient } from 'mongodb'; import { setupDatabase } from '@itmat-broker/itmat-setup'; import config from '../../config/config.sample.json'; import { v4 as uuid } from 'uuid'; -import { enumUserTypes, enumStudyRoles, IUser, IRole } from '@itmat-broker/itmat-types'; +import { enumUserTypes, enumStudyRoles, IUser, IRole, enumDataTypes } from '@itmat-broker/itmat-types'; import path from 'path'; import { encodeQueryParams } from './helper'; if (global.hasMinio) { @@ -200,9 +200,7 @@ if (global.hasMinio) { .field('name', 'Test Study') .field('description', ''); const response1 = await request; - await db.collections.roles_collection.updateMany({}, { $set: { studyId: response1.body.result.data.id } }); - const request2 = authorisedUser.post('/trpc/study.editStudy') .field('studyId', response1.body.result.data.id) .attach('profile', filePath) @@ -374,453 +372,512 @@ if (global.hasMinio) { expect(response2.status).toBe(200); expect(response2.body.result.data).toHaveLength(0); }); - // test('Create a new data version', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // await db.collections.roles_collection.updateMany({}, { $set: { studyId: response1.body.result.data.id } }); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: 
response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // const response2 = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // expect(response2.status).toBe(200); - // expect(response2.body.result.data.version).toBe('1.0'); - // const data = await db.collections.data_collection.findOne({}); - // const field = await db.collections.field_dictionary_collection.findOne({}); - // const study = await db.collections.studies_collection.findOne({}); - // expect(field?.dataVersion).toBe(response2.body.result.data.id); - // expect(data?.dataVersion).toBe(response2.body.result.data.id); - // expect(study?.currentDataVersion).toBe(0); - // expect(study?.dataVersions).toHaveLength(1); - // expect(study?.dataVersions[0].id).toBe(response2.body.result.data.id); - // }); - // test('Create a new data version (no permission)', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, - // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_USER, - // users: [userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: 
enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // const response2 = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // expect(response2.status).toBe(400); - // expect(response2.body.error.message).toBe(errorCodes.NO_PERMISSION_ERROR); - // }); - // test('Create a new data version (version is not float string)', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, - // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_MANAGER, - // users: [userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // const response2 = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0.5', - // tag: '1.0' - // }); - // 
expect(response2.status).toBe(400); - // expect(response2.body.error.message).toBe('Version must be a float number.'); - // }); - // test('Create a new data version (duplicate versions)', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, - // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_MANAGER, - // users: [userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '11' - // }] - // }); - // const response2 = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // expect(response2.status).toBe(400); - // 
expect(response2.body.error.message).toBe('Version has been used.'); - // }); - // test('Create a new data version (nothing update)', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, - // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_MANAGER, - // users: [userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // const response = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // expect(response.status).toBe(400); - // expect(response.body.error.message).toBe('Nothing to update.'); - // }); - // test('Set to a previous data version', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, - // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_MANAGER, - // users: 
[userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // const response2 = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '11' - // }] - // }); - // await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.1', - // tag: '1.1' - // }); - // const response3 = await user.post('/trpc/study.setDataversionAsCurrent') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersionId: response2.body.result.data.id - // }); - // expect(response3.status).toBe(200); - // expect(response3.body.result.data.successful).toBe(true); - // const study = await db.collections.studies_collection.findOne({}); - // expect(study?.currentDataVersion).toBe(0); - // expect(study?.dataVersions).toHaveLength(2); - // }); - // test('Set to a previous data version (no permission)', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, 
- // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_MANAGER, - // users: [userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // const response2 = await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '11' - // }] - // }); - // await db.collections?.roles_collection.updateOne({}, { - // $set: { - // studyRole: enumStudyRoles.STUDY_USER - // } - // }); - // await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.1', - // tag: '1.1' - // }); - // const response3 = await user.post('/trpc/study.setDataversionAsCurrent') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersionId: response2.body.result.data.id - // }); - // expect(response3.status).toBe(400); - // expect(response3.body.error.message).toBe(errorCodes.NO_PERMISSION_ERROR); - // }); - // test('Set to a previous data version (version id not exist)', async () => { - // const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); - // const request = admin.post('/trpc/study.createStudy'); - // 
request.attach('profile', filePath); - // request.field('name', 'Test Study'); - // request.field('description', ''); - // const response1 = await request; - // const fullPermissionRole = { - // id: 'full_permission_role_id', - // studyId: response1.body.result.data.id, - // name: 'Full Permissison Role', - // description: '', - // // data permissions for studyId - // dataPermissions: [{ - // fields: ['^1.*$'], - // dataProperties: { - // }, - // permission: parseInt('110', 2) - // }], - // studyRole: enumStudyRoles.STUDY_MANAGER, - // users: [userProfile.id], - // groups: [] - // }; - // await db.collections.roles_collection.insertOne(fullPermissionRole); - // await user.post('/trpc/data.createStudyField') - // .send({ - // studyId: response1.body.result.data.id, - // fieldName: 'Test Field 1', - // fieldId: '1', - // description: '', - // dataType: enumDataTypes.INTEGER - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '10' - // }] - // }); - // await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.0', - // tag: '1.0' - // }); - // await user.post('/trpc/data.uploadStudyData') - // .send({ - // studyId: response1.body.result.data.id, - // data: [{ - // fieldId: '1', - // value: '11' - // }] - // }); - // await user.post('/trpc/study.createDataVersion') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersion: '1.1', - // tag: '1.1' - // }); - // const response3 = await user.post('/trpc/study.setDataversionAsCurrent') - // .send({ - // studyId: response1.body.result.data.id, - // dataVersionId: 'random' - // }); - // expect(response3.status).toBe(400); - // expect(response3.body.error.message).toBe('Data version does not exist.'); - // }); + test('Create a new data version', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + 
const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + await db.collections.roles_collection.updateMany({}, { $set: { studyId: response1.body.result.data.id } }); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: response1.body.result.data.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + expect(response2.status).toBe(200); + expect(response2.body.result.data.version).toBe('1.0'); + const data = await db.collections.data_collection.findOne({}); + const field = await db.collections.field_dictionary_collection.findOne({}); + const study = await db.collections.studies_collection.findOne({}); + expect(field?.dataVersion).toBe(response2.body.result.data.id); + expect(data?.dataVersion).toBe(response2.body.result.data.id); + expect(study?.currentDataVersion).toBe(0); + expect(study?.dataVersions).toHaveLength(1); + expect(study?.dataVersions[0].id).toBe(response2.body.result.data.id); + // check cold storage + const colddata = await db.collections.colddata_collection.findOne({}); + expect(colddata?.dataVersion).toBe(response2.body.result.data.id); + }); + test('Create a new data version (no permission)', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 
'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + await user.post('/trpc/data.createStudyField') + .send({ + studyId: response1.body.result.data.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await user.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await user.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + expect(response2.status).toBe(400); + expect(response2.body.error.message).toBe('Only admin or study manager can create a study version.'); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); + test('Create a new data version (version is not float string)', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + 
users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: response1.body.result.data.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0.5', + tag: '1.0' + }); + expect(response2.status).toBe(400); + expect(response2.body.error.message).toBe('Version must be a float number.'); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); + test('Create a new data version (duplicate versions)', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + 
studyId: response1.body.result.data.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '11' + }] + }); + const response2 = await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + expect(response2.status).toBe(400); + expect(response2.body.error.message).toBe('Version has been used.'); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); + test('Create a new data version (nothing update)', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + const response = await authorisedUser.post('/trpc/study.createDataVersion') + 
.send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + expect(response.status).toBe(400); + expect(response.body.error.message).toBe('Nothing to update.'); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); + test('Set to a previous data version', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: response1.body.result.data.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '11' + }] + }); + await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: 
response1.body.result.data.id, + dataVersion: '1.1', + tag: '1.1' + }); + const response3 = await authorisedUser.post('/trpc/study.setDataversionAsCurrent') + .send({ + studyId: response1.body.result.data.id, + dataVersionId: response2.body.result.data.id + }); + expect(response3.status).toBe(200); + expect(response3.body.result.data.successful).toBe(true); + const study = await db.collections.studies_collection.findOne({}); + expect(study?.currentDataVersion).toBe(0); + expect(study?.dataVersions).toHaveLength(2); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); + test('Set to a previous data version (no permission)', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: response1.body.result.data.id, + fieldName: 'Test Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + const response2 = await authorisedUser.post('/trpc/study.createDataVersion') + 
.send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '11' + }] + }); + await db.collections?.roles_collection.updateOne({}, { + $set: { + studyRole: enumStudyRoles.STUDY_USER + } + }); + await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.1', + tag: '1.1' + }); + const response3 = await user.post('/trpc/study.setDataversionAsCurrent') + .send({ + studyId: response1.body.result.data.id, + dataVersionId: response2.body.result.data.id + }); + expect(response3.status).toBe(400); + expect(response3.body.error.message).toBe('Only admin or study manager can set a study version.'); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); + test('Set to a previous data version (version id not exist)', async () => { + const filePath = path.join(__dirname, '../filesForTests/dsi.jpeg'); + const request = admin.post('/trpc/study.createStudy') + .attach('profile', filePath) + .field('name', 'Test Study') + .field('description', ''); + const response1 = await request; + const fullPermissionRole: IRole = { + id: 'full_permission_role_id', + studyId: response1.body.result.data.id, + name: 'Full Permissison Role', + description: '', + // data permissions for studyId + dataPermissions: [{ + fields: ['^1.*$'], + dataProperties: { + }, + permission: parseInt('110', 2) + }], + studyRole: enumStudyRoles.STUDY_MANAGER, + users: [authorisedUserProfile.id], + life: { + createdTime: 1591134065000, + createdUser: 'admin', + deletedTime: null, + deletedUser: null + }, + metadata: {} + }; + await db.collections.roles_collection.insertOne(fullPermissionRole); + await authorisedUser.post('/trpc/data.createStudyField') + .send({ + studyId: response1.body.result.data.id, + fieldName: 'Test 
Field 1', + fieldId: '1', + description: '', + dataType: enumDataTypes.INTEGER + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '10' + }] + }); + await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.0', + tag: '1.0' + }); + await authorisedUser.post('/trpc/data.uploadStudyData') + .send({ + studyId: response1.body.result.data.id, + data: [{ + fieldId: '1', + value: '11' + }] + }); + await authorisedUser.post('/trpc/study.createDataVersion') + .send({ + studyId: response1.body.result.data.id, + dataVersion: '1.1', + tag: '1.1' + }); + const response3 = await authorisedUser.post('/trpc/study.setDataversionAsCurrent') + .send({ + studyId: response1.body.result.data.id, + dataVersionId: 'random' + }); + expect(response3.status).toBe(400); + expect(response3.body.error.message).toBe('Data version does not exist.'); + // clear database + await db.collections.roles_collection.deleteMany({ id: fullPermissionRole.id }); + }); }); } else { test(`${__filename.split(/[\\/]/).pop()} skipped because it requires Minio on Docker`, () => { diff --git a/packages/itmat-job-executor/config/config.sample.json b/packages/itmat-job-executor/config/config.sample.json index c5672ccd7..dd9989535 100644 --- a/packages/itmat-job-executor/config/config.sample.json +++ b/packages/itmat-job-executor/config/config.sample.json @@ -18,7 +18,9 @@ "pubkeys_collection": "PUBKEY_COLLECTION", "standardizations_collection": "STANDARDIZATION_COLLECTION", "configs_collection": "CONFIG_COLLECTION", - "drives_collection": "DRIVE_COLLECTION" + "drives_collection": "DRIVE_COLLECTION", + "colddata_collection": "COLDDATA_COLLECTION", + "cache_collection": "CACHE_COLLECTION" } }, "server": { diff --git a/packages/itmat-job-executor/src/database/database.ts b/packages/itmat-job-executor/src/database/database.ts index 40df0458d..a2b9938ab 
100644 --- a/packages/itmat-job-executor/src/database/database.ts +++ b/packages/itmat-job-executor/src/database/database.ts @@ -1,4 +1,4 @@ -import type { IFile, IJobEntry, IProject, IQueryEntry, IDataEntry, IField } from '@itmat-broker/itmat-types'; +import type { IFile, IJobEntry, IProject, IQueryEntry, IField, IData } from '@itmat-broker/itmat-types'; import { Database as DatabaseBase, IDatabaseBaseConfig } from '@itmat-broker/itmat-commons'; import type { Collection } from 'mongodb'; @@ -17,7 +17,7 @@ export interface IDatabaseCollectionConfig { jobs_collection: Collection, field_dictionary_collection: Collection, files_collection: Collection, - data_collection: Collection, + data_collection: Collection, queries_collection: Collection, projects_collection: Collection } diff --git a/packages/itmat-setup/src/databaseSetup/collectionsAndIndexes.ts b/packages/itmat-setup/src/databaseSetup/collectionsAndIndexes.ts index 55a3628a4..b6fbbb660 100644 --- a/packages/itmat-setup/src/databaseSetup/collectionsAndIndexes.ts +++ b/packages/itmat-setup/src/databaseSetup/collectionsAndIndexes.ts @@ -54,7 +54,7 @@ const collections = { name: 'ROLE_COLLECTION', indexes: [ { key: { id: 1 }, unique: true }, - { key: { name: 1, studyId: 1, projectId: 1, deleted: 1 }, unique: true } + { key: { 'name': 1, 'studyId': 1, 'life.createdTime': 1 }, unique: true } ] }, field_dictionary_collection: { @@ -108,6 +108,20 @@ const collections = { indexes: [ { key: { id: 1 }, unique: true } ] + }, + colddata_collection: { + name: 'COLDDATA_COLLECTION', + indexes: [ + { key: { id: 1 }, unique: true }, + { key: { 'studyId': 1, 'fieldId': 1, 'life.createdTime': 1, 'life.deletedTime': 1, 'properties': 1 }, unique: true } + ] + }, + cache_collection: { + name: 'CACHE_COLLECTION', + indexes: [ + { key: { id: 1 }, unique: true }, + { key: { 'keyHash': 1, 'type': 1, 'life.createdTime': 1 }, unique: true } + ] } }; diff --git a/packages/itmat-types/src/types/cache.ts 
b/packages/itmat-types/src/types/cache.ts new file mode 100644 index 000000000..7fcd9a551 --- /dev/null +++ b/packages/itmat-types/src/types/cache.ts @@ -0,0 +1,19 @@ +import { IBase } from './base'; + +/** We store large data as json in minio as a cache. */ +export interface ICache extends IBase { + keyHash: string; + keys: Record, + uri: string; + status: enumCacheStatus; + type: enumCacheType +} + +export enum enumCacheType { + API = 'API', +} + +export enum enumCacheStatus { + OUTDATED = 'OUTDATED', + INUSE = 'INUSE' +} \ No newline at end of file diff --git a/packages/itmat-types/src/types/data.ts b/packages/itmat-types/src/types/data.ts index 8acbce300..99041a640 100644 --- a/packages/itmat-types/src/types/data.ts +++ b/packages/itmat-types/src/types/data.ts @@ -1,22 +1,5 @@ import { IBase } from './base'; -export interface IDataEntry { - id: string; - m_studyId: string; - m_subjectId: string; // patient Id - m_visitId: string; // visit Id - m_versionId: string | null; // data version Id - m_fieldId: string; - metadata?: { - add?: string[]; - remove?: string[]; - [key: string]: unknown; - }; - value: unknown; - uploadedBy?: string; - uploadedAt: number; -} - export interface IData extends IBase { studyId: string; fieldId: string; @@ -31,4 +14,17 @@ export interface IGroupedData { [key: string]: unknown } } +} + +export enum enumDataTransformationOperation { + GROUP = 'GROUP', + AFFINE = 'AFFINE', + LEAVEONE = 'LEAVEONE', + CONCAT = 'CONCAT', + DECONCAT = 'DECONCAT', + JOIN = 'JOIN', + DEGROUP = 'DEGROUP', + FILTER = 'FILTER', + FLATTEN = 'FLATTEN', + COUNT = 'COUNT' } \ No newline at end of file diff --git a/packages/itmat-types/src/types/index.ts b/packages/itmat-types/src/types/index.ts index 83fd94ff0..d9396f750 100644 --- a/packages/itmat-types/src/types/index.ts +++ b/packages/itmat-types/src/types/index.ts @@ -17,6 +17,7 @@ import * as ZodSchema from './zod'; import * as Utils from './utils'; import * as Drive from './drive'; import * as Permission from 
'./permission'; +import * as Cache from './cache'; export * from './field'; export * from './file'; @@ -37,5 +38,6 @@ export * from './zod'; export * from './utils'; export * from './drive'; export * from './permission'; +export * from './cache'; -export const Types = { File, Job, Log, User, Organisation, Pubkey, Study, Query, Field, Data, Standardization, Common, Base, CoreErrors, Config, ZodSchema, Utils, Drive, Permission }; +export const Types = { File, Job, Log, User, Organisation, Pubkey, Study, Query, Field, Data, Standardization, Common, Base, CoreErrors, Config, ZodSchema, Utils, Drive, Permission, Cache }; diff --git a/packages/itmat-types/src/types/zod.ts b/packages/itmat-types/src/types/zod.ts index 315b251e1..9c5cb05fc 100644 --- a/packages/itmat-types/src/types/zod.ts +++ b/packages/itmat-types/src/types/zod.ts @@ -1,9 +1,14 @@ import { Readable } from 'stream'; import { z } from 'zod'; +const readStreamRefinement = z.instanceof(Readable).refine( + (val): val is Readable => val instanceof Readable +); + export const FileUploadSchema = z.object({ - createReadStream: z.function().returns(z.instanceof(Readable)), + createReadStream: z.function().returns(readStreamRefinement), filename: z.string(), mimetype: z.string(), - encoding: z.string() + encoding: z.string(), + fieldName: z.string() }); \ No newline at end of file