diff --git a/api/dao/hierarchy.py b/api/dao/hierarchy.py
index e3bce623a..e64832bc0 100644
--- a/api/dao/hierarchy.py
+++ b/api/dao/hierarchy.py
@@ -144,7 +144,15 @@ def check_req(cont, req_k, req_v):
     """
     Return True if container satisfies specific requirement.
     """
-    cont_v = cont.get(req_k)
+
+    # If looking at classification, translate to list rather than dictionary
+    if req_k == 'classification':
+        cont_v = []
+        for v in cont.get('classification', {}).itervalues():
+            cont_v.extend(v)
+    else:
+        cont_v = cont.get(req_k)
+
     if cont_v:
         if isinstance(req_v, dict):
             for k,v in req_v.iteritems():
diff --git a/api/dao/liststorage.py b/api/dao/liststorage.py
index ffff2ac4b..1d0d16e85 100644
--- a/api/dao/liststorage.py
+++ b/api/dao/liststorage.py
@@ -2,7 +2,6 @@
 import bson.objectid
 import copy
 import datetime
-import pymongo
 
 from ..web.errors import APIStorageException, APIConflictException, APINotFoundException
 from . import consistencychecker
@@ -117,12 +116,23 @@ def _get_el(self, _id, query_params):
         if result and result.get(self.list_name):
             return result.get(self.list_name)[0]
 
+    def _update_session_compliance(self, _id):
+        if self.cont_name in ['sessions', 'acquisitions']:
+            if self.cont_name == 'sessions':
+                session_id = _id
+            else:
+                session_id = AcquisitionStorage().get_container(_id).get('session')
+            SessionStorage().recalc_session_compliance(session_id)
+
 
 class FileStorage(ListStorage):
 
     def __init__(self, cont_name):
         super(FileStorage,self).__init__(cont_name, 'files', use_object_id=True)
 
+    def _create_jobs(self, container_before):
+        container_after = self.get_container(container_before['_id'])
+        return rules.create_jobs(config.db, container_before, container_after, self.cont_name)
+
     def _update_el(self, _id, query_params, payload, exclude_params):
         container_before = self.get_container(_id)
@@ -147,11 +157,9 @@ def _update_el(self, _id, query_params, payload, exclude_params):
             '$set': mod_elem
         }
 
-        container_after = self.dbc.find_one_and_update(query, update, return_document=pymongo.collection.ReturnDocument.AFTER)
-        if not container_after:
-            raise APINotFoundException('Could not find and modify {} {}. file not updated'.format(_id, self.cont_name))
-
-        jobs_spawned = rules.create_jobs(config.db, container_before, container_after, self.cont_name)
+        self.dbc.find_one_and_update(query, update)
+        self._update_session_compliance(_id)
+        jobs_spawned = self._create_jobs(container_before)
 
         return {
             'modified': 1,
@@ -164,12 +172,7 @@ def _delete_el(self, _id, query_params):
             if f['name'] == query_params['name']:
                 f['deleted'] = datetime.datetime.utcnow()
         result = self.dbc.update_one({'_id': _id}, {'$set': {'files': files, 'modified': datetime.datetime.utcnow()}})
-        if self.cont_name in ['sessions', 'acquisitions']:
-            if self.cont_name == 'sessions':
-                session_id = _id
-            else:
-                session_id = AcquisitionStorage().get_container(_id).get('session')
-            SessionStorage().recalc_session_compliance(session_id)
+        self._update_session_compliance(_id)
         return result
 
     def _get_el(self, _id, query_params):
@@ -217,9 +220,13 @@ def modify_info(self, _id, query_params, payload):
         else:
             update['$set']['modified'] = datetime.datetime.utcnow()
 
-        return self.dbc.update_one(query, update)
+        result = self.dbc.update_one(query, update)
+        self._update_session_compliance(_id)
+        return result
+
 
     def modify_classification(self, _id, query_params, payload):
+        container_before = self.get_container(_id)
         update = {'$set': {'modified': datetime.datetime.utcnow()}}
 
         if self.use_object_id:
@@ -265,6 +272,9 @@ def modify_classification(self, _id, query_params, payload):
 
         self.dbc.update_one(query, d_update)
 
+        self._update_session_compliance(_id)
+        self._create_jobs(container_before)
+
 
 class StringListStorage(ListStorage):
 
diff --git a/bin/database.py b/bin/database.py
index 3863b7f99..e5c34b441 100755
--- a/bin/database.py
+++ b/bin/database.py
@@ -1425,6 +1425,25 @@ def adjust_type(r):
 
         config.db.project_rules.replace_one({'_id': rule['_id']}, rule)
 
+    return True
+
+def upgrade_templates_to_43(project):
+    """
+    Set any measurements keys to classification
+    """
+
+    template = project['template']
+
+    for a in template.get('acquisitions', []):
+        for f in a.get('files', []):
+            if 'measurements' in f:
+                cl = f.pop('measurements')
+                f['classification'] = cl
+
+    config.db.projects.update_one({'_id': project['_id']}, {'$set': {'template': template}})
+
+    return True
+
 def upgrade_to_43():
     """
     Update classification for all files with existing measurements field
@@ -1443,6 +1462,9 @@ def upgrade_to_43():
     ]})
     process_cursor(cursor, upgrade_rules_to_43)
 
+    cursor = config.db.projects.find({'template': {'$exists': True }})
+    process_cursor(cursor, upgrade_templates_to_43)
+
 
 
 
diff --git a/tests/integration_tests/python/test_containers.py b/tests/integration_tests/python/test_containers.py
index a5e72e4b2..f41a9e6fb 100644
--- a/tests/integration_tests/python/test_containers.py
+++ b/tests/integration_tests/python/test_containers.py
@@ -63,6 +63,8 @@ def test_project_template(data_builder, file_form, as_admin):
     assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('non-compliant.txt')).ok
     assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('compliant1.csv')).ok
     assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('compliant2.csv')).ok
+    assert as_admin.post('/acquisitions/' + acquisition_2 + '/files/compliant1.csv/classification', json={'add': {'custom': ['diffusion']}}).ok
+    assert as_admin.post('/acquisitions/' + acquisition_2 + '/files/compliant2.csv/classification', json={'add': {'custom': ['diffusion']}}).ok
 
     # test the session before setting the template
     r = as_admin.get('/sessions/' + session)
@@ -79,6 +81,7 @@ def test_project_template(data_builder, file_form, as_admin):
             'files': [{
                 'minimum': 2,
                 'mimetype': 'text/csv',
+                'classification': 'diffusion'
             }]
         }]
     })
@@ -95,6 +98,7 @@ def test_project_template(data_builder, file_form, as_admin):
             'files': [{
                 'minimum': 2,
                 'mimetype': 'text/csv',
+                'classification': 'diffusion'
            }]
         }]
     })
@@ -152,6 +156,8 @@ def satisfies_template():
     assert as_admin.delete('/acquisitions/' + acquisition_2 + '/files/compliant2.csv').ok
     assert not satisfies_template()
     assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('compliant2.csv')).ok
+    assert not satisfies_template()
+    assert as_admin.post('/acquisitions/' + acquisition_2 + '/files/compliant2.csv/classification', json={'add': {'custom': ['diffusion']}}).ok
 
     # acquisitions.minimum
     assert satisfies_template()
diff --git a/tests/integration_tests/python/test_upgrades.py b/tests/integration_tests/python/test_upgrades.py
index 4d0b51a05..055a98b4e 100644
--- a/tests/integration_tests/python/test_upgrades.py
+++ b/tests/integration_tests/python/test_upgrades.py
@@ -2,6 +2,7 @@
 import sys
 
 import bson
+import copy
 import pytest
 
 
@@ -32,3 +33,118 @@ def test_42(data_builder, api_db, as_admin, database):
     # Verify archived was removed when false as well
     session_data = as_admin.get('/sessions/' + session2).json()
     assert 'archived' not in session_data
+
+
+def test_43(data_builder, randstr, api_db, as_admin, database, file_form):
+
+    # Set up files with measurements
+
+    assert True
+
+    containers = [
+        ('collections',  data_builder.create_collection()),
+        ('projects',     data_builder.create_project()),
+        ('sessions',     data_builder.create_session()),
+        ('acquisitions', data_builder.create_acquisition())
+    ]
+
+    for c in containers:
+        assert as_admin.post('/{}/{}/files'.format(c[0], c[1]), files=file_form('test.csv')).ok
+        assert as_admin.post('/{}/{}/files'.format(c[0], c[1]), files=file_form('test2.csv')).ok
+        api_db[c[0]].update_one({'_id': bson.ObjectId(c[1])},
+            {'$set': {
+                'files.0.measurements': ['diffusion', 'functional'],
+                'files.1.measurements': ['diffusion', 'functional']
+            }})
+
+
+    # Set up rules referencing measurements
+
+    rule = {
+        'all' : [
+            {'type' : 'file.measurement', 'value' : 'diffusion'},
+            {'type' : 'container.has-measurement', 'value' : 'tests', 'regex': True}
+        ],
+        'any' : [
+            {'type' : 'file.measurement', 'value' : 'diffusion'},
+            {'type' : 'container.has-measurement', 'value' : 'tests', 'regex': True}
+        ],
+        'name' : 'Run dcm2niix on dicom',
+        'alg' : 'dcm2niix',
+        'project_id' : 'site'
+    }
+
+    api_db.project_rules.insert(copy.deepcopy(rule))
+    api_db.project_rules.insert(copy.deepcopy(rule))
+
+
+    # Set up session templates referencing measurements
+
+    t_project1 = data_builder.create_project()
+    t_project2 = data_builder.create_project()
+
+    template = {
+        'session': {'subject': {'code': '^compliant$'}},
+        'acquisitions': [{
+            'minimum': 1,
+            'files': [{
+                'minimum': 2,
+                'measurements': 'diffusion'
+            }]
+        }]
+    }
+
+    assert as_admin.post('/projects/' + t_project1 + '/template', json=template).ok
+    assert as_admin.post('/projects/' + t_project2 + '/template', json=template).ok
+
+
+    ### RUN UPGRADE
+
+    database.upgrade_to_43()
+
+    ####
+
+
+    # Ensure files were updated
+    for c in containers:
+        files = as_admin.get('/{}/{}'.format(c[0], c[1])).json()['files']
+        for f in files:
+            assert f['classification'] == {'Custom': ['diffusion', 'functional']}
+
+
+    # Ensure rules were updated
+    rule_after = {
+        'all' : [
+            {'type' : 'file.classification', 'value' : 'diffusion'},
+            {'type' : 'container.has-classification', 'value' : 'tests', 'regex': True}
+        ],
+        'any' : [
+            {'type' : 'file.classification', 'value' : 'diffusion'},
+            {'type' : 'container.has-classification', 'value' : 'tests', 'regex': True}
+        ],
+        'name' : 'Run dcm2niix on dicom',
+        'alg' : 'dcm2niix'
+    }
+
+    rules = as_admin.get('/site/rules').json()
+    for r in rules:
+        r.pop('_id')
+        assert r == rule_after
+
+
+    # Ensure templates were updated
+    template_after = {
+        'session': {'subject': {'code': '^compliant$'}},
+        'acquisitions': [{
+            'minimum': 1,
+            'files': [{
+                'minimum': 2,
+                'classification': 'diffusion'
+            }]
+        }]
+    }
+    for p in [t_project1, t_project2]:
+        assert as_admin.get('/projects/' + p).json()['template'] == template_after