Skip to content

Commit

Permalink
Clean up tests, add db upgrade test
Browse files Browse the repository at this point in the history
  • Loading branch information
nagem committed Mar 13, 2018
1 parent 19e1272 commit 64cc6f7
Show file tree
Hide file tree
Showing 5 changed files with 176 additions and 14 deletions.
10 changes: 9 additions & 1 deletion api/dao/hierarchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,15 @@ def check_req(cont, req_k, req_v):
"""
Return True if container satisfies specific requirement.
"""
cont_v = cont.get(req_k)

# If looking at classification, translate to list rather than dictionary
if req_k == 'classification':
cont_v = []
for v in cont.get('classification', {}).itervalues():
cont_v.extend(v)
else:
cont_v = cont.get(req_k)

if cont_v:
if isinstance(req_v, dict):
for k,v in req_v.iteritems():
Expand Down
36 changes: 23 additions & 13 deletions api/dao/liststorage.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import bson.objectid
import copy
import datetime
import pymongo

from ..web.errors import APIStorageException, APIConflictException, APINotFoundException
from . import consistencychecker
Expand Down Expand Up @@ -117,12 +116,23 @@ def _get_el(self, _id, query_params):
if result and result.get(self.list_name):
return result.get(self.list_name)[0]

def _update_session_compliance(self, _id):
if self.cont_name in ['sessions', 'acquisitions']:
if self.cont_name == 'sessions':
session_id = _id
else:
session_id = AcquisitionStorage().get_container(_id).get('session')
SessionStorage().recalc_session_compliance(session_id)


class FileStorage(ListStorage):

def __init__(self, cont_name):
    """Initialize list storage over the 'files' list of *cont_name* containers.

    Container ids are always ObjectIds for file-bearing containers.
    """
    super(FileStorage, self).__init__(cont_name, 'files', use_object_id=True)

def _create_jobs(self, container_before):
    """Spawn any rule-triggered jobs for this container's changed files.

    Re-fetches the container so the rules engine can compare the state
    before and after the file update.
    """
    updated_container = self.get_container(container_before['_id'])
    return rules.create_jobs(config.db, container_before,
                             updated_container, self.cont_name)

def _update_el(self, _id, query_params, payload, exclude_params):
container_before = self.get_container(_id)
Expand All @@ -147,11 +157,9 @@ def _update_el(self, _id, query_params, payload, exclude_params):
'$set': mod_elem
}

container_after = self.dbc.find_one_and_update(query, update, return_document=pymongo.collection.ReturnDocument.AFTER)
if not container_after:
raise APINotFoundException('Could not find and modify {} {}. file not updated'.format(_id, self.cont_name))

jobs_spawned = rules.create_jobs(config.db, container_before, container_after, self.cont_name)
self.dbc.find_one_and_update(query, update)
self._update_session_compliance(_id)
jobs_spawned = self._create_jobs(container_before)

return {
'modified': 1,
Expand All @@ -164,12 +172,7 @@ def _delete_el(self, _id, query_params):
if f['name'] == query_params['name']:
f['deleted'] = datetime.datetime.utcnow()
result = self.dbc.update_one({'_id': _id}, {'$set': {'files': files, 'modified': datetime.datetime.utcnow()}})
if self.cont_name in ['sessions', 'acquisitions']:
if self.cont_name == 'sessions':
session_id = _id
else:
session_id = AcquisitionStorage().get_container(_id).get('session')
SessionStorage().recalc_session_compliance(session_id)
self._update_session_compliance(_id)
return result

def _get_el(self, _id, query_params):
Expand Down Expand Up @@ -217,9 +220,13 @@ def modify_info(self, _id, query_params, payload):
else:
update['$set']['modified'] = datetime.datetime.utcnow()

return self.dbc.update_one(query, update)
result = self.dbc.update_one(query, update)
self._update_session_compliance(_id)
return result


def modify_classification(self, _id, query_params, payload):
container_before = self.get_container(_id)
update = {'$set': {'modified': datetime.datetime.utcnow()}}

if self.use_object_id:
Expand Down Expand Up @@ -265,6 +272,9 @@ def modify_classification(self, _id, query_params, payload):

self.dbc.update_one(query, d_update)

self._update_session_compliance(_id)
self._create_jobs(container_before)



class StringListStorage(ListStorage):
Expand Down
22 changes: 22 additions & 0 deletions bin/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -1425,6 +1425,25 @@ def adjust_type(r):

config.db.project_rules.replace_one({'_id': rule['_id']}, rule)

return True

def upgrade_templates_to_43(project):
    """
    Rename any 'measurements' keys to 'classification' in a project's
    session template, persisting the modified template back to the db.
    """

    template = project['template']

    # Template file-match specs live under acquisitions -> files
    for acquisition in template.get('acquisitions', []):
        for file_spec in acquisition.get('files', []):
            if 'measurements' in file_spec:
                file_spec['classification'] = file_spec.pop('measurements')

    config.db.projects.update_one({'_id': project['_id']},
                                  {'$set': {'template': template}})

    return True

def upgrade_to_43():
"""
Update classification for all files with existing measurements field
Expand All @@ -1443,6 +1462,9 @@ def upgrade_to_43():
]})
process_cursor(cursor, upgrade_rules_to_43)

cursor = config.db.projects.find({'template': {'$exists': True }})
process_cursor(cursor, upgrade_templates_to_43)




Expand Down
6 changes: 6 additions & 0 deletions tests/integration_tests/python/test_containers.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ def test_project_template(data_builder, file_form, as_admin):
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('non-compliant.txt')).ok
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('compliant1.csv')).ok
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('compliant2.csv')).ok
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files/compliant1.csv/classification', json={'add': {'custom': ['diffusion']}})
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files/compliant2.csv/classification', json={'add': {'custom': ['diffusion']}})

# test the session before setting the template
r = as_admin.get('/sessions/' + session)
Expand All @@ -79,6 +81,7 @@ def test_project_template(data_builder, file_form, as_admin):
'files': [{
'minimum': 2,
'mimetype': 'text/csv',
'classification': 'diffusion'
}]
}]
})
Expand All @@ -95,6 +98,7 @@ def test_project_template(data_builder, file_form, as_admin):
'files': [{
'minimum': 2,
'mimetype': 'text/csv',
'classification': 'diffusion'
}]
}]
})
Expand Down Expand Up @@ -152,6 +156,8 @@ def satisfies_template():
assert as_admin.delete('/acquisitions/' + acquisition_2 + '/files/compliant2.csv').ok
assert not satisfies_template()
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files', files=file_form('compliant2.csv')).ok
assert not satisfies_template()
assert as_admin.post('/acquisitions/' + acquisition_2 + '/files/compliant2.csv/classification', json={'add': {'custom': ['diffusion']}})

# acquisitions.minimum
assert satisfies_template()
Expand Down
116 changes: 116 additions & 0 deletions tests/integration_tests/python/test_upgrades.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import sys

import bson
import copy
import pytest


Expand Down Expand Up @@ -32,3 +33,118 @@ def test_42(data_builder, api_db, as_admin, database):
# Verify archived was removed when false as well
session_data = as_admin.get('/sessions/' + session2).json()
assert 'archived' not in session_data


def test_43(data_builder, randstr, api_db, as_admin, database, file_form):
    """Verify upgrade_to_43 migrates legacy 'measurements' fields to
    'classification' on container files, site rules and project templates."""

    # Set up files with legacy measurements on every file-bearing container type
    containers = [
        ('collections', data_builder.create_collection()),
        ('projects', data_builder.create_project()),
        ('sessions', data_builder.create_session()),
        ('acquisitions', data_builder.create_acquisition())
    ]

    for c in containers:
        assert as_admin.post('/{}/{}/files'.format(c[0], c[1]), files=file_form('test.csv')).ok
        assert as_admin.post('/{}/{}/files'.format(c[0], c[1]), files=file_form('test2.csv')).ok
        # Write measurements directly to the db -- the API no longer accepts them
        api_db[c[0]].update_one({'_id': bson.ObjectId(c[1])},
            {'$set': {
                'files.0.measurements': ['diffusion', 'functional'],
                'files.1.measurements': ['diffusion', 'functional']
            }})


    # Set up rules referencing measurements

    rule = {
        'all' : [
            {'type' : 'file.measurement', 'value' : 'diffusion'},
            {'type' : 'container.has-measurement', 'value' : 'tests', 'regex': True}
        ],
        'any' : [
            {'type' : 'file.measurement', 'value' : 'diffusion'},
            {'type' : 'container.has-measurement', 'value' : 'tests', 'regex': True}
        ],
        'name' : 'Run dcm2niix on dicom',
        'alg' : 'dcm2niix',
        'project_id' : 'site'
    }

    # Deep-copy so each inserted rule gets its own _id
    api_db.project_rules.insert_one(copy.deepcopy(rule))
    api_db.project_rules.insert_one(copy.deepcopy(rule))


    # Set up session templates referencing measurements

    t_project1 = data_builder.create_project()
    t_project2 = data_builder.create_project()

    template = {
        'session': {'subject': {'code': '^compliant$'}},
        'acquisitions': [{
            'minimum': 1,
            'files': [{
                'minimum': 2,
                'measurements': 'diffusion'
            }]
        }]
    }

    assert as_admin.post('/projects/' + t_project1 + '/template', json=template).ok
    assert as_admin.post('/projects/' + t_project2 + '/template', json=template).ok


    ### RUN UPGRADE

    database.upgrade_to_43()

    ####


    # Ensure files were updated
    for c in containers:
        files = as_admin.get('/{}/{}'.format(c[0], c[1])).json()['files']
        for f in files:
            assert f['classification'] == {'Custom': ['diffusion', 'functional']}


    # Ensure rules were updated (project_id is not returned by the endpoint)
    rule_after = {
        'all' : [
            {'type' : 'file.classification', 'value' : 'diffusion'},
            {'type' : 'container.has-classification', 'value' : 'tests', 'regex': True}
        ],
        'any' : [
            {'type' : 'file.classification', 'value' : 'diffusion'},
            {'type' : 'container.has-classification', 'value' : 'tests', 'regex': True}
        ],
        'name' : 'Run dcm2niix on dicom',
        'alg' : 'dcm2niix'
    }

    rules = as_admin.get('/site/rules').json()
    for r in rules:
        r.pop('_id')
        assert r == rule_after


    # Ensure templates were updated
    template_after = {
        'session': {'subject': {'code': '^compliant$'}},
        'acquisitions': [{
            'minimum': 1,
            'files': [{
                'minimum': 2,
                'classification': 'diffusion'
            }]
        }]
    }
    for p in [t_project1, t_project2]:
        assert as_admin.get('/projects/' + p).json()['template'] == template_after



0 comments on commit 64cc6f7

Please sign in to comment.