Skip to content

Commit

Permalink
cnmf/hdf5: Improve handling of serialisation of NoneType. Also fix so…
Browse files Browse the repository at this point in the history
…me typos.
  • Loading branch information
pgunn committed May 20, 2022
1 parent 00d32b6 commit eb4ba34
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 7 deletions.
10 changes: 5 additions & 5 deletions caiman/source_extraction/cnmf/estimates.py
Original file line number Diff line number Diff line change
Expand Up @@ -1421,7 +1421,7 @@ def remove_small_large_neurons(self, min_size_neuro, max_size_neuro,
indices of components with size within the acceptable range
'''
if self.A_thr is None:
raise Exception('You need to compute thresolded components before calling remove_duplicates: use the threshold_components method')
raise Exception('You need to compute thresholded components before calling remove_duplicates: use the threshold_components method')

A_gt_thr_bin = self.A_thr.toarray() > 0
size_neurons_gt = A_gt_thr_bin.sum(0)
Expand Down Expand Up @@ -1451,7 +1451,7 @@ def remove_duplicates(self, predictions=None, r_values=None, dist_thr=0.1,
plot_duplicates
'''
if self.A_thr is None:
raise Exception('You need to compute thresolded components before calling remove_duplicates: use the threshold_components method')
raise Exception('You need to compute thresholded components before calling remove_duplicates: use the threshold_components method')

A_gt_thr_bin = (self.A_thr.toarray() > 0).reshape([self.dims[0], self.dims[1], -1], order='F').transpose([2, 0, 1]) * 1.

Expand Down Expand Up @@ -1492,7 +1492,7 @@ def masks_2_neurofinder(self, dataset_name):
"""
if self.A_thr is None:
raise Exception(
'You need to compute thresolded components before calling this method: use the threshold_components method')
'You need to compute thresholded components before calling this method: use the threshold_components method')
bin_masks = self.A_thr.reshape([self.dims[0], self.dims[1], -1], order='F').transpose([2, 0, 1])
return nf_masks_to_neurof_dict(bin_masks, dataset_name)

Expand Down Expand Up @@ -1711,10 +1711,10 @@ def compare_components(estimate_gt, estimate_cmp, Cn=None, thresh_cost=.8, min_
labels=['GT', 'CMP'], plot_results=False):
if estimate_gt.A_thr is None:
raise Exception(
'You need to compute thresolded components for first argument before calling remove_duplicates: use the threshold_components method')
'You need to compute thresholded components for first argument before calling remove_duplicates: use the threshold_components method')
if estimate_cmp.A_thr is None:
raise Exception(
'You need to compute thresolded components for second argument before calling remove_duplicates: use the threshold_components method')
'You need to compute thresholded components for second argument before calling remove_duplicates: use the threshold_components method')

if plot_results:
plt.figure(figsize=(20, 10))
Expand Down
2 changes: 1 addition & 1 deletion caiman/source_extraction/cnmf/spatial.py
Original file line number Diff line number Diff line change
Expand Up @@ -455,7 +455,7 @@ def construct_ellipse_parallel(pars):
return np.sqrt(np.sum([old_div((dist_cm * V[:, k]) ** 2, dkk[k]) for k in range(len(dkk))], 0)) <= dist

def threshold_components(A, dims, medw=None, thr_method='max', maxthr=0.1, nrgthr=0.9999, extract_cc=True,
se=None, ss=None, dview=None):
se=None, ss=None, dview=None) -> np.ndarray:
"""
Post-processing of spatial components which includes the following steps
Expand Down
7 changes: 6 additions & 1 deletion caiman/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -544,6 +544,10 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dic
Starting with Caiman 1.9.9 we started saving strings as attributes rather than independent datasets,
which gets us a better syntax and less damage to the strings, at the cost of scanning properly for them
being a little more involved. In future versions of Caiman we may store all scalars as attributes.
There's some special casing here that should be solved in a more general way; anything serialised into
hdf5 and then deserialised should probably go back through the class constructor, and be revalidated
so all the fields end up with appropriate data types.
'''

ans:Dict = {}
Expand All @@ -560,7 +564,6 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dic
ans[key] = item[()]

elif key in ['dims', 'medw', 'sigma_smooth_snmf', 'dxy', 'max_shifts', 'strides', 'overlaps']:

if isinstance(item[()], np.ndarray):
ans[key] = tuple(item[()])
else:
Expand All @@ -570,6 +573,8 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> Dic
ans[key] = bool(item[()])
else:
ans[key] = item[()]
if isinstance(ans[key], bytes) and ans[key] == b'NoneType':
ans[key] = None

elif isinstance(item, h5py._hl.group.Group):
if key in ('A', 'W', 'Ab', 'downscale_matrix', 'upscale_matrix'):
Expand Down

0 comments on commit eb4ba34

Please sign in to comment.