Logging cleanup #1378

Merged · 6 commits · Jul 25, 2024
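The change applied throughout this PR: calls on the root `logging` facility are replaced with a logger named "caiman", fetched at the top of each function. A minimal sketch of the before/after pattern (illustrative only, not lines from the diff):

```python
import logging

# Before: messages go through the root logger, so a host application
# cannot filter caiman's output separately from its own.
logging.debug("performance stats")

# After: a named logger, fetched where it is used; callers can now
# configure the "caiman" logger independently of the root logger.
logger = logging.getLogger("caiman")
logger.debug("performance stats")
```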
149 changes: 91 additions & 58 deletions caiman/base/movies.py

Large diffs are not rendered by default.

63 changes: 29 additions & 34 deletions caiman/base/rois.py
@@ -212,6 +212,7 @@ def nf_match_neurons_in_binary_masks(masks_gt,
indices false pos

"""
logger = logging.getLogger("caiman")

_, d1, d2 = np.shape(masks_gt)
dims = d1, d2
@@ -252,7 +253,7 @@ def nf_match_neurons_in_binary_masks(masks_gt,
performance['precision'] = TP / (TP + FP)
performance['accuracy'] = (TP + TN) / (TP + FP + FN + TN)
performance['f1_score'] = 2 * TP / (2 * TP + FP + FN)
logging.debug(performance)
logger.debug(performance)

idx_tp = np.where(np.array(costs) < thresh_cost)[0]
idx_tp_ben = matches[0][idx_tp] # ground truth
@@ -297,8 +298,8 @@ def nf_match_neurons_in_binary_masks(masks_gt,
pl.show()
pl.axis('off')
except Exception as e:
logging.warning("not able to plot precision recall: graphics failure")
logging.warning(e)
logger.warning("not able to plot precision recall: graphics failure")
logger.warning(e)
return idx_tp_gt, idx_tp_comp, idx_fn_gt, idx_fp_comp, performance


@@ -392,11 +393,7 @@ def register_ROIs(A1,
ROIs from session 2 aligned to session 1

"""

# if 'csc_matrix' not in str(type(A1)):
# A1 = scipy.sparse.csc_matrix(A1)
# if 'csc_matrix' not in str(type(A2)):
# A2 = scipy.sparse.csc_matrix(A2)
logger = logging.getLogger("caiman")

if 'ndarray' not in str(type(A1)):
A1 = A1.toarray()
@@ -488,7 +485,7 @@ def register_ROIs(A1,
performance['precision'] = TP / (TP + FP)
performance['accuracy'] = (TP + TN) / (TP + FP + FN + TN)
performance['f1_score'] = 2 * TP / (2 * TP + FP + FN)
logging.info(performance)
logger.info(performance)

if plot_results:
if Cn is None:
@@ -521,11 +518,6 @@ def register_ROIs(A1,
pl.title('Mismatches')
pl.axis('off')


# except Exception as e:
# logging.warning("not able to plot precision recall usually because we are on travis")
# logging.warning(e)

return matched_ROIs1, matched_ROIs2, non_matched1, non_matched2, performance, A2


@@ -586,6 +578,7 @@ def register_multisession(A,
by component k in A_union

"""
logger = logging.getLogger("caiman")

n_sessions = len(A)
templates = list(templates)
@@ -615,7 +608,7 @@ def register_multisession(A,
enclosed_thr=enclosed_thr)

mat_sess, mat_un, nm_sess, nm_un, _, A2 = reg_results
logging.info(len(mat_sess))
logger.info(len(mat_sess))
A_union = A2.copy()
A_union[:, mat_un] = A[sess][:, mat_sess]
A_union = np.concatenate((A_union.toarray(), A[sess][:, nm_sess]), axis=1)
@@ -763,6 +756,7 @@ def distance_masks(M_s:list, cm_s: list[list], max_dist: float, enclosed_thr: Op

def find_matches(D_s, print_assignment: bool = False) -> tuple[list, list]:
# todo todocument
logger = logging.getLogger("caiman")

matches = []
costs = []
@@ -771,7 +765,7 @@ def find_matches(D_s, print_assignment: bool = False) -> tuple[list, list]:
# we make a copy not to set changes in the original
DD = D.copy()
if np.sum(np.where(np.isnan(DD))) > 0:
logging.error('Exception: Distance Matrix contains invalid value NaN')
logger.error('Exception: Distance Matrix contains invalid value NaN')
raise Exception('Distance Matrix contains invalid value NaN')

# we do the hungarian
@@ -784,10 +778,10 @@ def find_matches(D_s, print_assignment: bool = False) -> tuple[list, list]:
for row, column in indexes2:
value = DD[row, column]
if print_assignment:
logging.debug(('(%d, %d) -> %f' % (row, column, value)))
logger.debug(f'({row}, {column}) -> {value}')
total.append(value)
logging.debug(('FOV: %d, shape: %d,%d total cost: %f' % (ii, DD.shape[0], DD.shape[1], np.sum(total))))
logging.debug((time.time() - t_start))
logger.debug(f'FOV: {ii}, shape: {DD.shape[0]},{DD.shape[1]} total cost: {np.sum(total)}')
logger.debug(time.time() - t_start)
costs.append(total)
# send back the results in the format we want
return matches, costs
@@ -818,6 +812,8 @@ def link_neurons(matches: list[list[tuple]],
neurons: list of arrays representing the indices of neurons in each FOV

"""
logger = logging.getLogger("caiman")

if min_FOV_present is None:
min_FOV_present = len(matches)

@@ -842,7 +838,7 @@ def link_neurons(matches: list[list[tuple]],
neurons.append(neuron)

neurons = np.array(neurons).T
logging.info(f'num_neurons: {num_neurons}')
logger.info(f'num_neurons: {num_neurons}')
return neurons


@@ -917,6 +913,8 @@ def nf_read_roi(fileobj) -> np.ndarray:

Adapted from https://gist.github.com/luispedro/3437255
'''
logger = logging.getLogger("caiman")

# This is based on:
# http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiDecoder.java.html
# http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiEncoder.java.html
@@ -957,8 +955,7 @@ def getfloat():

magic = fileobj.read(4)
if magic != 'Iout':
# raise IOError('Magic number not found')
logging.warning('Magic number not found')
logger.warning('Magic number not found')
version = get16()

# It seems that the roi type field occupies 2 Bytes, but only one is used
@@ -967,13 +964,6 @@ def getfloat():
# Discard second Byte:
get8()

# if not (0 <= roi_type < 11):
# logging.error(('roireader: ROI type %s not supported' % roi_type))
#
# if roi_type != 7:
#
# logging.error(('roireader: ROI type %s not supported (!= 7)' % roi_type))

top = get16()
left = get16()
bottom = get16()
@@ -1050,6 +1040,8 @@ def nf_merge_roi_zip(fnames: list[str], idx_to_keep: list[list], new_fold: str):
name of the output zip file (without .zip extension)

"""
logger = logging.getLogger("caiman")

folders_rois = []
files_to_keep = []
# unzip the files and keep only the ones that are requested
@@ -1058,7 +1050,7 @@ def nf_merge_roi_zip(fnames: list[str], idx_to_keep: list[list], new_fold: str):
folders_rois.append(dirpath)
with zipfile.ZipFile(fn) as zf:
name_rois = zf.namelist()
logging.debug(len(name_rois))
logger.debug(len(name_rois))
zip_ref = zipfile.ZipFile(fn, 'r')
zip_ref.extractall(dirpath)
files_to_keep.append([os.path.join(dirpath, ff) for ff in np.array(name_rois)[idx]])
@@ -1109,6 +1101,8 @@ def extract_binary_masks_blob(A,
neg_examples:

"""
logger = logging.getLogger("caiman")

params = cv2.SimpleBlobDetector_Params()
params.minCircularity = minCircularity
params.minInertiaRatio = minInertiaRatio
@@ -1136,7 +1130,7 @@ def extract_binary_masks_blob(A,
neg_examples = []

for count, comp in enumerate(A.tocsc()[:].T):
logging.debug(count)
logger.debug(count)
comp_d = np.array(comp.todense())
gray_image = np.reshape(comp_d, dims, order='F')
gray_image = (gray_image - np.min(gray_image)) / \
@@ -1161,7 +1155,7 @@ def extract_binary_masks_blob(A,
edges = (label_objects == (1 + idx_largest))
edges = scipy.ndimage.binary_fill_holes(edges)
else:
logging.warning('empty component')
logger.warning('empty component')
edges = np.zeros_like(edges)

masks_ws.append(edges)
@@ -1265,14 +1259,15 @@ def detect_duplicates_and_subsets(binary_masks,
dist_thr: float = 0.1,
min_dist=10,
thresh_subset: float = 0.8):
logger = logging.getLogger("caiman")

cm = [scipy.ndimage.center_of_mass(mm) for mm in binary_masks]
sp_rois = scipy.sparse.csc_matrix(np.reshape(binary_masks, (binary_masks.shape[0], -1)).T)
D = distance_masks([sp_rois, sp_rois], [cm, cm], min_dist)[0]
np.fill_diagonal(D, 1)
overlap = sp_rois.T.dot(sp_rois).toarray()
sz = np.array(sp_rois.sum(0))
logging.info(sz.shape)
logger.info(sz.shape)
overlap = overlap / sz.T
np.fill_diagonal(overlap, 0)
# pairs of duplicate indices
@@ -1287,7 +1282,7 @@ def detect_duplicates_and_subsets(binary_masks,
metric = r_values.squeeze()
else:
metric = sz.squeeze()
logging.debug('***** USING MAX AREA BY DEFAULT')
logger.debug('***** USING MAX AREA BY DEFAULT')

overlap_tmp = overlap.copy() >= thresh_subset
overlap_tmp = overlap_tmp * metric[:, None]
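A hedged sketch of what the named logger enables downstream (not part of this PR): an application embedding caiman can now set the library's verbosity without touching the root logger.

```python
import logging

# Hypothetical downstream configuration: attach a handler and level to
# the "caiman" logger only. Records emitted via logging.getLogger("caiman")
# in caiman.base.rois and friends flow through this handler, while the
# application's own root-logger configuration is left alone.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s"))

caiman_logger = logging.getLogger("caiman")
caiman_logger.addHandler(handler)
caiman_logger.setLevel(logging.DEBUG)  # or logging.WARNING to quiet it down
```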
17 changes: 10 additions & 7 deletions caiman/base/timeseries.py
@@ -173,17 +173,19 @@ def save(self,
Exception 'Extension Unknown'

"""
logger = logging.getLogger("caiman")

file_name = caiman.paths.fn_relocated(file_name)
name, extension = os.path.splitext(file_name)[:2] # name is only used by the memmap saver
extension = extension.lower()
logging.debug("Parsing extension " + str(extension))
logger.debug(f"Parsing extension {extension}")

if extension in ['.tif', '.tiff', '.btf']:
with tifffile.TiffWriter(file_name, bigtiff=bigtiff, imagej=imagej) as tif:
if "%4d%02d%02d" % tuple(map(int, tifffile.__version__.split('.'))) >= '20200813':
def foo(i):
if i % 200 == 0:
logging.debug(str(i) + ' frames saved')
logger.debug(f'{i} frames saved')
curfr = self[i].copy()
if to32 and not ('float32' in str(self.dtype)):
curfr = curfr.astype(np.float32)
@@ -196,7 +198,7 @@ def foo(i):
else:
for i in range(self.shape[0]):
if i % 200 == 0:
logging.debug(str(i) + ' frames saved')
logger.debug(f'{i} frames saved')
curfr = self[i].copy()
if to32 and not ('float32' in str(self.dtype)):
curfr = curfr.astype(np.float32)
@@ -295,9 +297,9 @@ def foo(i):
try:
dset.attrs["file_name"] = [a.encode('utf8') for a in self.file_name]
except:
logging.warning('No file saved')
logger.warning('No file saved')
if self.meta_data[0] is not None:
logging.debug("Metadata for saved file: " + str(self.meta_data))
logger.debug("Metadata for saved file: " + str(self.meta_data))
dset.attrs["meta_data"] = cpk.dumps(self.meta_data)
return file_name
elif extension == '.mmap':
@@ -370,7 +372,7 @@ def foo(i):
return file_name

else:
logging.error("Extension " + str(extension) + " unknown")
logger.error(f"Extension {extension} unknown")
raise Exception('Extension Unknown')


@@ -382,6 +384,7 @@ def concatenate(*args, **kwargs):
mov: XMovie object
"""
# todo: todocument return
logger = logging.getLogger("caiman")

frRef = None
for arg in args:
@@ -400,6 +403,6 @@ def concatenate(*args, **kwargs):
try:
return obj.__class__(np.concatenate(*args, **kwargs), **obj.__dict__)
except:
logging.debug('no meta information passed')
logger.debug('no meta information passed')
return obj.__class__(np.concatenate(*args, **kwargs))

6 changes: 4 additions & 2 deletions caiman/base/traces.py
@@ -58,18 +58,20 @@ def computeDFF(self, window_sec=5, minQuantile=20):
ValueError "All traces must be positive"
ValueError "The window must be shorter than the total length"
"""
logger = logging.getLogger("caiman")

if np.min(self) <= 0:
raise ValueError("All traces must be positive")

T, _ = self.shape
window = int(window_sec * self.fr)
logging.debug(window)
logger.debug(window)
if window >= T:
raise ValueError("The window must be shorter than the total length")

tracesDFF = []
for tr in self.T:
logging.debug(f"TR Shape is {tr.shape}")
logger.debug(f"TR Shape is {tr.shape}")
traceBL = [np.percentile(tr[i:i + window], minQuantile) for i in range(1, len(tr) - window)]
missing = np.percentile(tr[-window:], minQuantile)
missing = np.repeat(missing, window + 1)
6 changes: 3 additions & 3 deletions caiman/cluster.py
@@ -21,9 +21,6 @@
import time
from typing import Any, Optional, Union

logger = logging.getLogger(__name__)


def extract_patch_coordinates(dims: tuple,
rf: Union[list, tuple],
stride: Union[list[int], tuple],
@@ -107,6 +104,7 @@ def start_server(ipcluster: str = "ipcluster", ncpus: int = None) -> None:
ipcluster binary file name; requires 4 path separators on Windows. ipcluster="C:\\\\Anaconda3\\\\Scripts\\\\ipcluster.exe"
Default: "ipcluster"
"""
logger = logging.getLogger("caiman")
logger.info("Starting cluster...")
if ncpus is None:
ncpus = psutil.cpu_count()
@@ -142,6 +140,7 @@ def stop_server(ipcluster: str = 'ipcluster', pdir: str = None, profile: str = N
dview: Undocumented

"""
logger = logging.getLogger("caiman")
if 'multiprocessing' in str(type(dview)):
dview.terminate()
else:
@@ -216,6 +215,7 @@ def setup_cluster(backend:str = 'multiprocessing',
number of workers in dview. None means single core mode in use.
"""

logger = logging.getLogger("caiman")
sys.stdout.flush() # XXX Unsure why we do this
if n_processes is None:
n_processes = np.maximum(int(psutil.cpu_count() - 1), 1)
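cluster.py previously kept a module-level `logger = logging.getLogger(__name__)`; the PR replaces it with the fixed "caiman" name fetched inside each function. A small sketch of the naming difference, assuming only stdlib logging semantics:

```python
import logging

parent = logging.getLogger("caiman")         # the name this PR standardizes on
child = logging.getLogger("caiman.cluster")  # what getLogger(__name__) yields inside caiman/cluster.py
print(child.parent is parent)                # True: child loggers propagate records up to "caiman"
```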