Merge branch 'staging' into caiman_per_plane_processing
kushalbakshi authored Nov 28, 2023
2 parents 7651d35 + 0863475 commit b565204
Showing 10 changed files with 360 additions and 5 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,10 @@
Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and
[Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention.

## [0.9.0] - 2023-10-13

+ Add - Export to NWB and upload to DANDI

## [0.8.1] - 2023-08-31

+ Fix - Rename `get_image_files` to `get_calcium_imaging_files` where missed
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -1,5 +1,5 @@
# Contribution Guidelines

This project follows the
[DataJoint Contribution Guidelines](https://datajoint.com/docs/community/contribute/).
[DataJoint Contribution Guidelines](https://datajoint.com/docs/about/contribute/).
Please reference the link for more full details.
2 changes: 1 addition & 1 deletion docs/.docker/pip_requirements.txt
@@ -1,4 +1,4 @@
mkdocs-material
mkdocs-material==9.1.17
mkdocs-redirects
mkdocstrings
mkdocstrings-python
15 changes: 15 additions & 0 deletions docs/src/concepts.md
@@ -60,3 +60,18 @@ segmented.
- Scanbox
- Nikon NIS-Elements
- Bruker Prairie View

## Data Export and Publishing

Element Calcium Imaging supports exporting all data to standard Neurodata
Without Borders (NWB) files. This makes it easy to share files with collaborators and
publish results on the [DANDI Archive](https://dandiarchive.org/).
[NWB](https://www.nwb.org/), as an organization, is dedicated to standardizing data
formats and maximizing interoperability across tools for neurophysiology.

To use the export functionality, install the Element with the `nwb` option, which
includes the additional related dependencies:

```console
pip install element-calcium-imaging[nwb]
```
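
A minimal usage sketch is shown below. The `session_key` values and the output path
are placeholders (your `Session` primary-key attributes may differ), so adjust them to
match your pipeline:

```python
from element_calcium_imaging.export.nwb import imaging_session_to_nwb, write_nwb

# Placeholder key -- use the primary-key attributes of your own Session table.
session_key = {"subject": "subject1", "session_datetime": "2023-10-13 12:00:00"}

# Convert the session's scan, imaging-plane, and segmentation data to an NWBFile.
nwbfile = imaging_session_to_nwb(session_key, include_raw_data=False)

# Write the file to disk; check_read re-opens it to verify it is readable.
write_nwb(nwbfile, "/path/to/output/subject1_session.nwb", check_read=True)
```

The resulting `.nwb` file can then be organized and uploaded to the DANDI Archive with
the `dandi` command-line tool, which is installed alongside the `nwb` dependencies.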
4 changes: 2 additions & 2 deletions docs/src/roadmap.md
@@ -16,8 +16,8 @@ the common motifs to create Element Calcium Imaging. Major features include:
- [x] Quality metrics
- [ ] Data compression
- [ ] Deepinterpolation
- [ ] Data export to NWB
- [ ] Data publishing to DANDI
- [x] Data export to NWB
- [x] Data publishing to DANDI

Further development of this Element is community driven. Upon user requests and based on
guidance from the Scientific Steering Group we will continue adding features to this
Empty file.
1 change: 1 addition & 0 deletions element_calcium_imaging/export/nwb/__init__.py
@@ -0,0 +1 @@
from .nwb import imaging_session_to_nwb, write_nwb
333 changes: 333 additions & 0 deletions element_calcium_imaging/export/nwb/nwb.py
@@ -0,0 +1,333 @@
import numpy as np
import datajoint as dj
from datajoint import DataJointError
from pynwb import NWBHDF5IO, NWBFile
from pynwb.ophys import (
Fluorescence,
ImageSegmentation,
OpticalChannel,
RoiResponseSeries,
TwoPhotonSeries,
)

from ... import scan, imaging_no_curation
from ...scan import get_calcium_imaging_files, get_imaging_root_data_dir


logger = dj.logger

if imaging_no_curation.schema.is_activated():
imaging = imaging_no_curation
else:
raise DataJointError(
"This export function is designed for the `imaging_no_curation` module."
)


def imaging_session_to_nwb(
session_key,
include_raw_data=False,
lab_key=None,
project_key=None,
protocol_key=None,
nwbfile_kwargs=None,
):
"""Main function for converting calcium imaging data to NWB.
Args:
session_key (dict): key from Session table.
include_raw_data (bool): Optional. Default False. Include the raw data from
source. `ScanImage`, `Scanbox`, and `PrairieView` are supported.
lab_key (dict): Optional key to add metadata from Element Lab.
project_key (dict): Optional key to add metadata from Element Lab.
protocol_key (dict): Optional key to add metadata from Element Lab.
nwbfile_kwargs (dict): Optional. If Element Session is not used, this argument
is required and must be a dictionary containing 'session_description' (str),
'identifier' (str), and 'session_start_time' (datetime), the required
minimal data for instantiating an NWBFile object. If element-session is
being used, this argument can optionally be used to overwrite NWBFile
fields.
Returns:
nwbfile (NWBFile): nwb file
"""

session_to_nwb = getattr(imaging._linking_module, "session_to_nwb", False)

if session_to_nwb:
nwb_file = session_to_nwb(
session_key,
lab_key=lab_key,
project_key=project_key,
protocol_key=protocol_key,
additional_nwbfile_kwargs=nwbfile_kwargs,
)
else:
nwb_file = NWBFile(**nwbfile_kwargs)

if include_raw_data:
_create_raw_data_nwbfile(session_key, linked_nwb_file=nwb_file)
if not nwb_file.imaging_planes:
_add_scan_to_nwb(session_key, nwbfile=nwb_file)

else:
_add_scan_to_nwb(session_key, nwbfile=nwb_file)
_add_image_series_to_nwb(
session_key, imaging_plane=nwb_file.imaging_planes["ImagingPlane"]
)
_add_segmentation_data_to_nwb(
session_key,
nwbfile=nwb_file,
imaging_plane=nwb_file.imaging_planes["ImagingPlane"],
)

return nwb_file


def _create_raw_data_nwbfile(session_key, linked_nwb_file):
"""Adds raw data to NWB file.
Args:
session_key (dict): key from Session table
linked_nwb_file (NWBFile): nwb file
"""

acquisition_software = (scan.Scan & session_key).fetch1("acq_software")
frame_rate = (scan.ScanInfo & session_key).fetch1("fps")

if acquisition_software == "NIS":
raise NotImplementedError(
"Packaging raw data acquired from `Nikon NIS Elements` software is not supported at this time."
)

elif acquisition_software == "PrairieView":
n_planes = (scan.ScanInfo & session_key).fetch1("ndepths")
raw_data_files_location = get_calcium_imaging_files(
session_key, acquisition_software
)

if n_planes > 1:
from neuroconv.converters import (
BrukerTiffMultiPlaneConverter as BrukerTiffConverter,
)

imaging_interface = BrukerTiffConverter(
file_path=raw_data_files_location[0],
fallback_sampling_frequency=frame_rate,
plane_separation_type="disjoint",
)
else:
from neuroconv.converters import (
BrukerTiffSinglePlaneConverter as BrukerTiffConverter,
)

imaging_interface = BrukerTiffConverter(
file_path=raw_data_files_location[0],
fallback_sampling_frequency=frame_rate,
)
metadata = imaging_interface.get_metadata()
imaging_interface.add_to_nwbfile(
nwbfile=linked_nwb_file,
metadata=metadata,
)
else:
if acquisition_software == "ScanImage":
from neuroconv.datainterfaces import (
ScanImageImagingInterface as ImagingInterface,
)
elif acquisition_software == "Scanbox":
from neuroconv.datainterfaces import SbxImagingInterface as ImagingInterface

raw_data_files_location = get_calcium_imaging_files(
session_key, acquisition_software
)

imaging_interface = ImagingInterface(
file_path=raw_data_files_location[0], fallback_sampling_frequency=frame_rate
)
metadata = imaging_interface.get_metadata()
imaging_interface.add_to_nwbfile(
nwbfile=linked_nwb_file,
metadata=metadata,
)


def _add_scan_to_nwb(session_key, nwbfile):
"""Adds metadata for a scan from database.
Args:
session_key (dict): key from Session table
nwbfile (NWBFile): nwb file
"""

from math import nan

try:
scan_key = (scan.Scan & session_key).fetch1("KEY")
except DataJointError:
raise NotImplementedError(
"Exporting more than one scan per session to NWB is not supported yet."
)

scanner_name, scan_notes = (scan.Scan & scan_key).fetch1("scanner", "scan_notes")
device = nwbfile.create_device(
name=scanner_name if scanner_name is not None else "TwoPhotonMicroscope",
description="Two photon microscope",
manufacturer="Microscope manufacturer",
)

no_channels, frame_rate = (scan.ScanInfo & scan_key).fetch1("nchannels", "fps")

field_keys = (scan.ScanInfo.Field & scan_key).fetch("KEY")

for channel in range(no_channels):
optical_channel = OpticalChannel(
name=f"OpticalChannel{channel+1}",
description=f"Optical channel number {channel+1}",
emission_lambda=nan,
)

for field_key in field_keys:
field_no = (scan.ScanInfo.Field & field_key).fetch1("field_idx")
imaging_plane = nwbfile.create_imaging_plane(
name="ImagingPlane",
optical_channel=optical_channel,
imaging_rate=frame_rate,
description=scan_notes
if scan_notes != ""
else f"Imaging plane for field {field_no+1}, channel {channel+1}",
device=device,
excitation_lambda=nan,
indicator="unknown",
location="unknown",
grid_spacing=(0.01, 0.01),
grid_spacing_unit="meters",
origin_coords=[1.0, 2.0, 3.0],
origin_coords_unit="meters",
)
return imaging_plane


def _add_image_series_to_nwb(session_key, imaging_plane):
"""Adds TwoPhotonSeries to NWB file.
Args:
session_key (dict): key from Session table
imaging_plane (NWBFile Imaging Plane): nwb file imaging plane object
"""

imaging_files = (scan.ScanInfo.ScanFile & session_key).fetch("file_path")
two_p_series = TwoPhotonSeries(
name="TwoPhotonSeries",
dimension=(scan.ScanInfo.Field & session_key).fetch1("px_height", "px_width"),
external_file=imaging_files,
imaging_plane=imaging_plane,
starting_frame=[0],
format="external",
starting_time=0.0,
rate=(scan.ScanInfo & session_key).fetch1("fps"),
)
return two_p_series


def _add_motion_correction_to_nwb(session_key, nwbfile):
raise NotImplementedError(
"Motion Correction data cannot be packaged into NWB at this time."
)


def _add_segmentation_data_to_nwb(session_key, nwbfile, imaging_plane):
"""Adds segmentation data from database.
Args:
session_key (dict): key from Session table
nwbfile (NWBFile): nwb file
imaging_plane (NWBFile Imaging Plane): nwb file imaging plane object
"""

ophys_module = nwbfile.create_processing_module(
name="ophys", description="optical physiology processed data"
)
img_seg = ImageSegmentation()
ps = img_seg.create_plane_segmentation(
name="PlaneSegmentation",
description="output from segmenting",
imaging_plane=imaging_plane,
)
ophys_module.add(img_seg)

mask_keys = (imaging.Segmentation.Mask & session_key).fetch("KEY")
for mask_key in mask_keys:
ps.add_roi(
pixel_mask=np.asarray(
(imaging.Segmentation.Mask() & mask_key).fetch1(
"mask_xpix", "mask_ypix", "mask_weights"
)
).T
)

rt_region = ps.create_roi_table_region(
region=((imaging.Segmentation.Mask & session_key).fetch("mask")).tolist(),
description="All ROIs from database.",
)

channels = (scan.ScanInfo & session_key).fetch1("nchannels")
for channel in range(channels):
roi_resp_series = RoiResponseSeries(
name=f"Fluorescence_{channel}",
data=np.stack(
(
imaging.Fluorescence.Trace
& session_key
& f"fluo_channel='{channel}'"
).fetch("fluorescence")
).T,
rois=rt_region,
unit="a.u.",
rate=(scan.ScanInfo & session_key).fetch1("fps"),
)
neuropil_series = RoiResponseSeries(
name=f"Neuropil_{channel}",
data=np.stack(
(
imaging.Fluorescence.Trace
& session_key
& f"fluo_channel='{channel}'"
).fetch("neuropil_fluorescence")
).T,
rois=rt_region,
unit="a.u.",
rate=(scan.ScanInfo & session_key).fetch1("fps"),
)
deconvolved_series = RoiResponseSeries(
name=f"Deconvolved_{channel}",
data=np.stack(
(
imaging.Activity.Trace & session_key & f"fluo_channel='{channel}'"
).fetch("activity_trace")
).T,
rois=rt_region,
unit="a.u.",
rate=(scan.ScanInfo & session_key).fetch1("fps"),
)
fl = Fluorescence(
roi_response_series=[roi_resp_series, neuropil_series, deconvolved_series]
)
ophys_module.add(fl)


def write_nwb(nwbfile, fname, check_read=True):
"""Export NWBFile
Args:
nwbfile (NWBFile): nwb file
fname (str): Absolute path including `*.nwb` extension.
check_read (bool): If True, PyNWB will try to read the produced NWB file and
ensure that it can be read.
"""
with NWBHDF5IO(fname, "w") as io:
io.write(nwbfile)

if check_read:
with NWBHDF5IO(fname, "r") as io:
io.read()
logger.info("File saved successfully")
3 changes: 3 additions & 0 deletions element_calcium_imaging/export/nwb/requirements.txt
@@ -0,0 +1,3 @@
dandi
pynwb
neuroconv[scanimage, brukertiff, scanbox, caiman, suite2p, extract]