diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6fa9e94..92d0ae0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,5 @@
 default_language_version:
-  python: python3.8
+  python: python3
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
diff --git a/region_grower/atlas_helper.py b/region_grower/atlas_helper.py
index f41b8da..e7d27d5 100644
--- a/region_grower/atlas_helper.py
+++ b/region_grower/atlas_helper.py
@@ -2,6 +2,7 @@
 This helper allows simple lookups without having to reason in term of [PH][1-6] and [PH]y.
 """
+
 import operator
 from pathlib import Path
 from typing import List
@@ -31,16 +32,24 @@ def __init__(self, atlas: Atlas, region_structure_path: str):
         if region_structure_path is not None and Path(region_structure_path).exists():
             with open(region_structure_path, "r", encoding="utf-8") as region_file:
                 self.region_structure = yaml.safe_load(region_file)
+            self.region_structure_base_path = Path(region_structure_path).parent
         else:
-            raise ValueError("Please provide an existing region_structure file.")
+            raise ValueError(f"region_structure file not found at {region_structure_path}.")
         self.regions = list(self.region_structure.keys())
         self.layers = {}
         for region in self.regions:
             self.layers[region] = self.region_structure[region]["layers"]
-        self.y = atlas.load_data("[PH]y")
+        try:
+            self.y = atlas.load_data("[PH]y")
+        except Exception:  # the [PH]y volume is optional
+            self.y = None
+
         self.brain_regions = atlas.load_data("brain_regions")
-        self.orientations = atlas.load_data("orientation", cls=OrientationField)
+        try:
+            self.orientations = atlas.load_data("orientation", cls=OrientationField)
+        except Exception:  # the orientation volume is optional
+            self.orientations = None
 
     def layer_thickness(self, layer: Union[int, str]) -> Atlas:
         """Returns an atlas of the layer thickness."""
diff --git a/region_grower/cli.py b/region_grower/cli.py
index 3c0f93f..1bf414e 100644
--- a/region_grower/cli.py
+++ b/region_grower/cli.py
@@ -195,6 +195,12 @@ def generate_distributions(
     is_flag=True,
     default=False,
 )
+@click.option(
+    "--resume",
+    help="Resume synthesis: skip cells whose morphology file already exists (default: False)",
+    is_flag=True,
+    default=False,
+)
 @click.option(
     "--max-drop-ratio",
     help="Max drop ratio for any mtype (default: 0)",
@@ -284,6 +290,12 @@ def generate_distributions(
     is_flag=True,
     default=False,
 )
+@click.option(
+    "--synthesize-axons",
+    help="Synthesize axons instead of grafting them (default: False)",
+    is_flag=True,
+    default=False,
+)
 def synthesize_morphologies(**kwargs):  # pylint: disable=too-many-arguments, too-many-locals
     """Synthesize morphologies."""
     if mpi_enabled and kwargs.get("with_mpi", False):  # pragma: no cover
diff --git a/region_grower/context.py b/region_grower/context.py
index 5a46284..07f7d40 100644
--- a/region_grower/context.py
+++ b/region_grower/context.py
@@ -6,14 +6,20 @@
 TLDR: SpaceContext.synthesized() is being called by the placement_algorithm package
 to synthesize circuit morphologies.
 """
+
+import json
 import os
 from collections import defaultdict
 from copy import deepcopy
+from functools import partial
+from pathlib import Path
 from typing import Dict
 from typing import List
 from typing import Optional
 from typing import Union
 
+import trimesh
+
 os.environ.setdefault("NEURON_MODULE_OPTIONS", "-nogui")  # suppress NEURON warning
 # This environment variable must be set before 'NEURON' is loaded in 'morph_tool.nrnhines'.
 # Note that if 'NEURON' was imported before it will not consider this environment variable.
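# Illustrative sketch, not part of the patch: with the atlas_helper change
# above, AtlasHelper now tolerates atlases lacking the optional "[PH]y" or
# "orientation" volumes and sets the matching attributes to None. The atlas
# path below is hypothetical; Atlas.open is the usual voxcell entry point.
from voxcell.nexus.voxelbrain import Atlas

from region_grower.atlas_helper import AtlasHelper

helper = AtlasHelper(Atlas.open("/path/to/atlas"), "region_structure.yaml")
if helper.orientations is None:
    # callers are expected to fall back to identity orientations
    # (see assign_atlas_data further down in this patch)
    pass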
@@ -34,6 +40,8 @@ from neurots import NeuroTSError  # noqa: E402 ; pylint: disable=C0413
 from neurots.utils import PIA_DIRECTION  # noqa: E402 ; pylint: disable=C0413
 from voxcell.cell_collection import CellCollection  # noqa: E402 ; pylint: disable=C0413
+from voxcell.voxel_data import OrientationField
+from voxcell import VoxcellError
 
 from region_grower import RegionGrowerError  # noqa: E402 ; pylint: disable=C0413
 from region_grower import SkipSynthesisError  # noqa: E402 ; pylint: disable=C0413
@@ -68,6 +76,7 @@ class CellState:
     orientation: Matrix
     mtype: str
     depth: float
+    other_parameters: Dict
 
     def lookup_orientation(self, vector: Optional[Point] = None) -> np.array:
         """Returns the looked-up orientation for the given position.
@@ -83,6 +92,252 @@ class SpaceContext:
     layer_depths: List
     cortical_depths: List
+    soma_position: float
+    soma_depth: float
+    boundaries: List = None
+    directions: Dict = None
+    atlas_info: Dict = {}  # voxel_dimensions, offset and shape from atlas for indices conversion
+    orientations = None
+    mesh = None
+
+    def indices_to_positions(self, indices):
+        """Local version of voxcell's function to prevent atlas loading."""
+        return indices * np.array(self.atlas_info["voxel_dimensions"]) + np.array(
+            self.atlas_info["offset"]
+        )
+
+    def positions_to_indices(self, positions):
+        """Local and reduced version of voxcell's function to prevent atlas loading."""
+        result = (positions - np.array(self.atlas_info["offset"])) / np.array(
+            self.atlas_info["voxel_dimensions"]
+        )
+        result[np.abs(result) < 1e-7] = 0.0  # suppress rounding errors around 0
+        result = np.floor(result).astype(int)
+        result[result < 0] = -1
+        result[result >= self.atlas_info["shape"]] = -1
+        return result
+
+    def get_direction(self, cell):
+        """Get direction data for the given mtype."""
+        directions = []
+        for direction in self.directions:
+            mtypes = direction.get("mtypes", None)
+            if mtypes is not None and cell.mtype not in mtypes:
+                continue
+
+            # specific for barrel cortex
+            mclasses = direction.get("mclass", None)
+            if mclasses is not None and cell.other_parameters.get("mclass", None) not in mclasses:
+                continue
+
+            def section_prob(seg_direction, current_point, direction=None):
+                """Probability function for the sections."""
+                if self.orientations is None:
+                    self.orientations = OrientationField.load_nrrd(
+                        self.atlas_info["direction_nrrd_path"]
+                    )
+                try:
+                    target_direction = np.dot(
+                        self.orientations.lookup(current_point), direction["params"]["direction"]
+                    )[0]
+                except VoxcellError:
+                    # if we are outside the atlas, we do nothing
+                    return 1.0
+                power = direction["params"].get("power", 1.0)
+                mode = direction["params"].get("mode", "parallel")
+                layers = direction["params"].get("layers", [])
+                if layers:
+                    # this does not work well in a curved atlas
+                    depth = self.soma_depth + (self.soma_position - current_point).dot(
+                        direction["pia_direction"]
+                    )
+                    in_layer = False
+                    for layer in layers:
+                        if self.layer_depths[layer - 1] < depth < self.layer_depths[layer]:
+                            in_layer = True
+                    if not in_layer:
+                        return 1.0
+
+                if mode == "parallel":
+                    p = (1.0 - np.arccos(seg_direction.dot(target_direction)) / np.pi) ** power
+                elif mode == "perpendicular":
+                    p = (
+                        1.0
+                        - abs(np.arccos(seg_direction.dot(target_direction)) / np.pi * 2.0 - 1.0)
+                    ) ** power
+
+                else:
+                    raise ValueError(f"mode {mode} not understood for direction probability")
+
+                if np.isnan(p):
+                    # this computation sometimes yields NaN; the cause remains to be investigated
+                    return 1.0
+
+                return np.clip(p, 0, 1)
+
+            direction["section_prob"] = partial(section_prob, direction=direction)
+            directions.append(direction)
+        return directions
+
+    def get_boundaries(
+        self, cell
+    ):  # pylint: disable=too-many-locals,too-many-statements,too-many-branches
+        """Returns a list of boundary data for NeuroTS."""
+
+        def get_distance_to_mesh(mesh, ray_origin, ray_direction, mesh_type):
+            """Compute distances from point/directions to a mesh."""
+            debug = os.environ.get("REGION_GROWER_BOUNDARY_DEBUG", 0)
+
+            if mesh_type == "voxel":
+                ray_origin = self.positions_to_indices(ray_origin)
+
+            locations = mesh.ray.intersects_location(
+                ray_origins=[ray_origin], ray_directions=[ray_direction]
+            )[0]
+
+            if len(locations):
+                intersect = locations[0]
+                if mesh_type == "voxel":
+                    intersect = self.indices_to_positions(intersect)
+                    ray_origin = self.indices_to_positions(ray_origin)
+
+                dist = np.linalg.norm(intersect - ray_origin)
+
+                if debug:  # pragma: no cover
+                    x, y, z = ray_origin
+                    vx, vy, vz = ray_direction
+                    with open("data.csv", "a", encoding="utf8") as file:
+                        print(f"{x}, {y}, {z}, {vx}, {vy}, {vz}, {dist}", file=file)
+
+                return dist
+
+            if debug:  # pragma: no cover
+                x, y, z = ray_origin
+                vx, vy, vz = ray_direction
+                with open("data.csv", "a", encoding="utf8") as file:
+                    print(f"{x}, {y}, {z}, {vx}, {vy}, {vz}, {-100}", file=file)
+
+            return np.inf  # pragma: no cover
+
+        # convert raw config data to NeuroTS context data
+        all_boundaries = json.loads(self.boundaries)
+        boundaries = []
+        for boundary in all_boundaries:
+            mtypes = boundary.get("mtypes", None)
+            if mtypes is not None and cell.mtype not in mtypes:
+                continue
+
+            # specific for barrel cortex
+            mclasses = boundary.get("mclass", None)
+            if mclasses is not None and cell.other_parameters.get("mclass", None) not in mclasses:
+                continue
+
+            mesh_type = boundary.get("mesh_type", "voxel")
+            mode = boundary.get("mode", "repulsive")
+
+            meshes = []
+            if Path(boundary["path"]).is_dir():
+                soma_position = self.soma_position
+                if mesh_type == "voxel":
+                    soma_position = self.positions_to_indices(soma_position)
+
+                if boundary.get("multimesh_mode", "closest") == "closest":
+                    mesh_paths = list(Path(boundary["path"]).iterdir())
+                    for mesh_path in mesh_paths:
+                        meshes.append(trimesh.load_mesh(mesh_path))
+                    distances = [
+                        trimesh.proximity.closest_point(mesh, [soma_position])[1][0]
+                        for mesh in meshes
+                    ]
+                    mesh_id = np.argmin(distances)
+                    boundary["mesh_name"] = Path(mesh_paths[mesh_id]).stem
+                    self.mesh = meshes[mesh_id]
+
+                if boundary.get("multimesh_mode", "closest") == "closest_y":
+                    mesh_paths = list(Path(boundary["path"]).iterdir())
+                    meshes = [trimesh.load_mesh(mesh_path) for mesh_path in mesh_paths]
+                    # this assumes the original direction can be used as a straight line;
+                    # a refinement would be to follow the vector field
+                    distances = [
+                        np.linalg.norm(
+                            np.cross(soma_position - mesh.center_mass, boundary["direction"])
+                        )
+                        for mesh in meshes
+                    ]
+
+                    mesh_id = np.argmin(distances)
+                    boundary["mesh_name"] = Path(mesh_paths[mesh_id]).stem
+                    self.mesh = meshes[mesh_id]
+
+                if boundary.get("multimesh_mode", "closest") == "inside":
+                    for mesh_path in Path(boundary["path"]).iterdir():
+                        _mesh = trimesh.load_mesh(mesh_path)
+                        if _mesh.contains([soma_position])[0]:
+                            self.mesh = _mesh
+                            break
+
+                if boundary.get("multimesh_mode", "closest") == "territories":
+                    glom_id = int(cell.other_parameters["glomerulus_id"])
+                    if glom_id >= 0:
+                        self.mesh = trimesh.load_mesh(
+                            Path(boundary["path"]) / f"glomeruli_{glom_id:03d}.obj"
+                        )
+
+            else:
+                self.mesh = trimesh.load_mesh(boundary["path"])
+
+            if mode == "repulsive":
+
+                def prob(direction, current_point, mesh=None, mesh_type=None, params=None):
+                    """Probability function for repulsive mode."""
+                    dist = get_distance_to_mesh(mesh, current_point, direction, mesh_type=mesh_type)
+                    p = (dist - params.get("d_min", 0)) / (
+                        params.get("d_max", 100) - max(0, params.get("d_min", 0))
+                    ) ** params.get("power", 1.5)
+                    return np.clip(p, 0, 1)
+
+            elif mode == "attractive":
+
+                def prob(direction, current_point, mesh=None, mesh_type=None, params=None):
+                    """Probability function for attractive mode."""
+                    if mesh_type == "voxel":
+                        current_point_id = self.positions_to_indices(current_point)
+                    else:
+                        current_point_id = current_point
+
+                    if mesh.contains([current_point_id])[0]:
+                        # when we get inside the mesh, we are free
+                        return 1.0
+
+                    closest_point = trimesh.proximity.closest_point(mesh, [current_point_id])[0][0]
+                    if mesh_type == "voxel":
+                        closest_point = self.indices_to_positions(closest_point)
+
+                    mesh_direction = closest_point - current_point
+                    distance = np.linalg.norm(mesh_direction)
+                    if params.get("d_min", 0) < distance < params.get("d_max", 100):
+                        p = (
+                            1 - np.arccos(direction.dot(mesh_direction) / distance) / np.pi
+                        ) ** params.get("power", 1.0)
+                        return np.clip(p, 0, 1)
+                    return 1.0
+
+            else:
+                raise ValueError(f"boundary mode {mode} not understood!")
+
+            if self.mesh is not None:
+                if "params_section" in boundary:
+                    boundary["section_prob"] = partial(
+                        prob, mesh=self.mesh, mesh_type=mesh_type, params=boundary["params_section"]
+                    )
+
+                if "params_trunk" in boundary:
+                    boundary["trunk_prob"] = partial(
+                        prob, mesh=self.mesh, mesh_type=mesh_type, params=boundary["params_trunk"]
+                    )
+
+            boundaries.append(boundary)
+        return boundaries
 
     def layer_fraction_to_position(self, layer: int, layer_fraction: float) -> float:
         """Returns an absolute position from a layer and a fraction of the layer.
@@ -95,6 +350,9 @@ def layer_fraction_to_position(self, layer: int, layer_fraction: float) -> float
         Returns: target depth
         """
         layer_thickness = self.layer_depths[layer] - self.layer_depths[layer - 1]
+        if layer_thickness <= 0:
+            raise SkipSynthesisError(f"Layer {layer} has zero or negative thickness")
+
         return self.layer_depths[layer - 1] + (1.0 - layer_fraction) * layer_thickness
 
     def lookup_target_reference_depths(self, depth: float) -> np.array:
@@ -111,7 +369,6 @@ def lookup_target_reference_depths(self, depth: float) -> np.array:
             # we round to avoid being outside due to numerical precision
             if np.round(depth, 3) <= np.round(layer_depth, 3):
                 return layer_depth, cortical_depth
-
         raise RegionGrowerError(f"Current depth ({depth}) is outside of circuit boundaries")
 
     def distance_to_constraint(self, depth: float, constraint: Dict) -> Optional[float]:
@@ -128,7 +385,10 @@ def distance_to_constraint(self, depth: float, constraint: Dict) -> Optional[float]:
         constraint_position = self.layer_fraction_to_position(
             constraint["layer"], constraint["fraction"]
         )
-        return depth - constraint_position
+        out = depth - constraint_position
+        if np.isnan(out):
+            raise SkipSynthesisError("NaN in some [PH] values")
+        return out
 
 
 @attr.s(auto_attribs=True)
@@ -141,7 +401,6 @@ class SynthesisParameters:
     axon_morph_scale: Optional[float] = None
     rotational_jitter_std: float = None
     scaling_jitter_std: float = None
-    recenter: bool = True
     seed: int = 0
     min_hard_scale: float = 0
@@ -183,6 +442,21 @@ def __init__(
 
     def _correct_position_orientation_scaling(self, params_orig: Dict) -> Dict:
         """Return a copy of the parameter with the correct orientation and scaling."""
         params = deepcopy(params_orig)
+        if self.context.directions is not None:
+            if isinstance(self.context.directions, str):
+                self.context.directions = json.loads(self.context.directions)
+            for i, direction in enumerate(self.context.directions):
+                self.context.directions[i]["pia_direction"] = self.cell.lookup_orientation(
+                    PIA_DIRECTION
+                ).tolist()
+
+        if self.context.boundaries is not None:
+            boundaries = json.loads(self.context.boundaries)
+            for boundary in boundaries:
+                if boundary.get("multimesh_mode", "closest") == "closest_y":
+                    boundary["direction"] = self.cell.lookup_orientation(PIA_DIRECTION).tolist()
+
+            self.context.boundaries = json.dumps(boundaries)
 
         for neurite_type in params["grow_types"]:
             if isinstance(params[neurite_type]["orientation"], dict):
@@ -242,7 +516,6 @@ def _post_growth_rescaling(self, grower: NeuronGrower, params: Dict) -> None:
                 target_min_length=target_min_length,
                 target_max_length=target_max_length,
             )
-
             if scale > self.params.min_hard_scale:
                 scale_section(root_section, ScaleParameters(mean=scale), recursive=True)
                 is_deleted = False
@@ -282,7 +555,10 @@ def synthesize(self) -> SynthesisResult:
         for _ in range(self.internals.retries):
             try:
                 return self._synthesize_once(rng)
-            except NeuroTSError:
+            except (
+                NeuroTSError,
+                ValueError,
+            ):  # a ValueError is raised when NaN in the orientation causes a soma generation error
                 pass
 
         raise SkipSynthesisError(
@@ -303,6 +579,14 @@ def completion(self, synthesized):
             synthesized.apical_points, morph_path
         )
 
+        # enforce freeing the memory used by orientation NRRD data and meshes
+        if hasattr(self.context, "orientations"):
+            if self.context.orientations is not None:
+                del self.context.orientations
+        if hasattr(self.context, "mesh"):
+            if self.context.mesh is not None:
+                del self.context.mesh
+
         return {
             "name": morph_name,
             "apical_points": synthesized.apical_points,
@@ -313,14 +597,7 @@ def completion(self, synthesized):
     def _synthesize_once(self, rng) -> SynthesisResult:
         """One try to synthesize the cell."""
         params = self._correct_position_orientation_scaling(self.params.tmd_parameters)
-
-        # Today we don't use the atlas during the synthesis (we just use it to
-        # generate the parameters) so we can
-        # grow the cell as if it was in [0, 0, 0]
-        # But the day we use it during the actual growth, we will need to grow the cell at its
-        # absolute position and translate to [0, 0, 0] after the growth
-        if self.params.recenter:
-            params["origin"] = [0, 0, 0]
+        params["origin"] = self.cell.position.tolist()
 
         if self.params.tmd_parameters["diameter_params"]["method"] == "external":
             external_diametrizer = build_diameters.build
@@ -332,16 +609,28 @@ def _synthesize_once(self, rng) -> SynthesisResult:
         else:
             axon_morph = None
 
+        context = {"debug_data": self.debug_infos["input_scaling"]}
+        if self.context.directions is not None:
+            if "constraints" not in context:
+                context["constraints"] = []
+            context["constraints"] += self.context.get_direction(self.cell)
+        if self.context.boundaries is not None:
+            if "constraints" not in context:
+                context["constraints"] = []
+            context["constraints"] += self.context.get_boundaries(self.cell)
+            if context["constraints"] and "mesh_name" in context["constraints"][-1]:
+                self.debug_infos["mesh_name"] = context["constraints"][-1]["mesh_name"]
+
         grower = NeuronGrower(
             input_parameters=params,
             input_distributions=self.params.tmd_distributions,
             external_diametrizer=external_diametrizer,
             skip_preprocessing=True,
-            context={"debug_data": self.debug_infos["input_scaling"]},
+            context=context,
             rng_or_seed=rng,
         )
         grower.grow()
-
+        mt.translate(grower.neuron, -self.cell.position)
         self._post_growth_rescaling(grower, params)
 
         if axon_morph is not None:
diff --git a/region_grower/modify.py b/region_grower/modify.py
index 4bf42a0..618c786 100644
--- a/region_grower/modify.py
+++ b/region_grower/modify.py
@@ -134,7 +134,7 @@ def input_scaling(
                 "with_debug_info": debug_info is not None,
             },
         }
-    else:
+    elif neurite_type != "axon":
         if debug_info is not None:
             debug_info.update(
                 {
diff --git a/region_grower/morph_io.py b/region_grower/morph_io.py
index 9106ee1..33f5545 100644
--- a/region_grower/morph_io.py
+++ b/region_grower/morph_io.py
@@ -1,4 +1,5 @@
 """Module to load and write morphologies."""
+
 import os
 import random
 import uuid
@@ -39,11 +40,12 @@ def get(self, name, options=None):
 class MorphWriter:
     """Helper class for writing morphologies."""
 
-    def __init__(self, output_dir, file_exts, skip_write=False):
+    def __init__(self, output_dir, file_exts, skip_write=False, resume=False):
         self.output_dir = os.path.realpath(output_dir)
         self.file_exts = file_exts
         self._dir_depth = None
         self.skip_write = skip_write
+        self.resume = resume
 
     @staticmethod
     def _calc_dir_depth(num_files, max_files_per_dir=None):
@@ -75,6 +77,9 @@ def prepare(self, num_files, max_files_per_dir=None, overwrite=False):
         - ensure it either does not exist, or is empty
         - if it does not exist, create an empty one
         """
+        if self.resume:
+            overwrite = True
+
         if self.skip_write:
             return
         self._dir_depth = MorphWriter._calc_dir_depth(
@@ -117,5 +122,18 @@ def __call__(self, morph, seed):
 
         if not self.skip_write:
             for path in ext_paths:
-                morph.write(path)
+                if not (self.resume and path.exists()):
+                    morph.write(path)
         return str(full_stem), ext_paths
+
+    def check_resume(self, seed):
+        """Check if the morphology already exists in resume mode, and return its path."""
+        if not self.resume:
+            return False
+
+        morph_name, subdirs = self.generate_name(seed)
+        full_stem = Path(subdirs, morph_name)
+        for ext_path in self.filepaths(full_stem):
+            if ext_path.exists():
+                return ext_path
+        return None
diff --git a/region_grower/synthesize_morphologies.py b/region_grower/synthesize_morphologies.py
index 785c2a3..a773188 100644
--- a/region_grower/synthesize_morphologies.py
+++ b/region_grower/synthesize_morphologies.py
@@ -6,6 +6,7 @@
 - assign identity cell rotations to MVD3/sonata
 - optional axon grafting "on-the-fly"
 """
+
 import json
 import logging
 import os
@@ -41,6 +42,7 @@ from region_grower.context import SpaceContext
 from region_grower.context import SpaceWorker
 from region_grower.context import SynthesisParameters
+from region_grower.context import SynthesisResult
 from region_grower.morph_io import MorphLoader
 from region_grower.morph_io import MorphWriter
 from region_grower.utils import assign_morphologies
@@ -123,10 +125,20 @@ def _parallel_wrapper(
         orientation=np.array([row["orientation"]]),
         mtype=row["mtype"],
         depth=row["current_depth"],
+        other_parameters={
+            p: row[p]
+            for p in row.keys()
+            if p not in ["x", "y", "z", "orientation", "mtype", "current_depth"]
+        },
     )
     current_space_context = SpaceContext(
         layer_depths=row["layer_depths"],
         cortical_depths=cortical_depths[row["synthesis_region"]],
+        directions=row["directions"] if "directions" in row else None,
+        boundaries=row["boundaries"] if "boundaries" in row else None,
+        atlas_info=json.loads(row["atlas_info"]) if "atlas_info" in row else None,
+        soma_position=current_cell.position,
+        soma_depth=row["current_depth"],
     )
 
     axon_scale = row.get("axon_scale", None)
@@ -144,7 +156,6 @@ def _parallel_wrapper(
         axon_morph_scale=axon_scale,
         rotational_jitter_std=rotational_jitter_std,
         scaling_jitter_std=scaling_jitter_std,
-        recenter=True,
         seed=row["seed"],
         min_hard_scale=min_hard_scale,
     )
@@ -154,9 +165,16 @@ def _parallel_wrapper(
             current_synthesis_parameters,
             computation_parameters,
         )
-        new_cell = space_worker.synthesize()
+        resume_path = space_worker.internals.morph_writer.check_resume(row["seed"])
+        if resume_path:
+            try:
+                new_cell = SynthesisResult(morphio.mut.Morphology(resume_path), [], [])
+            except morphio.RawDataError:
+                # in case an invalid morphology was written, just re-synthesize it
+                new_cell = space_worker.synthesize()
+        else:
+            new_cell = space_worker.synthesize()
         res = space_worker.completion(new_cell)
-
         res["debug_infos"] = dict(space_worker.debug_infos)
     except (SkipSynthesisError, RegionGrowerError, NoDendriteException) as exc:  # pragma: no cover
         LOGGER.error("Skip %s because of the following error: %s", row.name, exc)
@@ -198,6 +216,7 @@ class SynthesizeMorphologies:
             should be used to graft the axon on each synthesized morphology.
         base_morph_dir: the path containing the morphologies listed in the TSV file given in
             ``morph_axon``.
+        synthesize_axons: set to True to synthesize axons instead of grafting them.
         atlas_cache: the path to the directory used for the atlas cache.
         seed: the starting seed to use (note that the GID of each cell is added to this seed
             to ensure all cells has different seeds).
@@ -206,6 +225,7 @@ class SynthesizeMorphologies:
         max_files_per_dir: the maximum number of file in each directory (will create
             subdirectories if needed).
        overwrite: if set to False, the directory given to ``out_morph_dir`` must be empty.
+        resume: set to True to resume synthesis and skip cells whose morphology file exists.
         max_drop_ratio: the maximum drop ratio allowed for any mtype.
         scaling_jitter_std: the std of the scaling jitter.
         rotational_jitter_std: the std of the rotational jitter.
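# Illustrative sketch, not part of the patch: how the new resume flag is meant
# to be used with MorphWriter (signatures as added above; the output path and
# seed are made up). With resume=True, prepare() forces overwrite, and
# check_resume() returns the path of an already-written morphology, which
# _parallel_wrapper then reloads instead of re-synthesizing the cell.
from region_grower.morph_io import MorphWriter

writer = MorphWriter("out/morphologies", ["swc"], resume=True)
writer.prepare(num_files=100)
existing = writer.check_resume(seed=42)
if existing:
    print(f"Resuming: reusing {existing}")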
@@ -236,6 +256,7 @@ def __init__(
         out_apical_nrn_sections=None,
         max_files_per_dir=None,
         overwrite=False,
+        resume=False,
         max_drop_ratio=0,
         scaling_jitter_std=None,
         rotational_jitter_std=None,
@@ -251,6 +272,7 @@ def __init__(
         hide_progress_bar=False,
         dask_config=None,
         chunksize=None,
+        synthesize_axons=False,
     ):  # pylint: disable=too-many-arguments, too-many-locals, too-many-statements
         self.seed = seed
         self.scaling_jitter_std = scaling_jitter_std
@@ -300,6 +322,16 @@ def __init__(
         with open(tmd_distributions, "r", encoding="utf-8") as f:
             self.tmd_distributions = convert_from_legacy_neurite_type(json.load(f))
 
+        for params in self.tmd_parameters.values():
+            for param in params.values():
+                if synthesize_axons:
+                    if "axon" not in param["grow_types"]:
+                        LOGGER.warning(
+                            "Axon synthesis requested but no axon data found: "
+                            "axons will not be synthesized"
+                        )
+                elif "axon" in param["grow_types"]:
+                    param["grow_types"].remove("axon")
+
         # Set default values to tmd_parameters and tmd_distributions
         self.set_default_params_and_distrs()
 
@@ -307,7 +339,9 @@ def __init__(
         self.set_cortical_depths()
 
         LOGGER.info("Preparing morphology output folder in %s", out_morph_dir)
-        self.morph_writer = MorphWriter(out_morph_dir, out_morph_ext or ["swc"], skip_write)
+        self.morph_writer = MorphWriter(
+            out_morph_dir, out_morph_ext or ["swc"], skip_write, resume=resume
+        )
         self.morph_writer.prepare(
             num_files=len(self.cells.positions),
             max_files_per_dir=max_files_per_dir,
@@ -332,7 +366,8 @@ def __init__(
 
         LOGGER.info("Fetching atlas data from %s", atlas)
         self.assign_atlas_data(min_depth, max_depth)
-        if morph_axon is not None:
+
+        if morph_axon is not None and not synthesize_axons:
             LOGGER.info("Loading axon morphologies from %s", morph_axon)
             self.axon_morph_list = load_morphology_list(morph_axon, self.task_ids)
             check_na_morphologies(
@@ -355,7 +390,7 @@ def set_cortical_depths(self):
         for region in self.regions:
             if (
                 region not in self.atlas.region_structure
-                or self.atlas.region_structure[region]["thicknesses"] is None
+                or self.atlas.region_structure[region].get("thicknesses") is None
             ):  # pragma: no cover
                 self.cortical_depths[region] = self.cortical_depths["default"]
             else:
@@ -492,25 +527,11 @@ def assign_atlas_data(self, min_depth=25, max_depth=5000):
             positions = self.cells.positions[region_mask]
 
             LOGGER.debug("Extract atlas data for %s region", _region)
-            if (
-                _region in self.atlas.regions
-                and self.atlas.region_structure[_region].get("thicknesses", None) is not None
-                and self.atlas.region_structure[_region].get("layers", None) is not None
-            ):
-                layers = self.atlas.layers[_region]
-                thicknesses = [self.atlas.layer_thickness(layer) for layer in layers]
-                depths = self.atlas.compute_region_depth(_region)
-                layer_depths = self.atlas.get_layer_boundary_depths(
-                    positions, thicknesses
-                ).T.tolist()
-                current_depths = np.clip(depths.lookup(positions), min_depth, max_depth)
-            else:
-                LOGGER.warning(
-                    "We are not able to synthesize the region %s, we fallback to 'default' region",
-                    _region,
-                )
-                layer_depths = None
-                current_depths = None
+            layers = self.atlas.layers[_region]
+            thicknesses = [self.atlas.layer_thickness(layer) for layer in layers]
+            depths = self.atlas.compute_region_depth(_region)
+            layer_depths = self.atlas.get_layer_boundary_depths(positions, thicknesses).T.tolist()
+            current_depths = np.clip(depths.lookup(positions), min_depth, max_depth)
 
             self.cells_data.loc[region_mask, "current_depth"] = current_depths
             self.cells_data.loc[region_mask, "layer_depths"] = pd.Series(
@@ -518,13 +539,48 @@ def assign_atlas_data(self, min_depth=25, max_depth=5000):
             )
 
             LOGGER.debug("Extract orientations data for %s region", _region)
-            orientations = self.atlas.orientations.lookup(positions)
+            if self.atlas.orientations is not None:
+                orientations = self.atlas.orientations.lookup(positions)
+            else:
+                orientations = np.array(len(positions) * [np.eye(3)])
             self.cells_data.loc[region_mask, "orientation"] = pd.Series(
                 data=orientations.tolist(),
                 index=self.cells_data.loc[region_mask].index,
                 dtype=object,
             )
 
+            if "directions" in self.atlas.region_structure[_region]:
+                self.cells_data.loc[region_mask, "directions"] = json.dumps(
+                    self.atlas.region_structure[_region]["directions"]
+                )
+            if "boundaries" in self.atlas.region_structure[_region]:
+                boundaries = self.atlas.region_structure[_region]["boundaries"]
+                for boundary in boundaries:
+                    if not Path(boundary["path"]).is_absolute():
+                        boundary["path"] = str(
+                            (self.atlas.region_structure_base_path / boundary["path"]).absolute()
+                        )
+                    if boundary.get("multimesh_mode", "closest") == "territories":
+                        territories = self.atlas.atlas.load_data("glomerular_territories")
+                        pos = self.cells_data.loc[region_mask, ["x", "y", "z"]].to_numpy()
+                        self.cells_data.loc[region_mask, "glomerulus_id"] = territories.lookup(
+                            pos, outer_value=-1
+                        )
+
+                self.cells_data.loc[region_mask, "boundaries"] = json.dumps(boundaries)
+            if (
+                "directions" in self.atlas.region_structure[_region]
+                or "boundaries" in self.atlas.region_structure[_region]
+            ):
+                self.cells_data.loc[region_mask, "atlas_info"] = json.dumps(
+                    {
+                        "voxel_dimensions": self.atlas.brain_regions.voxel_dimensions.tolist(),
+                        "offset": self.atlas.brain_regions.offset.tolist(),
+                        "shape": self.atlas.brain_regions.shape,
+                        "direction_nrrd_path": self.atlas.atlas.fetch_data("orientation"),
+                    }
+                )
+
     @property
     def task_ids(self):
         """Task IDs (= CellCollection IDs)."""
@@ -588,8 +644,8 @@ def compute(self):
 
         func_kwargs = {
             "computation_parameters": computation_parameters,
-            "cortical_depths": self.cortical_depths,
             "rotational_jitter_std": self.rotational_jitter_std,
+            "cortical_depths": self.cortical_depths,
             "scaling_jitter_std": self.scaling_jitter_std,
             "min_hard_scale": self.min_hard_scale,
             "tmd_parameters": self.tmd_parameters,
@@ -609,6 +665,9 @@ def compute(self):
             }
         )
 
+        # shuffle rows to spread the load more evenly across tasks
+        self.cells_data = self.cells_data.sample(frac=1.0).reset_index()
+
         if self.nb_processes == 0:
             LOGGER.info("Start computation")
             computed = self.cells_data.apply(
@@ -628,8 +687,7 @@ def compute(self):
             computed = future.compute()
 
         LOGGER.info("Format results")
-        res = self.cells_data.join(computed)
-        return res
+        return self.cells_data.join(computed).sort_values(by="index").set_index("index")
 
     def finalize(self, result: pd.DataFrame):
         """Finalize master work.
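# Illustrative sketch, not part of the patch: the shape of the per-region
# "directions" and "boundaries" entries that assign_atlas_data serializes and
# SpaceContext.get_direction/get_boundaries consume. Keys are taken from the
# code above; the region, mtype and path values are made up.
region_structure_example = {
    "O0": {
        "directions": [
            {
                "mtypes": ["L1_DAC"],  # optional mtype filter
                "params": {
                    "direction": [0.0, 1.0, 0.0],
                    "mode": "parallel",  # or "perpendicular"
                    "power": 1.0,
                    "layers": [1, 2],  # optional: only apply within these layers
                },
            }
        ],
        "boundaries": [
            {
                "path": "meshes",  # a mesh file, or a directory for multimesh modes
                "mesh_type": "voxel",  # mesh coordinates are voxel indices
                "mode": "repulsive",  # or "attractive"
                "multimesh_mode": "closest",  # closest | closest_y | inside | territories
                "params_section": {"d_min": 5, "d_max": 50},
                "params_trunk": {"d_min": 5, "d_max": 5000},
            }
        ],
    }
}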
diff --git a/setup.py b/setup.py
index e16c5e2..3a8c636 100644
--- a/setup.py
+++ b/setup.py
@@ -28,6 +28,8 @@
     "tqdm>=4.28.1",
     "urllib3>=1.26,<2; python_version < '3.9'",
     "voxcell>=3.1.1,<4",
+    "trimesh>=3.21",
+    "rtree>=1.0.1",
 ]
 
 mpi_extras = [
@@ -54,6 +56,7 @@
     "pytest-html>=2",
     "pytest-mock>=3.5",
     "pytest-xdist>=3.0.2",
+    "neurocollage>=0.3.0",
 ]
 
 setup(
diff --git a/tests/conftest.py b/tests/conftest.py
index ad40e30..7eaf84e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -19,6 +19,7 @@
 from .data_factories import generate_cell_collection
 from .data_factories import generate_cells_df
 from .data_factories import generate_input_cells
+from .data_factories import generate_mesh
 from .data_factories import generate_small_O1
 from .data_factories import get_cell_mtype
 from .data_factories import get_cell_orientation
@@ -37,6 +38,16 @@ def small_O1_path(tmpdir_factory):
     return atlas_directory
 
 
+@pytest.fixture(scope="session")
+def mesh(small_O1_path, tmpdir_factory):
+    """Generate mesh from atlas."""
+    mesh_path = str(tmpdir_factory.mktemp("mesh") / "mesh.obj")
+
+    atlas = {"atlas": small_O1_path, "structure": DATA / "region_structure.yaml"}
+    generate_mesh(atlas, mesh_path)
+    return mesh_path
+
+
 @pytest.fixture(scope="session")
 def small_O1(small_O1_path):
     """Open the atlas."""
@@ -113,7 +124,15 @@ def space_context():
     """A space context object."""
     layer_depth = [0.0, 200.0, 300.0, 400.0, 500.0, 600.0, 800.0]
     thicknesses = [165, 149, 353, 190, 525, 700]
-    return SpaceContext(layer_depths=layer_depth, cortical_depths=np.cumsum(thicknesses))
+    atlas_info = {
+        "voxel_dimensions": [100.0, 100.0, 100.0],
+        "offset": [-1100.0, -100.0, -1000.0],
+        "shape": (22, 10, 20),
+    }
+
+    return SpaceContext(
+        layer_depths=layer_depth, cortical_depths=np.cumsum(thicknesses), atlas_info=atlas_info
+    )
 
 
 @pytest.fixture(scope="function")
diff --git a/tests/data/apical_boundary.yaml b/tests/data/apical_boundary.yaml
new file mode 100644
index 0000000..fe76a95
--- /dev/null
+++ b/tests/data/apical_boundary.yaml
@@ -0,0 +1,16 @@
+14a03569d26b949692e5dfe8cb1855fe:
+- 4.369110107421875
+- 55.535736083984375
+- -6.2556915283203125
+216363698b529b4a97b750923ceb3ffd:
+- 5.216094970703125
+- 114.55657958984375
+- -12.4947509765625
+4462ebfc5f915ef09cfbac6e7687a66e:
+- -11.084274291992188
+- 111.49124145507812
+- 0.980438232421875
+e3e70682c2094cac629f6fbed82c07cd:
+- 16.126876831054688
+- 160.52789306640625
+- 22.863800048828125
diff --git a/tests/data/apical_no_axon.yaml b/tests/data/apical_no_axon.yaml
index bfd6aa6..424b442 100644
--- a/tests/data/apical_no_axon.yaml
+++ b/tests/data/apical_no_axon.yaml
@@ -19,9 +19,9 @@
 - 127.51496124267578
 - 16.63180923461914
 6513270e269e0d37f2a74de452e6b438:
-- 1.8200105428695679
-- 113.30069732666016
-- 0.27308279275894165
+- -8.12664794921875
+- 114.60183715820312
+- 1.4770965576171875
 7b89296c6dcbac5008577eb1924770d3:
 - -2.5671095848083496
 - 58.11355972290039
@@ -31,21 +31,21 @@
 - 371.3540344238281
 - 6.843407154083252
 b8a1abcd1a6916c74da4f9fc3c6da5d7:
-- 2.9995338916778564
-- 114.8524169921875
-- 12.829068183898926
+- -13.87677001953125
+- 162.44619750976562
+- 1.412567138671875
 cd613e30d8f16adf91b7584a2265b1f5:
-- 14.439825057983398
-- 114.19844818115234
-- -7.658063888549805
+- 1.1701507568359375
+- 97.20172119140625
+- 1.061187744140625
 d95bafc8f2a4d27bdcf4bb99f4bea973:
-- 3.320021867752075
-- 59.75845718383789
-- -2.885958433151245
+- -2.2343902587890625
+- 59.40399169921875
+- -0.331756591796875
 db5b5fab8f4d3e27dda1494c73cf256d:
-- 10.5548677444458
-- 93.02537536621094
-- 0.9866926074028015
+- -1.606964111328125
+- 58.9842529296875
+- 1.571624755859375
 e3e70682c2094cac629f6fbed82c07cd:
 - -4.8529953956604
 - 158.9239959716797
diff --git a/tests/data/parameters.json b/tests/data/parameters.json
index bb571ac..3a024fb 100644
--- a/tests/data/parameters.json
+++ b/tests/data/parameters.json
@@ -7,13 +7,16 @@
       "has_apical_tuft": true,
       "metric": "path_distances",
       "modify": null,
-      "orientation": [
-        [
-          0.0,
-          1.0,
-          0.0
-        ]
-      ],
+      "orientation": {
+        "mode": "normal_pia_constraint",
+        "values": {
+          "direction": {
+            "mean": 0.0,
+            "std": 0.0
+          }
+        }
+      },
+      "radius": 0.3,
       "randomness": 0.15,
       "step_size": {
         "norm": {
@@ -30,7 +33,17 @@
       "growth_method": "tmd",
       "metric": "path_distances",
       "modify": null,
-      "orientation": null,
+      "orientation": {
+        "mode": "pia_constraint",
+        "values": {
+          "form": "step",
+          "params": [
+            0.44484279339093336,
+            0.5461595884090537
+          ]
+        }
+      },
+      "radius": 0.3,
       "randomness": 0.15,
       "step_size": {
         "norm": {
@@ -66,13 +79,16 @@
       "has_apical_tuft": true,
       "metric": "path_distances",
       "modify": null,
-      "orientation": [
-        [
-          0.0,
-          1.0,
-          0.0
-        ]
-      ],
+      "orientation": {
+        "mode": "normal_pia_constraint",
+        "values": {
+          "direction": {
+            "mean": 0.0,
+            "std": 0.0
+          }
+        }
+      },
+      "radius": 0.3,
       "randomness": 0.15,
       "step_size": {
         "norm": {
@@ -89,7 +105,17 @@
       "growth_method": "tmd",
       "metric": "path_distances",
       "modify": null,
-      "orientation": null,
+      "orientation": {
+        "mode": "pia_constraint",
+        "values": {
+          "form": "step",
+          "params": [
+            0.44484279339093336,
+            0.5461595884090537
+          ]
+        }
+      },
+      "radius": 0.3,
       "randomness": 0.15,
       "step_size": {
         "norm": {
diff --git a/tests/data_factories.py b/tests/data_factories.py
index a496ba5..457f65d 100644
--- a/tests/data_factories.py
+++ b/tests/data_factories.py
@@ -8,6 +8,8 @@
 import numpy as np
 import pandas as pd
+import yaml
+from neurocollage.mesh_helper import MeshHelper
 from voxcell import CellCollection
 
 DF_SIZE = 12
@@ -33,6 +35,31 @@ def generate_small_O1(directory):
     return str(directory)
 
 
+def generate_mesh(atlas, mesh_path):
+    """Generate a mesh from atlas to test boundary code."""
+    mesh_helper = MeshHelper(atlas, "O0")
+    mesh = mesh_helper.get_boundary_mesh()
+    mesh.export(mesh_path)  # pylint: disable=no-member
+
+
+def generate_region_structure_boundary(
+    region_structure_path, out_path, mesh, with_sections=True, with_trunks=True
+):
+    """Generate region_structure file with boundary entries."""
+    with open(region_structure_path, encoding="utf-8") as f:
+        structure = yaml.safe_load(f)
+    structure["O0"]["boundaries"] = [{"path": mesh}]
+
+    if with_sections:
+        structure["O0"]["boundaries"][0]["params_section"] = {"d_min": 5, "d_max": 50}
+
+    if with_trunks:
+        structure["O0"]["boundaries"][0]["params_trunk"] = {"d_min": 5, "d_max": 5000}
+
+    with open(out_path, "w", encoding="utf-8") as f:
+        yaml.dump(structure, f)
+
+    return out_path
+
+
 def generate_cells_df():
     """Raw data for the cell collection."""
     x = [200] * DF_SIZE
@@ -132,7 +159,7 @@ def get_tmd_distributions(filename):
 
 def get_cell_position():
     """The cell position."""
-    return [0, 500, 0]
+    return np.array([0, 500, 0])
 
 
 def get_cell_mtype():
diff --git a/tests/test_context.py b/tests/test_context.py
index bc1736e..bc8237a 100644
--- a/tests/test_context.py
+++ b/tests/test_context.py
@@ -78,9 +78,17 @@ def test_distance_to_constraint(self, space_context):
         assert space_context.distance_to_constraint(-100, {"layer": 6, "fraction": 0}) == -900
         assert space_context.distance_to_constraint(1000, {"layer": 6, "fraction": 0}) == 200
 
-        for i in [np.nan, None, []]:
-            space_context.layer_depths = i
-            assert space_context.distance_to_constraint(1000, {"layer": 6, "fraction": 0}) is None
+    def test_indices_to_positions(self, space_context):
+        pos = space_context.indices_to_positions([0, 0, 0])
+        assert_array_equal(pos, [-1100.0, -100.0, -1000.0])
+        pos = space_context.indices_to_positions([100, 100, 100])
+        assert_array_equal(pos, [8900.0, 9900.0, 9000.0])
+
+    def test_positions_to_indices(self, space_context):
+        indices = space_context.positions_to_indices([-1100.0, -100.0, -1000.0])
+        assert_array_equal(indices, [0, 0, 0])
+        indices = space_context.positions_to_indices([8900.0, 9900.0, -9000.0])
+        assert_array_equal(indices, [-1, -1, -1])
 
 
 class TestSpaceWorker:
@@ -113,22 +121,30 @@ def test_context(
         )
 
         # This tests that input orientations are not mutated by the synthesize() call
-        assert_array_almost_equal(
-            synthesis_parameters.tmd_parameters["apical_dendrite"]["orientation"], [[0.0, 1.0, 0.0]]
+        assert (
+            list(
+                dictdiffer.diff(
+                    synthesis_parameters.tmd_parameters["apical_dendrite"]["orientation"],
+                    {
+                        "mode": "normal_pia_constraint",
+                        "values": {"direction": {"mean": 0.0, "std": 0.0}},
+                    },
+                )
+            )
+            == []
         )
-
         assert_array_almost_equal(
             result.neuron.soma.points,
             np.array(
                 [
-                    [-5.785500526428223, 4.9841227531433105, 0.0],
-                    [-7.5740227699279785, -0.9734848141670227, 0.0],
-                    [-1.966903805732727, -7.378671169281006, 0.0],
-                    [3.565324068069458, -6.752922534942627, 0.0],
-                    [7.266839027404785, -2.346604108810425, 0.0],
-                    [7.384983062744141, 1.059786081314087, 0.0],
-                    [6.818241119384766, 3.4387624263763428, 0.0],
-                    [4.675901919111924e-16, 7.636327266693115, 0.0],
+                    [7.0957971, -2.8218994, 0.0],
+                    [0.0, 7.6363220, 0.0],
+                    [-3.0748143, 6.9899292, 0.0],
+                    [-4.8864875, 5.8681946, 0.0],
+                    [-7.5141201, -1.3606873, 0.0],
+                    [-6.1610136, -3.6210632, 0.0],
+                    [-1.2113731, -7.5396423, 0.0],
+                    [0.67872918, -7.6061096, 0.0],
                 ],
                 dtype=np.float32,
             ),
@@ -189,25 +205,25 @@ def test_context_external_diametrizer(
         assert_array_almost_equal(
             synthesis_parameters.tmd_parameters["apical_dendrite"]["orientation"], [[0.0, 1.0, 0.0]]
         )
-
         assert_array_almost_equal(
             result.neuron.soma.points,
             np.array(
                 [
-                    [-5.785500526428223, 4.9841227531433105, 0.0],
-                    [-7.5740227699279785, -0.9734848141670227, 0.0],
-                    [-1.966903805732727, -7.378671169281006, 0.0],
-                    [3.565324068069458, -6.752922534942627, 0.0],
-                    [7.266839027404785, -2.346604108810425, 0.0],
-                    [7.384983062744141, 1.059786081314087, 0.0],
-                    [6.818241119384766, 3.4387624263763428, 0.0],
-                    [4.675901919111924e-16, 7.636327266693115, 0.0],
+                    [-5.785500526428223, 4.984130859375, 0.0],
+                    [-7.5740227699279785, -0.973480224609375, 0.0],
+                    [-1.966903805732727, -7.378662109375, 0.0],
+                    [3.565324068069458, -6.7529296875, 0.0],
+                    [7.266839027404785, -2.34661865234375, 0.0],
+                    [7.384983062744141, 1.059783935546875, 0.0],
+                    [6.818241119384766, 3.438751220703125, 0.0],
+                    [4.675901919111924e-16, 7.636322021484375, 0.0],
                 ],
                 dtype=np.float32,
             ),
         )
 
         assert len(result.neuron.root_sections) == 4
+
         assert_array_almost_equal(
             next(result.neuron.iter()).points,
             np.array(
@@ -258,7 +274,7 @@ def test_scale(self, small_context_worker, tmd_parameters):
         assert_array_almost_equal(
             [  # Check only first and last points of neurites
-                np.around(np.array([neu.points[0], neu.points[-1]]), 6)
+                np.around(np.array([neu.points[0], neu.points[-1]]), 5)
                 for neu in result.neuron.root_sections
             ],
             [
@@ -440,6 +456,11 @@ def test_scale(self, small_context_worker, tmd_parameters):
             }
         }
         result = small_context_worker.synthesize()
+        assert [i.type for i in result.neuron.root_sections] == expected_types[1:]
+
+        # test removing basals if the scale is too small
+        small_context_worker.params.min_hard_scale = 2.2
+        result = small_context_worker.synthesize()
         assert [i.type for i in result.neuron.root_sections] == [expected_types[-1]]
 
         params["basal_dendrite"]["orientation"] = {}
@@ -484,18 +505,9 @@ def test_debug_scales(self, small_context_worker, tmd_parameters):
                     "min_target_thickness": 1.0,
                 },
                 "scaling": [
-                    {
-                        "max_ph": 126.96580088057513,
-                        "scaling_ratio": 0.9554140127388535,
-                    },
-                    {
-                        "max_ph": 139.93140451880743,
-                        "scaling_ratio": 0.9554140127388535,
-                    },
-                    {
-                        "max_ph": 143.28779114733433,
-                        "scaling_ratio": 0.9554140127388535,
-                    },
+                    {"max_ph": 78.53590605172101, "scaling_ratio": 0.9554140127388535},
+                    {"max_ph": 126.96580088057513, "scaling_ratio": 0.9554140127388535},
+                    {"max_ph": 126.96580088057513, "scaling_ratio": 0.9554140127388535},
                 ],
             },
             "target_func": {
@@ -507,10 +519,7 @@ def test_debug_scales(self, small_context_worker, tmd_parameters):
                     "min_target_path_distance": 1.0,
                 },
                 "scaling": [
-                    {
-                        "max_ph": 420.70512274498446,
-                        "scaling_ratio": 0.18064909574697127,
-                    }
+                    {"max_ph": 255.45843100194077, "scaling_ratio": 0.2975043716581138}
                 ],
             },
         },
diff --git a/tests/test_synthesize_morphologies.py b/tests/test_synthesize_morphologies.py
index d1d72e2..e506fcf 100644
--- a/tests/test_synthesize_morphologies.py
+++ b/tests/test_synthesize_morphologies.py
@@ -4,6 +4,7 @@
 import logging
 import os
 import shutil
+import sys
 from copy import deepcopy
 from itertools import combinations
 from pathlib import Path
@@ -53,6 +54,7 @@ def create_args(
     axon_morph_tsv,
     out_apical_NRN_sections,
     min_depth,
+    region_structure,
 ):
     """Create the arguments used for tests."""
     args = {}
@@ -90,7 +92,7 @@ def create_args(
     args["max_drop_ratio"] = 0.5
     args["rotational_jitter_std"] = 10
     args["scaling_jitter_std"] = 0.5
-    args["region_structure"] = DATA / "region_structure.yaml"
+    args["region_structure"] = region_structure
 
     return args
 
@@ -118,6 +120,7 @@ def test_synthesize(
         axon_morph_tsv if with_axon else None,
         "apical_NRN_sections.yaml" if with_NRN else None,
         min_depth,
+        DATA / "region_structure.yaml",
     )
 
     synthesizer = SynthesizeMorphologies(**args)
@@ -175,6 +178,7 @@ def test_synthesize_dask_config(
         None,
         None,
         100,
+        DATA / "region_structure.yaml",
     )
 
     custom_scratch_config = str(tmp_folder / "custom_scratch_config")
@@ -233,6 +237,7 @@ def test_synthesize_skip_write(
         axon_morph_tsv if with_axon else None,
         "apical_NRN_sections.yaml" if with_NRN else None,
         min_depth,
+        DATA / "region_structure.yaml",
     )
     args["skip_write"] = True
     args["nb_processes"] = nb_processes
@@ -285,6 +290,109 @@ def test_synthesize_skip_write(
     ]
 
 
+@pytest.mark.parametrize("with_sections", [True, False])
+@pytest.mark.parametrize("with_trunks", [True, False])
+def test_synthesize_boundary(
+    tmpdir,
+    small_O1_path,
+    input_cells,
+    axon_morph_tsv,
+    mesh,
+    with_sections,
+    with_trunks,
+):  # pylint: disable=unused-argument,too-many-locals
+    """Test morphology synthesis with boundaries, skipping the write step."""
+    with_axon = True
+    with_NRN = True
+    min_depth = 25
+    tmp_folder = Path(tmpdir)
+
+    # pylint: disable=import-outside-toplevel
+    from .data_factories import generate_region_structure_boundary
+
+    region_structure = "region_structure.yaml"
+    generate_region_structure_boundary(
+        DATA / "region_structure.yaml", region_structure, mesh, with_sections, with_trunks
+    )
+    args = create_args(
+        False,
+        tmp_folder,
+        input_cells,
+        small_O1_path,
+        axon_morph_tsv if with_axon else None,
+        "apical_NRN_sections.yaml" if with_NRN else None,
+        min_depth,
+        region_structure,
+    )
+    args["skip_write"] = True
+
+    synthesizer = SynthesizeMorphologies(**args)
+    res = synthesizer.synthesize()
+
+    assert (res["x"] == 200).all()
+    assert (res["y"] == 200).all()
+    assert (res["z"] == 200).all()
+    assert res["name"].tolist() == [
+        "e3e70682c2094cac629f6fbed82c07cd",
+        None,
+        "216363698b529b4a97b750923ceb3ffd",
+        None,
+        "14a03569d26b949692e5dfe8cb1855fe",
+        None,
+        "4462ebfc5f915ef09cfbac6e7687a66e",
+        None,
+    ]
+    if with_sections and with_trunks:
+        assert_allclose(res["apical_points"][0], [[16.126877, 160.5279, 22.8638]])
+        assert res["apical_points"][1] is None
+
+        assert_allclose(res["apical_points"][3], [[5.216095, 114.55658, -12.494751]])
+        assert res["apical_points"][4] is None
+
+        assert_allclose(res["apical_points"][6], [[4.36911, 55.535736, -6.2556915]])
+        assert res["apical_points"][7] is None
+
+        assert_allclose(res["apical_points"][9], [[-11.084274, 111.49124, 0.98043823]])
+        assert res["apical_points"][10] is None
+
+    if with_sections and not with_trunks:
+        assert_allclose(res["apical_points"][0], [[-0.29234314, 58.81488, 0.5384369]])
+        assert res["apical_points"][1] is None
+
+        assert_allclose(res["apical_points"][3], [[8.230438, 116.570435, -7.345169]])
+        assert res["apical_points"][4] is None
+
+        assert_allclose(res["apical_points"][6], [[11.181992, 96.11371, -12.706863]])
+        assert res["apical_points"][7] is None
+
+        assert_allclose(res["apical_points"][9], [[3.5267944, 164.42618, 1.2018433]])
+        assert res["apical_points"][10] is None
+
+    if not with_sections and with_trunks:
+        assert_allclose(res["apical_points"][0], [[8.792313, 131.91104, -4.2198334]])
+        assert res["apical_points"][1] is None
+
+        assert_allclose(res["apical_points"][3], [[2.0048676, 116.672, -5.334656]])
+        assert res["apical_points"][4] is None
+
+        assert_allclose(res["apical_points"][6], [[-2.347641, 58.229614, -0.56985474]])
+        assert res["apical_points"][7] is None
+
+        assert_allclose(res["apical_points"][9], [[6.861679, 112.31445, -13.528854]])
+        assert res["apical_points"][10] is None
+
+    # Check that the morphologies were not written
+    res_files = tmpdir.listdir()
+    assert len(res_files) == 5
+    assert sorted(i.basename for i in res_files) == [
+        "apical.yaml",
+        "apical_NRN_sections.yaml",
+        "axon_morphs.tsv",
+        "input_cells.mvd3",
+        "test_cells.mvd3",
+    ]
+
+
 def run_with_mpi():
     """Test morphology synthesis with MPI."""
     # pylint: disable=import-outside-toplevel, too-many-locals, import-error
@@ -316,6 +424,7 @@ def run_with_mpi():
         tmp_folder / "axon_morphs.tsv",
         "apical_NRN_sections.yaml",
         min_depth=25,
+        region_structure=DATA / "region_structure.yaml",
     )
 
     setup_logger("debug", prefix=f"Rank = {rank} - ")
@@ -346,12 +455,83 @@ def run_with_mpi():
         print(f"============= #{rank}: Checking results")
         expected_size = 18
         assert len(list(iter_morphology_files(tmp_folder))) == expected_size
+        check_yaml(DATA / "apical.yaml", args["out_apical"])
+        check_yaml(DATA / "apical_NRN_sections.yaml", args["out_apical_nrn_sections"])
+    finally:
+        # Clean the directory
+        print(f"============= #{rank}: Cleaning")
+        shutil.rmtree(tmp_folder)
+
+
+def run_with_mpi_boundary():
+    """Test morphology synthesis with boundaries using MPI."""
+    # pylint: disable=import-outside-toplevel, too-many-locals, import-error
+    from data_factories import generate_axon_morph_tsv
+    from data_factories import generate_cell_collection
+    from data_factories import generate_cells_df
+    from data_factories import generate_input_cells
+    from data_factories import generate_mesh
+    from data_factories import generate_region_structure_boundary
+    from data_factories import generate_small_O1
+    from data_factories import input_cells_path
+    from mpi4py import MPI
+
+    from region_grower.utils import setup_logger
 
-        check_yaml(DATA / ("apical.yaml"), args["out_apical"])
-        check_yaml(
-            DATA / ("apical_NRN_sections.yaml"),
-            args["out_apical_nrn_sections"],
+    COMM = MPI.COMM_WORLD  # pylint: disable=c-extension-no-member
+    rank = COMM.Get_rank()
+    MASTER_RANK = 0
+    is_master = rank == MASTER_RANK
+
+    tmp_folder = Path("/tmp/test-run-synthesis_" + str(uuid4()))
+    tmp_folder = COMM.bcast(tmp_folder, root=MASTER_RANK)
+    input_cells = input_cells_path(tmp_folder)
+    small_O1_path = str(tmp_folder / "atlas")
+    region_structure_path = str(tmp_folder / "region_structure_boundary.yaml")
+    args = create_args(
+        True,
+        tmp_folder,
+        input_cells,
+        small_O1_path,
+        tmp_folder / "axon_morphs.tsv",
+        "apical_NRN_sections.yaml",
+        min_depth=25,
+        region_structure=region_structure_path,
+    )
+
+    setup_logger("info")
+    logging.getLogger("distributed").setLevel(logging.ERROR)
+
+    if is_master:
+        tmp_folder.mkdir(exist_ok=True)
+        print(f"============= #{rank}: Create data")
+        cells_raw_data = generate_cells_df()
+        cell_collection = generate_cell_collection(cells_raw_data)
+        generate_input_cells(cell_collection, tmp_folder)
+        generate_small_O1(small_O1_path)
+        atlas = {"atlas": small_O1_path, "structure": DATA / "region_structure.yaml"}
+        generate_mesh(atlas, tmp_folder / "mesh.obj")
+        region_structure_path = generate_region_structure_boundary(
+            DATA / "region_structure.yaml", region_structure_path, str(tmp_folder / "mesh.obj")
         )
+
+        generate_axon_morph_tsv(tmp_folder)
+        for dest in range(1, COMM.Get_size()):
+            req = COMM.isend("done", dest=dest)
+    else:
+        req = COMM.irecv(source=0)
+        req.wait()
+
+    synthesizer = SynthesizeMorphologies(**args)
+    try:
+        print(f"============= #{rank}: Start synthesize")
+        synthesizer.synthesize()
+
+        # Check results
+        print(f"============= #{rank}: Checking results")
+        expected_size = 12
+        assert len(list(iter_morphology_files(tmp_folder))) == expected_size
+        check_yaml(DATA / "apical_boundary.yaml", args["out_apical"])
     finally:
         # Clean the directory
         print(f"============= #{rank}: Cleaning")
         shutil.rmtree(tmp_folder)
@@ -537,6 +717,7 @@ def test_inconsistent_params(
         axon_morph_tsv,
         "apical_NRN_sections.yaml",
         min_depth,
+        DATA / "region_structure.yaml",
     )
 
     args["out_morph_ext"] = ["h5"]
@@ -569,6 +750,7 @@ def test_inconsistent_context(
         axon_morph_tsv,
         "apical_NRN_sections.yaml",
         min_depth,
+        DATA / "region_structure.yaml",
    )
 
     args["nb_processes"] = 0
@@ -603,4 +785,7 @@ def test_inconsistent_context(
 
 if __name__ == "__main__":  # pragma: no cover
-    run_with_mpi()
+    if sys.argv[-1] == "boundary":
+        run_with_mpi_boundary()
+    else:
+        run_with_mpi()
diff --git a/tox.ini b/tox.ini
index 5227512..ced097a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -49,7 +49,9 @@ deps =
     pytest
 allowlist_externals =
     mpiexec
-commands = mpiexec -n 4 {envbindir}/python tests/test_synthesize_morphologies.py
+commands =
+    mpiexec -n 4 {envbindir}/python tests/test_synthesize_morphologies.py
+    mpiexec -n 4 {envbindir}/python tests/test_synthesize_morphologies.py boundary
 
 [testenv:coverage]
 skip_install = true
@@ -81,7 +83,7 @@ commands =
 basepython = python3.8
 
 [testenv:lint]
-basepython = python3.8
+basepython = python3
 deps =
     pre-commit
     pylint
@@ -90,7 +92,7 @@ commands =
     pylint -j {env:PYLINT_NPROCS:1} {[base]files}
 
 [testenv:format]
-basepython = python3.8
+basepython = python3
 skip_install = true
 deps = codespell