Insitu validation (collage + extents plots)
Change-Id: I775dc14789b76d79a688f57c8eb24f69d3c6f1ca
arnaudon committed Jul 7, 2021
1 parent 06257d7 commit d3e611d
Showing 16 changed files with 756 additions and 249 deletions.
1 change: 1 addition & 0 deletions .pylintrc
@@ -12,6 +12,7 @@ disable=
no-member,
too-many-ancestors,
too-many-lines,
unsubscriptable-object,

[FORMAT]
# Maximum number of characters on a single line.
5 changes: 3 additions & 2 deletions requirements/base.pip
@@ -1,7 +1,8 @@
atlas_analysis>0.0.1
brainbuilder>=0.14
bluepy>=2.3,<3
bluepymm
bluepyparallel
brainbuilder>=0.14
diameter_synthesis>=0.2.4
gitpython
jinja2
@@ -19,7 +20,7 @@ PyYAML
region_grower>=0.2.2
scipy
seaborn
tns>=2.4
tmd
tns>=2.4
tqdm
voxcell>=3,<4
48 changes: 43 additions & 5 deletions src/synthesis_workflow/circuit.py
@@ -7,8 +7,6 @@
import numpy as np

from atlas_analysis.planes.planes import create_planes as _create_planes
from atlas_analysis.planes.planes import create_centerline as _create_centerline
from atlas_analysis.planes.planes import _smoothing
from brainbuilder.app.cells import _place as place

L = logging.getLogger(__name__)
@@ -186,6 +184,44 @@ def slice_circuit(input_mvd3, output_mvd3, slicer):
return sliced_cells


def _get_principal_direction(points):
"""Return the principal direction of a point cloud.
It is the eigenvector of the covariance matrix with the highest eigenvalue.
Taken from neuror.unravel.
"""
X = np.copy(np.asarray(points))
X -= np.mean(X, axis=0)
C = np.dot(X.T, X)
w, v = np.linalg.eig(C)
return v[:, w.argmax()]


def get_centerline_bounds(atlas, layer, region=None, central_layer=5, hemisphere=None):
"""Find centerline bounds using PCA of the voxell position of a given layer in the region."""
if region is not None:
mask = atlas.get_region_mask(region)
mask.raw = np.array(mask.raw, dtype=int)
layer.raw = layer.raw * mask.raw

_ids = np.where(layer.raw == central_layer)
ids = np.column_stack(_ids)
points = layer.indices_to_positions(ids)

ids = np.array(ids)
points = np.array(points)
if hemisphere is not None:
if hemisphere == "left":
point_mask = points[:, 2] < points[:, 2].mean()
if hemisphere == "right":
point_mask = points[:, 2] > points[:, 2].mean()
ids = ids[point_mask]
points = points[point_mask]
direction = _get_principal_direction(points)
_align = points.dot(direction)
return ids[_align.argmin()], ids[_align.argmax()]


def create_planes(
layer_annotation,
plane_type="aligned",
@@ -217,10 +253,12 @@ def create_planes(
centerline_axis (str): (for plane_type = aligned) axis along which to create planes
"""
if plane_type == "centerline":
centerline = _create_centerline(
layer_annotation, [centerline_first_bound, centerline_last_bound]
centerline = np.array(
[
layer_annotation.indices_to_positions(centerline_first_bound),
layer_annotation.indices_to_positions(centerline_last_bound),
]
)
centerline = _smoothing(centerline)

elif plane_type == "aligned":
_n_points = 10
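As a side note, a minimal sketch of how these new circuit helpers could be chained to build centerline planes; the atlas path, the region name and the use of get_layer_tags below are illustrative assumptions, not part of this commit:

from voxcell.nexus.voxelbrain import Atlas

from synthesis_workflow.circuit import create_planes, get_centerline_bounds
from synthesis_workflow.tools import get_layer_tags

atlas_path = "path/to/atlas"  # hypothetical atlas folder
atlas = Atlas.open(atlas_path)
layer_annotation, _ = get_layer_tags(atlas_path)  # VoxelData with integer layer ids

# PCA-based bounds of the central layer, restricted to one hemisphere
first_bound, last_bound = get_centerline_bounds(
    atlas, layer_annotation, region="Isocortex", central_layer=5, hemisphere="right"
)

# The bounds are voxel indices; with plane_type="centerline" they are converted
# to positions and used as the two ends of a straight centerline.
planes = create_planes(
    layer_annotation,
    plane_type="centerline",
    centerline_first_bound=first_bound,
    centerline_last_bound=last_bound,
)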
1 change: 1 addition & 0 deletions src/synthesis_workflow/extra/__init__.py
@@ -0,0 +1 @@
"""Extra functions for valiation, etc..."""
276 changes: 276 additions & 0 deletions src/synthesis_workflow/extra/insitu_validation.py
@@ -0,0 +1,276 @@
"""In-situ validation functions for synthesis in atlas."""
from pathlib import Path
from tqdm import tqdm
import numpy as np
import pandas as pd

import matplotlib.pyplot as plt
import matplotlib
from matplotlib.backends.backend_pdf import PdfPages
import seaborn as sns

from morphio import Morphology, SectionType
import neurom as nm
from voxcell import CellCollection
from voxcell.nexus.voxelbrain import Atlas
from region_grower.atlas_helper import AtlasHelper
from bluepyparallel import evaluate, init_parallel_factory

from synthesis_workflow.tools import get_layer_tags

matplotlib.use("Agg")


def _extent_function(row):
"""Inner function to compute the max extent of dendrites."""
neuron = nm.load_neuron(row["path"])
a = nm.get("max_radial_distance", neuron, neurite_type=nm.NeuriteType.basal_dendrite)
b = nm.get("max_radial_distance", neuron, neurite_type=nm.NeuriteType.apical_dendrite)
return {"extent": max(a, b)}


def compute_extents(sonata_path, morphology_path, ext=".h5", parallel_factory="multiprocessing"):
"""Compute the extents (max radial distance) of all morphologies.
Args:
sonata_path (str): path to a sonata file with morphologies
morphology_path (str): base path to the morphologies
ext (str): extension of the morphologies
parallel_factory (str): name of parallel factory to use (see BluePyParallel)
Returns:
pandas.DataFrame: dataframe from sonata with column 'extent' containing the computed values
"""
df = CellCollection.load_sonata(sonata_path).as_dataframe()

df["path"] = morphology_path + df["morphology"] + ext
_df = df[["path", "mtype", "etype"]]

factory = init_parallel_factory(parallel_factory)
df["extent"] = evaluate(
_df, _extent_function, new_columns=[["extent", 0]], parallel_factory=factory
)["extent"]
return df


def plot_extents(results_df, pdf_name="extents.pdf", bins=200):
"""Plot histograms of extents of morphologies.
Args:
results_df (pandas.DataFrame): results from compute_extents
pdf_name (str): name of pdf to save
bins (int): number of bins for histograms
"""
with PdfPages(pdf_name) as pdf:
for mtype, _df in tqdm(results_df.groupby("mtype")):
plt.figure()
plt.hist(_df["extent"].to_list(), bins=bins)
plt.suptitle(f"mtype: {mtype}")
plt.xlabel("extent (microns)")
plt.ylabel("number of morphologies")
pdf.savefig()


def get_layer_morpho_counts(
sonata_path,
morphology_path,
atlas_path,
n_cells=100,
section_type="basal_dendrite",
region=None,
hemisphere="right",
ext=".h5",
):
"""For each layer, compute the fraction of crossing morphologies.
Args:
sonata_path (str): path to sonata file
morphology_path (str): base path to morphologies
atlas_path (str): path to atlas folder
n_cells (int): number of morphologies to use to compute the fraction
section_type (str): section type (attr of morphio.SectionType)
region (str): if not None, must be the name of a region to filter morphologies
hemisphere (str): left/right hemisphere
ext (str): extension of morphology files
Returns:
pandas.DataFrame: dataframe with the counts, mtype and layer information
"""
section_type = getattr(SectionType, section_type)
cells = CellCollection.load(sonata_path).as_dataframe()
morph_path = Path(morphology_path)

layer_annotation, _ = get_layer_tags(atlas_path)

dfs = []
for mtype in tqdm(cells.mtype.unique()):
_cells = cells[cells.mtype == mtype]
if region is not None:
_cells = _cells[_cells.region == f"{region}@{hemisphere}"]
_n_cells = min(n_cells, len(_cells.index))
_cells = _cells.sample(_n_cells)

_layer_count = np.zeros(7)
for gid in _cells.index:
morph = Morphology((morph_path / _cells.loc[gid, "morphology"]).with_suffix(ext))
points = []
for section in morph.iter():
if section.type == section_type:
points += (
section.points + _cells.loc[gid, ["x", "y", "z"]].to_numpy()[np.newaxis]
).tolist()

if len(points) > 0:
layers = layer_annotation.lookup(points, outer_value=0)
_layers, _counts = np.unique(layers, return_counts=True)
_layer_count[_layers] += 1
_df = pd.DataFrame()
_df["count"] = _layer_count / _n_cells
_df["layer"] = _df.index
_df["mtype"] = mtype
dfs.append(_df)
return pd.concat(dfs)


def plot_layer_morph_counts(counts_df, pdf_name="layer_count_comparison.pdf"):
"""Plot count of crossing of morphologies with layers.
If 'label' column is present in counts_df, the barplots will split the data for comparison.
For example:
df1 = get_layer_morpho_counts(...)
df1['label'] = 'dataset1'
df2 = get_layer_morpho_counts(...)
df2['label'] = 'dataset2'
plot_layer_morph_counts(pd.concat([df1, df2]))
Args:
counts_df (pandas.DataFrame): results of get_layer_morpho_counts
pdf_name (str): name of pdf to save
"""
with PdfPages(pdf_name) as pdf:
for mtype in counts_df.mtype.unique():
df = counts_df[counts_df.mtype == mtype]
if len(df[df["count"] > 0]) > 0:

plt.figure(figsize=(5, 4))
if "label" in df.columns:
sns.barplot(x="layer", hue="label", y="count", data=df)
else:
sns.barplot(x="layer", y="count", data=df)
plt.axhline(1.0)
plt.suptitle(mtype)
pdf.savefig()
plt.close()


def get_depth(atlas, pos, layers, lower=0, upper=1000, precision=0.1):
"""Estimate the depth from 'pos' to pia using layer annotation from atlas."""
orientation = atlas.orientations.lookup(pos)[0].dot([0, 1, 0])
voxel_dim = precision * layers.voxel_dimensions.max()

def _get_layer(_pos):
return layers.lookup(pos + _pos * orientation, outer_value=0)

def _search(lower, upper, search_depth):
mid = 0.5 * (lower + upper)
layer = _get_layer(mid)

if upper - lower < voxel_dim:
return np.linalg.norm(mid * orientation)

if layer >= 1:
return _search(mid, upper, search_depth + 1)
if layer == 0:
return _search(lower, mid, search_depth + 1)
return None

upper_layer = _get_layer(upper)
if upper_layer > 0:
return get_depth(atlas, pos, layers, lower=lower, upper=2 * upper)
return _search(lower, upper, 0)


# pylint: disable=too-many-arguments,too-many-locals
def plot_layer_collage(
sonata_path,
morphology_path,
atlas_path,
n_cells=10,
section_types=None,
region=None,
hemisphere="right",
pdf_name="layer_collage.pdf",
shift=200,
dpi=200,
ext=".h5",
):
"""Plot morphologies next to each other with points colored by layer.
Args:
sonata_path (str): path to sonata file
morphology_path (str): base path to morphologies
atlas_path (str): path to atlas folder
n_cells (int): number of morphologies to plot per mtype
section_types (list): list of section types (attr of morphio.SectionType),
if None, basal_dendrite, apical_dendrite will be used
region (str): if not None, must be the name of a region to filter morphologies
hemisphere (str): left/right hemisphere
pdf_name (str): name of pdf to save
shift (float): x-shift between morphologies
dpi (int): dpi to save rasterized pdf
ext (str): extension of morphology files
"""
if section_types is None:
section_types = ["basal_dendrite", "apical_dendrite"]
section_types = [getattr(SectionType, section_type) for section_type in section_types]

cells = CellCollection.load(sonata_path).as_dataframe()
morph_path = Path(morphology_path)
layer_annotation, _ = get_layer_tags(atlas_path)
atlas = AtlasHelper(Atlas().open(atlas_path))

cmap = matplotlib.colors.ListedColormap(["C0", "C1", "C2", "C3", "C4", "C5", "C6"])
with PdfPages(pdf_name) as pdf:
for mtype in tqdm(cells.mtype.unique()):
_cells = cells[cells.mtype == mtype]
if region is not None:
_cells = _cells[_cells.region == f"{region}@{hemisphere}"]
_n_cells = min(n_cells, len(_cells.index))
_cells = _cells.sample(_n_cells)

plt.figure(figsize=(len(_cells.index) * 2, 3))
for i, gid in enumerate(_cells.index):
morph = Morphology((morph_path / _cells.loc[gid, "morphology"]).with_suffix(ext))
points = []
for section in morph.iter():
if section.type in section_types:
points += section.points.tolist()

pos = _cells.loc[gid, ["x", "y", "z"]].to_numpy()
points = np.array(points)
_points = points + pos[np.newaxis]
layers = layer_annotation.lookup(_points, outer_value=0)

depth = atlas.depths.lookup(pos)

rotation = atlas.orientations.lookup(pos)[0]

points = points.dot(rotation.T)
points += np.array([shift * i, 0, 0])[np.newaxis]
scat = plt.scatter(
*points[:, :2].T, c=layers, s=0.1, cmap=cmap, vmin=-0.5, vmax=6.5
)
plt.scatter(shift * i, 0, c="k", s=5)
plt.plot([shift * i - shift / 2, shift * i + shift / 2], [depth, depth], "-k")
new_depth = get_depth(atlas, pos, layer_annotation)
plt.plot(
[shift * i - shift / 2, shift * i + shift / 2], [new_depth, new_depth], "-r"
)

plt.colorbar(scat, label="layers (0=outside)")
plt.suptitle(mtype)
plt.axis("equal")
plt.gca().set_rasterized(True)
pdf.savefig(bbox_inches="tight", dpi=dpi)
plt.close()
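Putting the new module together, a hypothetical end-to-end validation run; all paths below are illustrative assumptions, and the default parallel factory, section types and file extension are used:

from synthesis_workflow.extra.insitu_validation import (
    compute_extents,
    get_layer_morpho_counts,
    plot_extents,
    plot_layer_collage,
    plot_layer_morph_counts,
)

sonata_path = "circuit.sonata"     # hypothetical sonata file
morphology_path = "morphologies/"  # trailing separator needed: paths are built by string concatenation
atlas_path = "atlas/"              # hypothetical atlas folder

# extent (max radial distance of dendrites) per morphology, computed in parallel
extents_df = compute_extents(sonata_path, morphology_path, ext=".h5")
plot_extents(extents_df, pdf_name="extents.pdf")

# per layer, fraction of morphologies whose basal dendrites cross it
counts_df = get_layer_morpho_counts(sonata_path, morphology_path, atlas_path, n_cells=100)
plot_layer_morph_counts(counts_df, pdf_name="layer_count_comparison.pdf")

# collage of morphologies per mtype, with points colored by layer
plot_layer_collage(sonata_path, morphology_path, atlas_path, n_cells=10, pdf_name="layer_collage.pdf")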