Skip to content

Commit

Permalink
several things in that commit:
Browse files Browse the repository at this point in the history
- removed the rescale-related functions/tasks
- improved the collage
- improved atlas slicing
- added examples with typical luigi.cfg files
- added a morphology release loader in tools.py
- reworked default parameter names
- removed the dependence on the in_pc_mtype file from distributions/parameters

Change-Id: Ic6be88f4da8a90b4b1f4b4f786b4d827093fb6fa
  • Loading branch information
arnaudon authored and adrien-berchet committed Oct 19, 2020
1 parent 25733bd commit 2b5a492
Show file tree
Hide file tree
Showing 18 changed files with 730 additions and 463 deletions.
46 changes: 46 additions & 0 deletions examples/luigi_cfg/luigi_vacuum.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# global parameters

[SynthesisConfig]
tmd_parameters_path = out/tmd_parameters.json
tmd_distributions_path = out/tmd_distributions.json

[PathConfig]
morphs_df_path = synthesis_configs/morphs_df.csv
synth_morphs_df_path = out/synth_morphs_df.csv
synth_output_path = out/synthesized_morphologies
substituted_morphs_df_path = out/substituted_morphs_df.csv

# synthesis setup
[ApplySubstitutionRules]
substitution_rules_path = synthesis_configs/substitution_rules.yaml

[BuildSynthesisParameters]
tmd_parameters_path = out/tmd_parameters_no_scaling.json
input_tmd_parameters_path = synthesis_configs/tmd_specific_parameters.json
morphology_path = repaired_morphology_path_h5

[BuildSynthesisDistributions]
morphology_path = repaired_morphology_path_h5

# synthesize in vacuum
[VacuumSynthesize]
vacuum_synth_morphology_path = out/vacuum_synth_morphologies
vacuum_synth_morphs_df_path = out/vacuum_synth_morphs_df.csv
n_cells = 10
mtypes = ["all"]

# validation plots
[ValidateVacuumSynthesis]
with_morphometrics = False
with_density_profiles = False
with_vacuum_morphologies = True

[PlotVacuumMorphologies]
pdf_filename = figures/vacuum_morphologies.pdf

[PlotMorphometrics]
morphometrics_path = figures/morphometrics

[PlotDensityProfiles]
density_profiles_path = figures/density_profiles.pdf

2 changes: 2 additions & 0 deletions synthesis_workflow/morphology_combos/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
"""Module to create and modify morphologie combos with dataframes."""
from .create_me_combos import *
170 changes: 170 additions & 0 deletions synthesis_workflow/morphology_combos/create_me_combos.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
"""Functions to create and mofify combos dataframe."""
from pathlib import Path
import json
import logging
import pandas as pd

from bluepymm.prepare_combos.parse_files import read_mm_recipe

L = logging.getLogger(__name__)


def get_me_types_map(recipe, emodel_etype_map):
    """Use recipe data and bluepymm to get mtype/etype combos.

    Args:
        recipe (pandas.DataFrame): recipe rows with at least the columns
            ``layer``, ``etype`` and ``fullmtype``.
        emodel_etype_map (dict): maps an emodel name to a dict with keys
            ``layer`` (list), ``etype`` and optionally ``mtype``.

    Returns:
        pandas.DataFrame: one row per matching (recipe row, emodel) pair,
        with ``fullmtype`` renamed to ``mtype`` and an ``emodel`` column added.
    """
    rows = []
    for i in recipe.index:
        combo = recipe.loc[i]
        for emodel, emap in emodel_etype_map.items():
            # An emodel applies only to its declared layers and etype.
            if combo.layer not in emap["layer"] or combo.etype != emap["etype"]:
                continue
            # An optional "mtype" entry further restricts the match.
            if "mtype" in emap and emap["mtype"] != combo.fullmtype:
                continue
            # Copy before tagging so the recipe row itself is never mutated.
            row = combo.copy()
            row["emodel"] = emodel
            rows.append(row)

    # Build the frame in one shot: DataFrame.append was removed in pandas 2.0
    # and was O(n^2) when called in a loop.
    me_types_map = pd.DataFrame(rows)
    return me_types_map.rename(columns={"fullmtype": "mtype"}).reset_index()


def create_morphs_combos_df(
    morphs_df,
    recipe_path=None,
    emodel_etype_map_path=None,
    emodels=None,
    me_types_map=None,
):
    """From the morphs_df, create a dataframe with all possible combos.

    Args:
        morphs_df (pandas.DataFrame): morphologies, with at least a ``mtype`` column.
        recipe_path (str): path to a bluepymm recipe, used only when
            ``me_types_map`` is not given.
        emodel_etype_map_path (str): path to a json emodel/etype map, used only
            when ``me_types_map`` is not given.
        emodels (list): if given, keep only combos whose emodel is in this list.
        me_types_map (pandas.DataFrame): precomputed mtype/etype/emodel map;
            when None it is built from the recipe and the emodel/etype map.

    Returns:
        pandas.DataFrame: deduplicated combos with a ``morph_gid`` column
        holding the original ``morphs_df`` index.
    """
    if (
        me_types_map is None
        and emodel_etype_map_path is not None
        and recipe_path is not None
    ):
        recipe = read_mm_recipe(recipe_path)
        # Use a context manager so the file handle is closed (the previous
        # json.load(open(...)) leaked it).
        with open(emodel_etype_map_path, "r") as emodel_etype_map_file:
            emodel_etype_map = json.load(emodel_etype_map_file)
        me_types_map = get_me_types_map(recipe, emodel_etype_map)

    combos = []
    for combo_id in me_types_map.index:
        if emodels is not None:
            if me_types_map.loc[combo_id, "emodel"] not in emodels:
                continue
        combo = morphs_df[morphs_df.mtype == me_types_map.loc[combo_id, "mtype"]]
        combo = combo.assign(etype=me_types_map.loc[combo_id, "etype"])
        combo = combo.assign(emodel=me_types_map.loc[combo_id, "emodel"])
        combos.append(combo)

    # Concatenate once: DataFrame.append was removed in pandas 2.0 and was
    # O(n^2) when called in a loop.
    morphs_combos_df = pd.concat(combos) if combos else pd.DataFrame()
    morphs_combos_df = (
        morphs_combos_df.drop_duplicates()
        .reset_index()
        .rename(columns={"index": "morph_gid"})
    )
    return morphs_combos_df


def _base_emodel(emodel):
return "_".join(emodel.split("_")[:2])


def add_for_optimisation_flag(
    config_path, morphs_combos_df=None, morphs_df=None, emodels=None
):
    """Add for_optimisation flag for combos used for optimisation.

    Recipes are read from
    ``<config_path>/<base_emodel>/config/recipes/recipes.json``.

    Args:
        config_path (Path): root folder of the per-emodel configurations.
        morphs_combos_df (pandas.DataFrame): combos dataframe; when given,
            ``emodels`` is derived from its ``emodel`` column.
        morphs_df (pandas.DataFrame): morphologies dataframe (flagged in place).
        emodels (list): emodels to flag in ``morphs_df`` when
            ``morphs_combos_df`` is not given.

    Returns:
        tuple: (morphs_combos_df, morphs_df); either can be None if not provided.

    Raises:
        Exception: if both dataframes are None, or if ``morphs_df`` is given
            with no way to know the emodels.
    """
    if morphs_df is None and morphs_combos_df is None:
        raise Exception("Please provide at least one dataframe.")

    if morphs_combos_df is not None:
        emodels = list(set(morphs_combos_df.emodel))
        morphs_combos_df["for_optimisation"] = False
        for emodel in emodels:
            # Context manager closes the recipe file (json.load(open(...)) leaked it).
            with open(
                config_path / _base_emodel(emodel) / "config/recipes/recipes.json",
                "r",
            ) as recipe_file:
                recipe = json.load(recipe_file)[_base_emodel(emodel)]
            opt_name = Path(recipe["morphology"][0][1]).stem
            opt_mask = (morphs_combos_df.emodel == emodel) & (
                morphs_combos_df.name == opt_name
            )
            morphs_combos_df.loc[opt_mask, "for_optimisation"] = True
            if not opt_mask.any():
                # No combo for this emodel: try to reuse a cell already flagged
                # for optimisation (possibly under another emodel).
                candidates = morphs_combos_df[
                    (morphs_combos_df.name == opt_name)
                    & (morphs_combos_df.for_optimisation == 1)
                ]
                if len(candidates) > 0:
                    L.warning("Duplicate optimisation cell for emodel %s", emodel)
                    new_combo = candidates.iloc[0].copy()
                    new_combo["emodel"] = emodel
                    new_combo["etype"] = emodel.split("_")[0]
                    # DataFrame.append was removed in pandas 2.0; concat instead.
                    morphs_combos_df = pd.concat(
                        [morphs_combos_df, new_combo.to_frame().T]
                    )
                else:
                    # Nothing to duplicate either: warn and leave the frame as is
                    # (the previous code appended an empty frame, a no-op).
                    L.warning("Error, no cell for %s", emodel)

    if morphs_df is not None:
        morphs_df["for_optimisation"] = False
        if emodels is None and morphs_combos_df is None:
            raise Exception("Please provide a list of emodels for your cells")
        for emodel in emodels:
            with open(
                config_path / _base_emodel(emodel) / "config/recipes/recipes.json",
                "r",
            ) as recipe_file:
                recipe = json.load(recipe_file)[_base_emodel(emodel)]
            morphs_df.loc[
                (morphs_df.name == Path(recipe["morphology"][0][1]).stem),
                "for_optimisation",
            ] = True
    return morphs_combos_df, morphs_df


def add_for_optimisation_flag_old(
    config_path, morphs_combos_df=None, morphs_df=None, emodels=None
):
    """Add for_optimisation flag for combos used for optimisation.

    Older recipe layout: recipes are read from
    ``<config_path>/<emodel>/recipes/recipes.json``. Both dataframes are
    flagged in place; nothing is returned.

    Args:
        config_path (Path): root folder of the per-emodel configurations.
        morphs_combos_df (pandas.DataFrame): combos dataframe; when given,
            ``emodels`` is derived from its ``emodel`` column.
        morphs_df (pandas.DataFrame): morphologies dataframe.
        emodels (list): emodels to flag in ``morphs_df`` when
            ``morphs_combos_df`` is not given.

    Raises:
        Exception: if both dataframes are None, or if ``morphs_df`` is given
            with no way to know the emodels.
    """
    if morphs_df is None and morphs_combos_df is None:
        raise Exception("Please provide at least one dataframe.")

    if morphs_combos_df is not None:
        emodels = list(set(morphs_combos_df.emodel))
        morphs_combos_df["for_optimisation"] = False
        for emodel in emodels:
            # Context manager closes the recipe file (json.load(open(...)) leaked it).
            with open(config_path / emodel / "recipes/recipes.json", "r") as recipe_file:
                recipe = json.load(recipe_file)[emodel]
            # Compute the mask once and reuse it for both the flag assignment
            # and the emptiness check (the previous code rebuilt it twice).
            opt_mask = (morphs_combos_df.emodel == emodel) & (
                morphs_combos_df.name == Path(recipe["morphology"][0][1]).stem
            )
            morphs_combos_df.loc[opt_mask, "for_optimisation"] = True
            if not opt_mask.any():
                L.warning(
                    "Could not find a cell for optimisation for emodel %s", emodel
                )

    if morphs_df is not None:
        morphs_df["for_optimisation"] = False
        if emodels is None and morphs_combos_df is None:
            raise Exception("Please provide a list of emodels for your cells")
        for emodel in emodels:
            with open(config_path / emodel / "recipes/recipes.json", "r") as recipe_file:
                recipe = json.load(recipe_file)[emodel]
            morphs_df.loc[
                (morphs_df.name == Path(recipe["morphology"][0][1]).stem),
                "for_optimisation",
            ] = True
46 changes: 6 additions & 40 deletions synthesis_workflow/synthesis.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
"""Functions for synthesis to be used by luigi tasks."""
import json
import logging
import os
import re
from collections import defaultdict
from functools import partial
from pathlib import Path

Expand Down Expand Up @@ -40,13 +38,12 @@
matplotlib.use("Agg")


def get_neurite_types(pc_in_types_path, mtypes):
def get_neurite_types(morphs_df, mtypes):
"""Get the neurite types to consider for PC or IN cells."""
with open(pc_in_types_path, "rb") as pc_in_file:
pc_in_files = yaml.full_load(pc_in_file)

return {
mtype: ["basal"] if pc_in_files[mtype] == "IN" else ["basal", "apical"]
mtype: ["basal"]
if morphs_df.loc[morphs_df.mtype == mtype, "morph_class"].tolist()[0] == "IN"
else ["basal", "apical"]
for mtype in mtypes
}

Expand Down Expand Up @@ -129,7 +126,7 @@ def build_distributions(

tmd_distributions = {
"mtypes": {},
"metadata": {"cortical_thickness": json.loads(cortical_thickness)},
"metadata": {"cortical_thickness": cortical_thickness},
}
for mtype, distribution in Parallel(nb_jobs, verbose=joblib_verbose)(
delayed(build_distributions_single_mtype)(mtype) for mtype in mtypes
Expand Down Expand Up @@ -309,38 +306,6 @@ def run_synthesize_morphologies(kwargs, nb_jobs=-1):
run_master(SynthesizeMorphologiesMaster, kwargs, parser_args, defaults, nb_jobs)


def get_mean_neurite_lengths(
morphs_df,
neurite_type="apical",
mtypes=None,
morphology_path="morphology_path",
percentile=None,
):
"""Extract the mean radial neurite lengths of a population, by mtypes."""
if mtypes is None:
mtypes = ["all"]
if mtypes[0] != "all":
morphs_df = morphs_df[morphs_df.mtype.isin(mtypes)]

# Choose mean or percentile function
def _percentile(q, a, *args, **kwargs):
return np.percentile(a, q, *args, **kwargs)

if percentile is None:
f = np.mean
else:
f = partial(_percentile, float(percentile))

apical_lengths = defaultdict(list)
for gid in tqdm(morphs_df.index):
neuron = Morphology(morphs_df.loc[gid, morphology_path])
for neurite in neuron.root_sections:
if neurite.type == STR_TO_TYPES[neurite_type]:
apical_lengths[morphs_df.loc[gid, "mtype"]].append(get_max_len(neurite))

return {mtype: float(f(lengths)) for mtype, lengths in apical_lengths.items()}


def get_target_length(soma_layer, target_layer, cortical_thicknesses):
"""Compute the target length of a neurite from soma and target layer."""
cortical_depths = np.insert(np.cumsum(cortical_thicknesses), 0, 0.0)
Expand Down Expand Up @@ -537,6 +502,7 @@ def _process_scaling_rule(
file_lists = [
(mtype, morphs_df.loc[morphs_df.mtype == mtype, morphology_path].to_list())
for mtype in scaling_rules.keys()
if mtype != "default"
]

# Fit data and update TMD parameters
Expand Down
Loading

0 comments on commit 2b5a492

Please sign in to comment.