Make choose_morphologies export scores
Change-Id: I7b74c2bc8fab5daa183907a8487ff3b8e920a740
adrien-berchet authored and arnaudon committed Apr 20, 2021
1 parent 16af2e3 commit 608b92f
Showing 25 changed files with 1,415 additions and 1,081 deletions.
33 changes: 17 additions & 16 deletions doc/source/conf.py
@@ -1,4 +1,4 @@
# Configuration file for the Sphinx documentation builder.
"""Configuration file for the Sphinx documentation builder."""
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
@@ -10,12 +10,20 @@
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# pylint: disable=protected-access

import importlib
import re
from pkg_resources import get_distribution

import luigi

import morphval
import synthesis_workflow
import synthesis_workflow.tasks
from synthesis_workflow.tasks.cli import _PARAM_NO_VALUE
from synthesis_workflow.tasks.cli import _process_param


# -- Project information -----------------------------------------------------

@@ -109,17 +117,7 @@
"luigi": ("https://luigi.readthedocs.io/en/stable", None),
}


import importlib
import luigi
import re

import morphval
import synthesis_workflow
import synthesis_workflow.tasks
from synthesis_workflow.tasks.cli import _PARAM_NO_VALUE
from synthesis_workflow.tasks.cli import _process_param

# Auto-API customization

SKIP = [
r".*\.L$",
@@ -136,7 +134,9 @@
}


# pylint: disable=unused-argument
def maybe_skip_member(app, what, name, obj, skip, options):
"""Skip and update documented objects."""
skip = None
for pattern in SKIP:
if re.match(pattern, name) is not None:
@@ -167,11 +167,12 @@ def maybe_skip_member(app, what, name, obj, skip, options):
):
help_str += f"\n\n:default value: {actual_obj._default}"
obj.docstring = help_str
except:
except Exception: # pylint: disable=broad-except
pass

return skip


def setup(app):
"""Sphinx events setup."""
app.connect("autoapi-skip-member", maybe_skip_member)
8 changes: 4 additions & 4 deletions setup.py
@@ -31,14 +31,14 @@
"morph_validator",
"morphio>=2.7",
"neuroc",
"neurom!=2.0.1.dev4,!=1.7.0",
"neurom>=2.0.1",
"pandas",
"placement_algorithm>=2.1.1",
"placement_algorithm>=2.1.2",
"PyYAML",
"region_grower>=0.1.10",
"region_grower==0.1.11",
"scipy",
"seaborn",
"tns>=2.2.7,<2.3",
"tns>=2.3.3",
"tmd",
"tqdm",
"voxcell>=3",
7 changes: 5 additions & 2 deletions src/morphval/common.py
@@ -127,12 +127,15 @@ def get_components_population(population, component):
return population

def filtered_neurites(n):
return list(iter_neurites(n, filt=is_type(COMP_MAP[component])))
return iter_neurites(n, filt=is_type(COMP_MAP[component]))

morphs = []
for n in population:
nrn = deepcopy(n)
nrn.neurites = filtered_neurites(n)
for neurite in filtered_neurites(n):
if neurite not in nrn.neurites:
nrn.delete_section(neurite, recursive=True)

nrn.name = n.name + "_" + component
morphs.append(nrn)

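
For context, the neurite selection used by filtered_neurites above can be exercised on its own. A minimal sketch, assuming a NeuroM 2.x API and a hypothetical input file; only the selection step is shown, not the section deletion:

from neurom import iter_neurites, load_neuron
from neurom.core.types import NeuriteType, tree_type_checker as is_type

morph = load_neuron("example.swc")  # hypothetical input morphology
basal = list(iter_neurites(morph, filt=is_type(NeuriteType.basal_dendrite)))
print(f"{len(basal)} basal dendrite(s) out of {len(morph.neurites)} neurites")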
27 changes: 27 additions & 0 deletions src/synthesis_workflow/synthesis.py
@@ -177,6 +177,9 @@ def run_choose_morphologies(kwargs, nb_jobs=-1):
"seed",
"output",
"no-mpi",
"scores-output-path",
"bias-kind",
"no-optional-scores",
]
]

@@ -188,6 +191,9 @@ def run_choose_morphologies(kwargs, nb_jobs=-1):
"scales": None,
"seed": 0,
"segment-type": None,
"scores-output-path": None,
"bias-kind": "linear",
"no-optional-scores": False,
}

# Set logging arguments
@@ -216,6 +222,9 @@ def create_axon_morphologies_tsv(
scales=None,
seed=0,
axon_morphs_path="axon_morphs.tsv",
scores_output_path=None,
bias_kind="linear",
with_optional_scores=True,
nb_jobs=-1,
):
"""Create required axon_morphology tsv file for placement-algorithm to graft axons.
@@ -231,6 +240,11 @@
scales (list(float)): Scale(s) to check
seed (int): Random number generator seed
axon_morphs_path (str): Name of the axon morphology list in .tsv format
scores_output_path (str): Make ``placement_algorithm.app.choose_morphologies`` export scores
into files in this folder
bias_kind (str): Kind of bias used to penalize scores of rescaled morphologies
(can be "linear" or "gaussian")
with_optional_scores (bool): Use or ignore optional rules for morphology choice
nb_jobs (int): Number of jobs
"""
check_placement_params = {
@@ -260,6 +274,9 @@
"scales": scales,
"seed": seed,
"output": axon_morphs_path,
"scores-output-path": scores_output_path,
"bias-kind": bias_kind,
"no-optional-scores": not bool(with_optional_scores),
"no-mpi": True,
}
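
A hedged usage sketch of the updated helper: the circuit and output paths are hypothetical, the keyword names mirror the signature shown above, and the atlas/annotation/rules arguments hidden in the collapsed part of the diff are omitted.

from synthesis_workflow.synthesis import create_axon_morphologies_tsv

create_axon_morphologies_tsv(
    "circuit_somata.mvd3",              # hypothetical circuit file
    morphs_df_path="axon_cells.tsv",    # hypothetical morphology list
    alpha=1.0,
    seed=0,
    axon_morphs_path="axon_morphs.tsv",
    scores_output_path="scores/",       # have choose_morphologies export score files here
    bias_kind="gaussian",               # penalty for rescaled morphologies ("linear" by default)
    with_optional_scores=False,         # maps to the no-optional-scores flag
    nb_jobs=4,
)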

@@ -308,12 +325,15 @@ def run_synthesize_morphologies(kwargs, nb_jobs=-1, debug_scales_path=None):
"atlas-cache",
"seed",
"out-mvd3",
"out-cells-path",
"out-apical",
"out-morph-dir",
"out-morph-ext",
"max-files-per-dir",
"overwrite",
"max-drop-ratio",
"scaling-jitter-std",
"rotational-jitter-std",
"no-mpi",
]
]
@@ -326,11 +346,18 @@ def run_synthesize_morphologies(kwargs, nb_jobs=-1, debug_scales_path=None):
"max_files_per_dir": None,
"morph_axon": None,
"mvd3": None,
"out_mvd3": None,
"out_morph_dir": "out",
"out_morph_ext": ["swc"],
"scaling_jitter_std": None,
"rotational_jitter_std": None,
"seed": 0,
}

if kwargs.pop("apply_jitter", False):
kwargs["scaling_jitter_std"] = 0.2
kwargs["rotational_jitter_std"] = 10

# Set logging arguments
logger_kwargs = None
if debug_scales_path is not None:
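
The apply_jitter shortcut above expands a single boolean into the two jitter arguments before the remaining kwargs are forwarded. A small standalone sketch (the kwargs dict is hypothetical; the numeric values are those used in the diff):

kwargs = {"apply_jitter": True, "seed": 0}

if kwargs.pop("apply_jitter", False):
    kwargs["scaling_jitter_std"] = 0.2    # std-dev of the scaling jitter
    kwargs["rotational_jitter_std"] = 10  # std-dev of the rotational jitter

print(kwargs)
# {'seed': 0, 'scaling_jitter_std': 0.2, 'rotational_jitter_std': 10}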
2 changes: 1 addition & 1 deletion src/synthesis_workflow/tasks/config.py
@@ -17,7 +17,7 @@
warnings.filterwarnings("ignore", module="neurom.features")
warnings.filterwarnings("ignore", module="scipy")

# Disable matplotlib logger
# Disable some loggers
logging.getLogger("matplotlib").propagate = False
logging.getLogger("numexpr").propagate = False
logging.getLogger("tns").propagate = False
68 changes: 58 additions & 10 deletions src/synthesis_workflow/tasks/synthesis.py
@@ -267,7 +267,8 @@ class BuildAxonMorphologies(WorkflowTask):
annotations_path = luigi.Parameter(
default=None,
description=(
":str: Path to annotations file used by ``placementAlgorithm.choose_morphologies``. "
":str: Path to annotations file used by "
"``placementAlgorithm.app.choose_morphologies``. "
"If None, random axons will be choosen."
),
)
@@ -280,18 +281,36 @@
)
placement_rules_path = luigi.Parameter(
default=None,
description=":str: See ``placementAlgorithm.choose_morphologies``.",
description=":str: See ``placementAlgorithm.app.choose_morphologies``.",
)
placement_alpha = luigi.FloatParameter(
default=1.0,
description=":float: See ``placementAlgorithm.choose_morphologies``.",
description=":float: See ``placementAlgorithm.app.choose_morphologies``.",
)
placement_scales = luigi.ListParameter(
default=None,
description=":list: See ``placementAlgorithm.choose_morphologies``.",
description=":list: See ``placementAlgorithm.app.choose_morphologies``.",
)
placement_seed = luigi.IntParameter(
default=0, description=":int: See ``placementAlgorithm.choose_morphologies``."
default=0,
description=":int: See ``placementAlgorithm.app.choose_morphologies``.",
)
with_scores = BoolParameter(default=False, description=":bool: Export morphology scores.")
filter_axons = BoolParameter(
default=False,
description=(
":bool: Read the neuronDB.xml file, filter cell with use_axon=True and generate a new "
"neurondb.dat that is then read by ``placementAlgorithm.app.choose_morphologies``."
),
)
bias_kind = luigi.ChoiceParameter(
choices=["uniform", "linear", "gaussian"],
default="linear",
description=":str: Kind of bias used to penalize scores of rescaled morphologies.",
)
with_optional_scores = BoolParameter(
default=True,
description=":bool: Use or ignore optional rules for morphology choice.",
)
nb_jobs = luigi.IntParameter(default=20, description=":int: Number of workers.")

@@ -329,7 +348,15 @@ def run(self):
input_task_target = yield GetSynthesisInputs()
annotations_file = input_task_target.pathlib_path / self.annotations_path
axon_cells = None
neurondb_path = find_case_insensitive_file(self.get_neuron_db_path("dat"))
if self.filter_axons:
neurondb_path = self.output()["axons_neurondb"].path
input_neurondb_path = find_case_insensitive_file(self.get_neuron_db_path("xml"))
morphs_df = load_neurondb_to_dataframe(input_neurondb_path)
morphs_df.drop(morphs_df.loc[~morphs_df["use_axon"]].index, inplace=True)
morphs_df.drop(columns=["use_axon"], inplace=True)
morphs_df.to_csv(neurondb_path, sep=" ", header=False, index=False)
else:
neurondb_path = find_case_insensitive_file(self.get_neuron_db_path("dat"))

if any(
[
@@ -343,6 +370,12 @@
else:
atlas_path = CircuitConfig().atlas_path

with_scores = self.with_scores
if with_scores:
scores_output_folder = self.output()["scores"].path
else:
scores_output_folder = None

create_axon_morphologies_tsv(
self.input()["circuit"].path,
morphs_df_path=axon_cells,
@@ -353,13 +386,24 @@
alpha=self.placement_alpha,
scales=self.placement_scales,
seed=self.placement_seed,
axon_morphs_path=self.output().path,
axon_morphs_path=self.output()["morphs"].path,
scores_output_path=scores_output_folder,
bias_kind=self.bias_kind,
with_optional_scores=self.with_optional_scores,
nb_jobs=self.nb_jobs,
)

def output(self):
""""""
return MorphsDfLocalTarget(self.axon_morphs_path)
targets = {
"morphs": MorphsDfLocalTarget(self.axon_morphs_path),
}
if self.with_scores:
scores_output_folder = Path(self.axon_morphs_path).with_suffix("")
targets["scores"] = MorphsDfLocalTarget(scores_output_folder)
if self.filter_axons:
targets["axons_neurondb"] = MorphsDfLocalTarget("axons_neurondb.dat")
return targets
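
Because output() now returns a dictionary of targets, downstream tasks index into it instead of using a single target. A hedged sketch of a consumer (the Consumer task and its wiring are hypothetical; the key names match the dictionary built above):

import luigi

from synthesis_workflow.tasks.synthesis import BuildAxonMorphologies


class Consumer(luigi.Task):
    """Hypothetical task reading the dict of targets produced by BuildAxonMorphologies."""

    def requires(self):
        return {"axons": BuildAxonMorphologies()}

    def run(self):
        axons = self.input()["axons"]
        morphs_path = axons["morphs"].path             # always present
        scores_target = axons.get("scores")            # present only when with_scores=True
        neurondb_target = axons.get("axons_neurondb")  # present only when filter_axons=True
        print(morphs_path, scores_target, neurondb_target)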


@copy_params(
@@ -397,6 +441,9 @@ class Synthesize(WorkflowTask):
default=0.1,
description=":float: The maximum drop ratio.",
)
apply_jitter = BoolParameter(
default=False, description=":bool: Apply jitter to all sections of axons."
)
seed = luigi.IntParameter(default=0, description=":int: Pseudo-random generator seed.")

def requires(self):
@@ -413,7 +460,7 @@ def requires(self):
def run(self):
""""""

axon_morphs_path = self.input()["axons"].path
axon_morphs_path = self.input()["axons"]["morphs"].path
out_mvd3 = self.output()["out_mvd3"]
out_morphologies = self.output()["out_morphologies"]
out_apical_points = self.output()["apical_points"]
@@ -441,7 +488,7 @@ def run(self):
"tmd_parameters": self.input()["tmd_parameters"].path,
"tmd_distributions": self.input()["tmd_distributions"].path,
"atlas": CircuitConfig().atlas_path,
"out_mvd3": out_mvd3.path,
"out_cells_path": out_mvd3.path,
"out_apical": out_apical_points.path,
"out_morph_ext": [str(self.ext)],
"out_morph_dir": out_morphologies.path,
@@ -450,6 +497,7 @@
"morph-axon": axon_morphs_path,
"base-morph-dir": axon_morphs_base_dir,
"max_drop_ratio": self.max_drop_ratio,
"apply_jitter": self.apply_jitter,
"seed": self.seed,
}

5 changes: 3 additions & 2 deletions src/synthesis_workflow/tools.py
@@ -346,12 +346,13 @@ def run_master(
task_ids = np.random.permutation(master.task_ids)

# Run the worker
L.info("Using batch size of %d tasks", int(len(task_ids) / cpu_count()))
batch_size = 1 + int(len(task_ids) / (cpu_count() if nb_jobs == -1 else nb_jobs))
L.info("Using batch size of %d tasks", batch_size)
results = Parallel(
n_jobs=nb_jobs,
verbose=verbose,
backend="multiprocessing",
batch_size=1 + int(len(task_ids) / (cpu_count() if nb_jobs == -1 else nb_jobs)),
batch_size=batch_size,
)(delayed(_wrap_worker)(i, worker, logger_kwargs) for i in task_ids)

# Gather the results
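
The batch-size change above can be checked with a quick worked example; the helper and the task counts are hypothetical, but the expression mirrors the one now used in run_master:

from multiprocessing import cpu_count


def compute_batch_size(n_tasks, nb_jobs):
    """Same expression as in run_master."""
    return 1 + int(n_tasks / (cpu_count() if nb_jobs == -1 else nb_jobs))


print(compute_batch_size(1000, 8))   # 1 + 1000 // 8 = 126 tasks per batch
print(compute_batch_size(1000, -1))  # falls back to cpu_count() when nb_jobs is -1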