Black the code with line length of 100
Change-Id: I5c0885459d8996bd22c7c698120052e13c872310
adrien-berchet committed Jan 20, 2021
1 parent c44cb63 commit f269215
Showing 28 changed files with 141 additions and 385 deletions.
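
For context, the reformatting in this commit is what Black produces when its line-length limit is raised from the default 88 characters to 100. The snippet below is not part of the commit; it is a minimal sketch of the effect using Black's Python API. The project itself may instead have run the CLI (e.g. `black --line-length 100 .`) or set `line-length = 100` under `[tool.black]` in pyproject.toml; neither configuration file appears in this diff.

import black

# A split call with no trailing comma, like many of the snippets removed in this commit.
SRC = '''\
feature_results["test_summary_statistics"] = compute_summary_statistics(
    test_data
)
'''

# Re-format with a 100-character limit: the joined call fits within the limit,
# so Black collapses it onto one line, the same kind of change applied throughout this diff.
print(black.format_str(SRC, mode=black.Mode(line_length=100)))
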
8 changes: 2 additions & 6 deletions doc/source/conf.py
@@ -146,16 +146,12 @@ def maybe_skip_member(app, what, name, obj, skip, options):
     try:
         package, module, *path = name.split(".")
         root_package = IMPORT_MAPPING[package]
-        actual_module = importlib.import_module(
-            root_package.__name__ + "." + module
-        )
+        actual_module = importlib.import_module(root_package.__name__ + "." + module)
         task = getattr(actual_module, path[-2])
         actual_obj = getattr(task, path[-1])
         if isinstance(actual_obj, luigi.Parameter):
             if hasattr(actual_obj, "description") and actual_obj.description:
-                help_str, param_type, choices, interval, optional = _process_param(
-                    actual_obj
-                )
+                help_str, param_type, choices, interval, optional = _process_param(actual_obj)
                 if optional:
                     help_str = "(optional) " + help_str
                 if param_type is not None:
4 changes: 1 addition & 3 deletions src/morphval/cli.py
@@ -25,9 +25,7 @@ def get_parser():
         required=True,
         help="full path to directory for the validation results",
     )
-    parser.add_argument(
-        "-c", "--config", required=True, help="full path to yaml config file"
-    )
+    parser.add_argument("-c", "--config", required=True, help="full path to yaml config file")
     parser.add_argument(
         "--example-config",
         action="store_true",
20 changes: 5 additions & 15 deletions src/morphval/common.py
@@ -111,9 +111,7 @@ def get_agg_fig():
 
 def center_population(population):
     """Return a new population where all cells have been translated to their soma origins."""
-    morphs = [
-        geom.transform.translate(n, -np.asarray(n.soma.center)) for n in population
-    ]
+    morphs = [geom.transform.translate(n, -np.asarray(n.soma.center)) for n in population]
     return Population(morphs, name=population.name)
 
 
@@ -218,18 +216,10 @@ def plot_normalized_neurons(
         test_comp_pop = get_components_population(test_population, comp)
         xlim, ylim = compute_bounding_box(ref_comp_pop, test_comp_pop)
 
-        desc = progress_bar_label(
-            "Reference {}", pretty_name(comp_dict[comp]), notebook_desc
-        )
-        ref_plot_paths[comp] = plot_population(
-            ref_output_dir, ref_comp_pop, xlim, ylim, desc
-        )
-        desc = progress_bar_label(
-            "Test {}", pretty_name(comp_dict[comp]), notebook_desc
-        )
-        test_plot_paths[comp] = plot_population(
-            test_output_dir, test_comp_pop, xlim, ylim, desc
-        )
+        desc = progress_bar_label("Reference {}", pretty_name(comp_dict[comp]), notebook_desc)
+        ref_plot_paths[comp] = plot_population(ref_output_dir, ref_comp_pop, xlim, ylim, desc)
+        desc = progress_bar_label("Test {}", pretty_name(comp_dict[comp]), notebook_desc)
+        test_plot_paths[comp] = plot_population(test_output_dir, test_comp_pop, xlim, ylim, desc)
 
     return ref_plot_paths, test_plot_paths
 
16 changes: 4 additions & 12 deletions src/morphval/validation.py
@@ -11,9 +11,7 @@
 
 DICTDATA = dict.fromkeys(["name", "data_type", "data", "labels"])
 
-DICTALLDATA = dict.fromkeys(
-    ["datasets", "description", "charts", "version", "result", "type"]
-)
+DICTALLDATA = dict.fromkeys(["datasets", "description", "charts", "version", "result", "type"])
 
 DESCR = (
     "Morphology validation against reference morphologies. "
@@ -39,9 +37,7 @@ def extract_hist(data, bins=20):
     """
     bin_data, edges = np.histogram(data, bins, normed=True)
 
-    edges_centers = [
-        float(Decimal("%.2f" % e)) for e in list((edges[1:] + edges[:-1]) / 2)
-    ]
+    edges_centers = [float(Decimal("%.2f" % e)) for e in list((edges[1:] + edges[:-1]) / 2)]
 
     return list(bin_data), list(edges_centers)
 
@@ -107,13 +103,9 @@ def write_all(validation_data, reference_data, component, feature, name, config)
         config=config, name=name, component=component, feature=feature
     )
 
-    valid = write_hist(
-        data=validation_data, feature=feature, name=name + "-test", bins=bins
-    )
+    valid = write_hist(data=validation_data, feature=feature, name=name + "-test", bins=bins)
 
-    refer = write_hist(
-        data=reference_data, feature=feature, name=name + "-reference", bins=bins
-    )
+    refer = write_hist(data=reference_data, feature=feature, name=name + "-reference", bins=bins)
 
     results, status = stat_test(
         validation_data, reference_data, test=test, fargs=thresh, val_crit=val_crit
72 changes: 18 additions & 54 deletions src/morphval/validation_main.py
@@ -18,9 +18,7 @@
 
 
 _distribution = pkg_resources.get_distribution("synthesis-workflow")
-TEMPLATES = (
-    Path(_distribution.get_resource_filename("morphval", "morphval")) / "templates"
-)
+TEMPLATES = Path(_distribution.get_resource_filename("morphval", "morphval")) / "templates"
 TEMPLATE_FILE = (TEMPLATES / "report_template.jinja2").as_posix()
 SUMMARY_TEMPLATE_FILE = (TEMPLATES / "report_summary_template.jinja2").as_posix()
 
@@ -48,9 +46,7 @@ def count_passing_validations(features):
     Returns:
         returns tuple(feature_pass, features_total)
     """
-    num_pass = sum(
-        v["validation_criterion"]["status"] == "PASS" for v in features.values()
-    )
+    num_pass = sum(v["validation_criterion"]["status"] == "PASS" for v in features.values())
     return num_pass, len(features)
 
 
@@ -91,9 +87,7 @@ def do_validation(validation_config, ref_population, test_population):
         results[component_name] = component_results = collections.OrderedDict()
         morphometrics[component_name] = component_metrics = {}
         for feature_name, feature_config in features.items():
-            component_results[
-                feature_name
-            ] = feature_results = collections.OrderedDict()
+            component_results[feature_name] = feature_results = collections.OrderedDict()
 
             test_data, ref_data = validation.extract_feature(
                 test_population, ref_population, component_name, feature_name
@@ -104,18 +98,12 @@
                 "ref": ref_data,
             }
 
-            feature_results["test_summary_statistics"] = compute_summary_statistics(
-                test_data
-            )
-            feature_results["ref_summary_statistics"] = compute_summary_statistics(
-                ref_data
-            )
+            feature_results["test_summary_statistics"] = compute_summary_statistics(test_data)
+            feature_results["ref_summary_statistics"] = compute_summary_statistics(ref_data)
 
             test_name = feature_config["stat_test"]
 
-            feature_results[
-                "statistical_tests"
-            ] = test_results = compute_statistical_tests(
+            feature_results["statistical_tests"] = test_results = compute_statistical_tests(
                 test_data,
                 ref_data,
                 feature_config["stat_test"],
@@ -134,19 +122,13 @@ def write_morphometrics(output_dir, morphometrics):
     for component_name, features in morphometrics.items():
         for feature_name, feature_metrics in features.items():
             for kind in ("test", "ref"):
-                dir_name = os.path.join(
-                    output_dir, component_name, "morphometrics", kind
-                )
+                dir_name = os.path.join(output_dir, component_name, "morphometrics", kind)
                 save_csv(dir_name, feature_name, feature_metrics[kind])
 
 
-def create_morphometrics_histograms(
-    output_dir, morphometrics, config, notebook_desc=None
-):
+def create_morphometrics_histograms(output_dir, morphometrics, config, notebook_desc=None):
     """Create histograms based on morphometrics."""
-    m_items = common.add_progress_bar(
-        morphometrics.items(), "[{}] Histograms", notebook_desc
-    )
+    m_items = common.add_progress_bar(morphometrics.items(), "[{}] Histograms", notebook_desc)
     for component_name, features in m_items:
         figure_dir = os.path.join(output_dir, component_name, "figures")
 
@@ -279,9 +261,7 @@ def validate_features(self, cell_figure_count=100, nb_jobs=-1, joblib_verbose=0)
         """Validate all features."""
         self.results = results = collections.OrderedDict()
 
-        batch_size = 1 + int(
-            len(self.config) / (nb_jobs if nb_jobs > 0 else cpu_count())
-        )
+        batch_size = 1 + int(len(self.config) / (nb_jobs if nb_jobs > 0 else cpu_count()))
 
         for res_mtype, res, ref_p, test_p in Parallel(
             nb_jobs,
@@ -293,12 +273,8 @@ def validate_features(self, cell_figure_count=100, nb_jobs=-1, joblib_verbose=0)
                 mtype,
                 config,
                 self.output_dir / mtype,
-                self.ref_files.loc[
-                    self.ref_files["mtype"] == mtype, "filepath"
-                ].tolist(),
-                self.test_files.loc[
-                    self.test_files["mtype"] == mtype, "filepath"
-                ].tolist(),
+                self.ref_files.loc[self.ref_files["mtype"] == mtype, "filepath"].tolist(),
+                self.test_files.loc[self.test_files["mtype"] == mtype, "filepath"].tolist(),
                 cell_figure_count,
                 False,
             )
@@ -308,9 +284,7 @@ def validate_features(self, cell_figure_count=100, nb_jobs=-1, joblib_verbose=0)
            self.ref_plots[res_mtype] = ref_p
            self.test_plots[res_mtype] = test_p
 
-        self.results_file = common.dump2json(
-            self.output_dir, "validation_results", results
-        )
+        self.results_file = common.dump2json(self.output_dir, "validation_results", results)
         return self.results_file
 
     def generate_report_data(self, mtype):
@@ -349,16 +323,12 @@ def generate_report_data(self, mtype):
 
         tt["num_pass"] = total_num_pass
         tt["num_features"] = total_num_features
-        tt["pass_percentage"] = "{:5.2f}".format(
-            (100.0 * total_num_pass) / total_num_features
-        )
+        tt["pass_percentage"] = "{:5.2f}".format((100.0 * total_num_pass) / total_num_features)
 
         return tt
 
     @staticmethod
-    def merge_results_features(
-        mtype, component, feature_name, feature_config, feature_results
-    ):
+    def merge_results_features(mtype, component, feature_name, feature_config, feature_results):
         """Merge result features."""
         stat_test = feature_config["stat_test"]
         stat_test_results = feature_results["statistical_tests"][stat_test]["results"]
@@ -393,9 +363,7 @@ def merge_results_features(
 
         return ret
 
-    def write_report(
-        self, validation_report=True, template_file=TEMPLATE_FILE, prefix="report-"
-    ):
+    def write_report(self, validation_report=True, template_file=TEMPLATE_FILE, prefix="report-"):
         """For each mtype in the results, write out its report.
 
         Args:
@@ -410,9 +378,7 @@ def write_report(
 
         output_files = []
         for mtype in self.results:
-            output_text = self.render_mtype_report(
-                template_file, mtype, validation_report
-            )
+            output_text = self.render_mtype_report(template_file, mtype, validation_report)
             output_files.append(os.path.join(report_dir, prefix + mtype + ".html"))
             with open(output_files[-1], "w") as outputFile:
                 outputFile.write(output_text)
@@ -436,9 +402,7 @@ def render_mtype_report(self, template_file, mtype, validation_report):
         template = load_template(template_file)
         templateText = self.generate_report_data(mtype)
 
-        config_file = common.dump2json(
-            self.output_dir, "validation_config", self.config
-        )
+        config_file = common.dump2json(self.output_dir, "validation_config", self.config)
 
         templateVars = {
             "output_title": "Validation report: " + mtype,
20 changes: 5 additions & 15 deletions src/synthesis_workflow/circuit.py
@@ -42,9 +42,7 @@ def halve_atlas(annotated_volume, axis=0, side=LEFT):
     for coord in range(3):
         if axis == coord:
             slices_[coord] = (
-                slice(0, middle)
-                if side == RIGHT
-                else slice(middle, annotated_volume.shape[axis])
+                slice(0, middle) if side == RIGHT else slice(middle, annotated_volume.shape[axis])
             )
         else:
             slices_[coord] = slice(0, annotated_volume.shape[coord])
@@ -72,9 +70,7 @@ def create_atlas_thickness_mask(atlas_dir):
     for i, max_thickness in enumerate(max_thicknesses, 1):
         ph = atlas.load_data(f"[PH]{i}", memcache=True)
         with np.errstate(invalid="ignore"):
-            invalid_thickness = (ph.raw[..., 1] - ph.raw[..., 0]) > (
-                max_thickness + tolerance
-            )
+            invalid_thickness = (ph.raw[..., 1] - ph.raw[..., 0]) > (max_thickness + tolerance)
         too_thick = np.logical_or(too_thick, invalid_thickness)
 
     L.info(
@@ -88,9 +84,7 @@
        np.round(100 * too_thick[isocortex_mask].mean(), 3),
    )
 
-    return brain_regions.with_data(
-        np.logical_and(~too_thick, isocortex_mask).astype(np.uint8)
-    )
+    return brain_regions.with_data(np.logical_and(~too_thick, isocortex_mask).astype(np.uint8))
 
 
 def build_circuit(
@@ -167,9 +161,7 @@ def circuit_slicer(cells, n_cells, mtypes=None, planes=None, hemisphere=None):
     # between each pair of planes, select n_cells
     return pd.concat(
         [
-            slice_n_cells(
-                get_cells_between_planes(cells, plane_left, plane_right), n_cells
-            )
+            slice_n_cells(get_cells_between_planes(cells, plane_left, plane_right), n_cells)
             for plane_left, plane_right in tqdm(
                 zip(planes[:-1:3], planes[2::3]), total=int(len(planes) / 3)
             )
@@ -240,9 +232,7 @@ def create_planes(
             _n_points,
         )
     else:
-        raise Exception(
-            f"Please set plane_type to 'aligned' or 'centerline', not {plane_type}."
-        )
+        raise Exception(f"Please set plane_type to 'aligned' or 'centerline', not {plane_type}.")
 
     # create all planes to match slice_thickness between every two planes
     centerline_len = np.linalg.norm(np.diff(centerline, axis=0), axis=1).sum()
12 changes: 3 additions & 9 deletions src/synthesis_workflow/fit_utils.py
@@ -66,12 +66,8 @@ def clean_outliers(
 
     # Detect outliers
     errs = np.array([np.abs(p(ix) - y[i]) for i, ix in enumerate(x)])
-    x_clean = np.delete(
-        x, [np.where(errs > np.percentile(np.sort(errs), outlier_percentage))][0]
-    )
-    y_clean = np.delete(
-        y, [np.where(errs > np.percentile(np.sort(errs), outlier_percentage))][0]
-    )
+    x_clean = np.delete(x, [np.where(errs > np.percentile(np.sort(errs), outlier_percentage))][0])
+    y_clean = np.delete(y, [np.where(errs > np.percentile(np.sort(errs), outlier_percentage))][0])
 
     return x_clean, y_clean
 
@@ -106,9 +102,7 @@ def fit_path_distance_to_extent(
     return popt[0], 0
 
 
-def get_path_distance_from_extent(
-    slope: float, intercept: float, extent: float
-) -> float:
+def get_path_distance_from_extent(slope: float, intercept: float, extent: float) -> float:
     """Returns a path distance for an input extent according to fitted function.
 
     The function is given by the equation: