diameter smoother
related to: https://bbpgitlab.epfl.ch/neuromath/morphology-processing-workflow/-/merge_requests/8/diffs

Change-Id: I5014a4b84cf54bd0d9f2f9da9341a2d6437b8b03
arnaudon committed Oct 7, 2021
1 parent a5a2bde commit 2345201
Showing 7 changed files with 79 additions and 27 deletions.
58 changes: 38 additions & 20 deletions diameter_synthesis/build_diameters.py
@@ -3,13 +3,14 @@
from collections import deque
from functools import partial
from copy import copy
from copy import deepcopy

import numpy as np
from numpy.polynomial import polynomial

from morphio import SectionType, IterType

import diameter_synthesis.morph_functions as morph_funcs
from diameter_synthesis import morph_functions
from diameter_synthesis import utils
from diameter_synthesis.distribution_fitting import sample_distribution
from diameter_synthesis.exception import DiameterSynthesisError
@@ -33,10 +34,10 @@

def _reset_caches():
"""Reset the cached functions."""
morph_funcs.sec_length.cache_clear()
morph_funcs.partition_asymmetry_length.cache_clear()
morph_funcs.lengths_from_origin.cache_clear()
morph_funcs.n_children_downstream.cache_clear()
morph_functions.sec_length.cache_clear()
morph_functions.partition_asymmetry_length.cache_clear()
morph_functions.lengths_from_origin.cache_clear()
morph_functions.n_children_downstream.cache_clear()


def _get_neurites(neuron, neurite_type):
@@ -75,7 +76,7 @@ def _sample_sibling_ratio(
return 0.0
return sample_distribution(params["sibling_ratios"][neurite_type], rng=rng)
# This case should never happen since the mode is already checked in `_select_model`
raise DiameterSynthesisError("mode not understood {}".format(mode))
raise DiameterSynthesisError(f"mode not understood {mode}")


def _sample_diameter_power_relation(
@@ -102,7 +103,7 @@ def _sample_diameter_power_relation(
# This case should never happen since this mode is not known by `_select_model`
return 1.0
# This case should never happen since the mode is already checked in `_select_model`
raise DiameterSynthesisError("mode not understood {}".format(mode))
raise DiameterSynthesisError(f"mode not understood {mode}")


def _sample_trunk_diameter(params, neurite_type, rng=np.random):
@@ -180,7 +181,7 @@ def _sample_daughter_diameters(section, params, params_tree, rng=np.random):
rng=rng,
)

reduction_factor = morph_funcs.diameter_power_relation_factor(
reduction_factor = morph_functions.diameter_power_relation_factor(
diameter_power_relation, sibling_ratio
)

@@ -220,7 +221,7 @@ def _diametrize_section(section, initial_diam, taper, min_diam=0.07, max_diam=10
min_diam (float): minimum diameter
max_diam (float): maximum diameter
"""
diams = polynomial.polyval(morph_funcs.lengths_from_origin(section), [initial_diam, taper])
diams = polynomial.polyval(morph_functions.lengths_from_origin(section), [initial_diam, taper])
section.diameters = np.clip(diams, min_diam, max_diam)


@@ -235,7 +236,7 @@ def _diametrize_tree(neurite, params, params_tree, rng=np.random):
Returns:
bool: True if some terminal diameters are still larger than the allowed maximum, False otherwise
"""
params_tree["tot_length"] = morph_funcs.get_total_length(neurite)
params_tree["tot_length"] = morph_functions.get_total_length(neurite)
max_diam = params["terminal_diameters"][params_tree["neurite_type"]]["params"]["max"]
wrong_tips = False
active = deque([neurite[0]])
@@ -285,7 +286,7 @@ def _diametrize_neuron(params_tree, neuron, params, neurite_types, config, rng=n
neurite_type (str or morphio.SectionType): the neurite type to consider
config (dict): general configuration parameters
"""
# pylint: disable=too-many-locals
# pylint: disable=too-many-locals, too-many-branches
major_sections = set()
if params_tree["with_asymmetry"]:
# Get sections on the major branch
@@ -308,15 +309,24 @@ def _diametrize_neuron(params_tree, neuron, params, neurite_types, config, rng=n
n_tries = 0
trunk_diam_frac = 1.0
n_tries_step = 1
while wrong_tips:
trunk_diam = trunk_diam_frac * _sample_trunk_diameter(params, neurite_type, rng=rng)

if trunk_diam < 0.01:
trunk_diam = 1.0
L.warning("sampled trunk diameter < 0.01, so use 1 instead")

params_tree["trunk_diam"] = trunk_diam
wrong_tips = _diametrize_tree(neurite, params, params_tree, rng=rng)
_params = deepcopy(params) if "taper_increase" in params_tree else params
while wrong_tips:
if "taper_increase" in params_tree:
_params["tapers"][neurite_type]["params"]["scale"] *= params_tree[
"taper_increase"
]
params_tree["trunk_diam"] = neurite[0].diameters[0]
else:
trunk_diam = trunk_diam_frac * _sample_trunk_diameter(
_params, neurite_type, rng=rng
)
if trunk_diam < 0.01:
trunk_diam = 1.0
L.warning("sampled trunk diameter < 0.01, so use 1 instead")
params_tree["trunk_diam"] = trunk_diam

wrong_tips = _diametrize_tree(neurite, _params, params_tree, rng=rng)

# if we can't get a good model, reduce the trunk diameter progressively
if n_tries > N_TRIES_BEFORE_REDUC * n_tries_step:
@@ -349,8 +359,16 @@ def _select_model(model):
params_tree["mode_diameter_power_relation"] = "generic"
params_tree["with_asymmetry"] = True
params_tree["reduction_factor_max"] = 3.0
elif model == "neurite_based":
params_tree = {}
params_tree["mode_sibling"] = "threshold"
params_tree["mode_diameter_power_relation"] = "threshold"
params_tree["with_asymmetry"] = True
params_tree["reduction_factor_max"] = 1.0
params_tree["taper_increase"] = 1.02

else:
raise DiameterSynthesisError("Unknown diameter model: {}".format(model))
raise DiameterSynthesisError(f"Unknown diameter model: {model}")

return partial(_diametrize_neuron, params_tree)

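The new "neurite_based" model retries differently from "generic": instead of resampling the trunk diameter when some tips stay too thick, it keeps the trunk diameter measured on the neurite and multiplies the taper scale by `taper_increase` (1.02) on every retry. Below is a minimal sketch of that retry idea only; the names are illustrative and this is not the library API.

TAPER_INCREASE = 1.02  # value set for "neurite_based" in _select_model above

def retry_with_growing_taper(initial_scale, diametrize_once, max_tries=100):
    """Re-run diametrization with a taper scale grown by TAPER_INCREASE until no tip is too wide."""
    scale = initial_scale
    for _ in range(max_tries):
        if not diametrize_once(scale):  # diametrize_once returns True while some tips are too wide
            return scale
        scale *= TAPER_INCREASE
    return scale

# Toy usage with a stand-in check that succeeds once the scale is large enough.
final_scale = retry_with_growing_taper(0.5, lambda scale: scale < 0.6)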
2 changes: 1 addition & 1 deletion diameter_synthesis/build_models.py
@@ -47,7 +47,7 @@ def _get_model_builder(config):
"tapers": ["exponnorm", None],
}
else:
raise DiameterSynthesisError("model not understood {}".format(model))
raise DiameterSynthesisError(f"model not understood {model}")

all_models[model] = partial(build_single_model, distribution_types)
return all_models
2 changes: 2 additions & 0 deletions diameter_synthesis/distribution_fitting.py
@@ -25,6 +25,8 @@ def _truncate(sample_func, min_value, max_value):
"""Ensure sample is within bounds."""
sample = sample_func()
n_tries = 0
if min_value == max_value:
return min_value
while sample > max_value or sample < min_value:
sample = sample_func()
n_tries += 1
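The two added lines guard `_truncate` against degenerate bounds: when the fitted minimum and maximum coincide, a continuous sampler essentially never lands exactly on that value, so the rejection loop would spin forever. A rough, self-contained sketch of the idea follows; only the bounds logic mirrors the function above, and the handling after too many failed draws is an assumption since it is not shown in this hunk.

import numpy as np

def truncated_sample(sample_func, min_value, max_value, max_tries=100):
    """Redraw from sample_func until the value falls inside [min_value, max_value]."""
    if min_value == max_value:
        return min_value  # degenerate bounds: return the only admissible value
    sample = sample_func()
    n_tries = 0
    while sample > max_value or sample < min_value:
        sample = sample_func()
        n_tries += 1
        if n_tries > max_tries:
            raise ValueError("could not draw a sample within the given bounds")
    return sample

rng = np.random.default_rng(0)
print(truncated_sample(lambda: rng.normal(1.0, 0.5), 0.5, 2.0))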
32 changes: 32 additions & 0 deletions diameter_synthesis/main.py
@@ -162,3 +162,35 @@ def run_diameters(config_file, models_params_file):
list(tqdm(pool.imap(worker, all_neurons), total=len(all_neurons)))
pool.close()
pool.join()


def diametrize_single_neuron(neuron, config=None, apical_point_sec_ids=None):
"""Diametrize single neuron by learning diameter model from it.
Args:
neuron (mophio.mut.Morphology): neuron to consider
config (dict): dict with entry 'model' and 'diameters' with corresponding dicts, if None,
default dict will be used
apical_point_sect_ids (list): list of apical points if any
"""
if config is None:
config = {
"model": {
"taper": {"max": 1e-06, "min": -0.1},
"terminal_threshold": 2.0,
"models": ["generic"],
"neurite_types": ["basal", "apical"],
},
"diameters": {
"models": ["generic"],
"n_samples": 1,
"neurite_types": ["basal", "apical"],
"seed": 0,
"trunk_max_tries": 100,
},
}

model_params = build_model([nm.load_neuron(neuron)], config["model"])
if apical_point_sec_ids is not None:
model_params["apical_point_sec_ids"] = apical_point_sec_ids
build_diameters(neuron, model_params, ["basal", "apical"], config["diameters"])
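A hypothetical usage sketch for the new helper: the file names are placeholders, the input type follows the docstring above (morphio.mut.Morphology), and the call presumably rewrites the neuron's diameters in place since build_diameters receives the same mutable morphology.

from morphio.mut import Morphology

from diameter_synthesis.main import diametrize_single_neuron

neuron = Morphology("my_cell.asc")  # placeholder input morphology
diametrize_single_neuron(neuron)    # fit a model on this cell, then rediametrize it
neuron.write("my_cell_rediametrized.asc")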
4 changes: 2 additions & 2 deletions diameter_synthesis/plotting.py
@@ -472,7 +472,7 @@ def make_cumulative_figures(
original_cells, diametrized_cells, feature1, feature2, neurite_types
)

figure_name = figname_prefix + "cumulative_{}_{}_{}".format(prefix1, basename1, basename2)
figure_name = f"{figname_prefix}_cumulative_{prefix1}_{basename1}_{basename2}"

fig.savefig(out_dir / (figure_name + ext), bbox_inches="tight")
plt.close(fig)
@@ -492,7 +492,7 @@ def make_cumulative_figures(
neurite_types,
auto_limit=False,
)
fname = "{}_{}{}".format(figure_name, original_cell.name, ext)
fname = f"{figure_name}_{original_cell.name}_{ext}"
f.savefig(
out_dir / (figure_name + "_individual") / (str(i) + "_" + fname),
bbox_inches="tight",
6 changes: 3 additions & 3 deletions diameter_synthesis/utils.py
@@ -30,13 +30,13 @@ def _create_morphologies_dict_dat(morph_path, mtypes_file="neurondb.dat"):
morph_name = pd.read_csv(mtypes_file, sep=r"\s+", header=None)
name_dict = defaultdict(list)
if not morph_name.empty:
first_name = morph_name.loc[0, 0]
first_name = morph_name.loc[0, 0] # pylint: disable=no-member
file_list = Path(morph_path).glob(first_name + "*")
try:
ext = next(file_list).suffix
except StopIteration as e:
raise DiameterSynthesisError(f"Could not find a file starting with {first_name}") from e
for morph in morph_name.values:
for morph in morph_name.values: # pylint: disable=no-member
name_dict[morph[2]] += [Path(morph_path) / (morph[0] + ext)]
return name_dict

@@ -96,7 +96,7 @@ def create_morphologies_dict(morph_path, mtypes_file=None):
L.info("found dat file")
return _create_morphologies_dict_dat(morph_path, mtypes_file=mtypes_file)
raise DiameterSynthesisError(
"neurondb file format {} not implemented".format(mtype_path.suffix)
f"neurondb file format {mtype_path.suffix} not implemented"
)

L.info("use all files as single mtype")
2 changes: 1 addition & 1 deletion diameter_synthesis/version.py
@@ -1,3 +1,3 @@
"""Package version."""
VERSION = "0.2.5" # pragma: no cover
VERSION = "0.2.6.dev0" # pragma: no cover
__version__ = VERSION # pragma: no cover
