astrocytes + small cleanup
Change-Id: I787cf4e5e48d0d785f34e6ef04a73720a460c12c
arnaudon committed Dec 9, 2019
1 parent 7577d3b commit e3e37e3
Showing 7 changed files with 206 additions and 85 deletions.
2 changes: 1 addition & 1 deletion diameter_synthesis/app/analysis.py
@@ -81,7 +81,7 @@ def make_figures(original_cells, diametrized_cells, feature1, feature2):
print('...done.')

feature_pairs = [
- #('segment_radial_distances', 'segment_volumes'),
+ ('segment_radial_distances', 'segment_volumes'),
#('section_radial_distances', 'section_areas'),
('section_path_distances', 'section_areas'),
#('section_branch_orders', 'section_areas'),
79 changes: 57 additions & 22 deletions diameter_synthesis/build_diameters.py
@@ -25,7 +25,6 @@
from functools import partial

TRUNK_FRAC_DECREASE = 0.1
- TRUNK_MAX_TRIES = 90

##################################
## Build diameters from a model ##
@@ -40,9 +39,11 @@ def build_diameters(models, models_params, morphologies_dict, neurite_types, new
if model == 'M0':
all_models[model] = diametrize_model_generic
if model == 'M1':
- all_models[model] = diametrize_model_generic
+ all_models[model] = diametrize_model_apical
if model == 'M2':
all_models[model] = diametrize_model_apical
+ if model == 'M3':
+ all_models[model] = diametrize_model_astrocyte

# collect neuron paths and mtypes
for model in models:
@@ -92,10 +93,44 @@ def build_diam_pool(all_models, model, models_params, neurite_types, extra_param
io.save_neuron(neuron, model, new_morph_path)
if plot:
folder = 'shapes_' + os.path.basename(new_morph_path[:-1])
- plotting.plot_diameter_diff(os.path.splitext(fname)[0], morph_path, new_morph_path,
+ plotting.plot_diameter_diff(os.path.splitext(fname)[0], morph_path, neuron,
model, neurite_types, folder=folder, ext = ext)


def diametrize_model_astrocyte(neuron, params, neurite_types, extra_params):
'''Corrects the diameters of a morphio-neuron according to the astrocyte model.
Starts from the root and moves towards the tips.
'''

for neurite_type in neurite_types:
neurites = (neurite for neurite in neuron.neurites if neurite.type ==
STR_TO_TYPES[neurite_type])

for neurite in neurites:

wrong_tips = True
n_tries = 0
trunk_diam_frac = 1.
n_tries_step = 1
while wrong_tips:
# sample a trunk diameter
trunk_diam = trunk_diam_frac * get_trunk_diameter(neurite, params['trunk_diameter'][neurite_type])
# try to diametrize the neurite
wrong_tips = diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='threshold', mode_rall='generic', sibling_threshold=extra_params['threshold'][neurite_type], rall_threshold=extra_params['threshold'][neurite_type], with_asymmetry=True, no_taper=False, reduction_factor_max=2.0)

# if we can't get a good model, reduce the trunk diameter progressively
n_tries += 1
if n_tries > 2 * n_tries_step:  # if we keep failing, slightly reduce the trunk diameter
trunk_diam_frac -= TRUNK_FRAC_DECREASE
n_tries_step += 1

# don't retry too many times; keep the latest attempt
if n_tries > extra_params['trunk_max_tries'] and extra_params['trunk_max_tries'] > 1:
print('max tries reached with', neurite_type)
wrong_tips = False
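
The diametrize_model_* functions above all share the same retry strategy: sample a trunk diameter, diametrize the tree, and, if the sampled terminal diameters cannot be reached, shrink the trunk fraction and try again, giving up after extra_params['trunk_max_tries'] attempts. The snippet below is a minimal, self-contained sketch of that loop; sample_trunk and diametrize_once are hypothetical stand-ins for get_trunk_diameter and diametrize_tree, and the extra_params layout shown is only an assumption based on the keys used in this file.

import random

TRUNK_FRAC_DECREASE = 0.1  # fraction removed from the trunk scale after repeated failures

def diametrize_with_retries(sample_trunk, diametrize_once, extra_params):
    """Sketch of the retry loop: shrink the trunk diameter until the tips fit."""
    trunk_frac = 1.0
    n_tries = 0
    n_tries_step = 1
    while True:
        trunk_diam = trunk_frac * sample_trunk()
        wrong_tips = diametrize_once(trunk_diam)
        if not wrong_tips:
            return trunk_diam  # success: the sampled terminal diameters are reachable
        n_tries += 1
        if n_tries > 2 * n_tries_step:  # keep failing: reduce the trunk a bit more
            trunk_frac -= TRUNK_FRAC_DECREASE
            n_tries_step += 1
        if n_tries > extra_params['trunk_max_tries']:  # give up, keep the latest attempt
            return trunk_diam

# toy usage: the tips are "wrong" while the trunk is thicker than 2 um
extra_params = {'trunk_max_tries': 100}
print(diametrize_with_retries(lambda: random.uniform(1.0, 3.0), lambda d: d > 2.0, extra_params))
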



def diametrize_model_generic(neuron, params, neurite_types, extra_params):
'''Corrects the diameters of a morphio-neuron according to the model.
Starts from the root and moves towards the tips.
@@ -116,8 +151,7 @@ def diametrize_model_generic(neuron, params, neurite_types, extra_params):
# sample a trunk diameter
trunk_diam = trunk_diam_frac * get_trunk_diameter(neurite, params['trunk_diameter'][neurite_type])
# try to diametrize the neurite
- wrong_tips = diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='threshold', mode_rall='generic',
- sibling_threshold=extra_params['threshold'][neurite_type], rall_threshold=extra_params['threshold'][neurite_type], with_asymmetry=True, no_taper=False)
+ wrong_tips = diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='generic', mode_rall='generic', sibling_threshold=extra_params['threshold'][neurite_type], rall_threshold=extra_params['threshold'][neurite_type], with_asymmetry=True, no_taper=False, reduction_factor_max=1.0)

# if we can't get a good model, reduce the trunk diameter progressively
n_tries += 1
@@ -126,7 +160,7 @@ def diametrize_model_generic(neuron, params, neurite_types, extra_params):
n_tries_step += 1

# don't retry too many times; keep the latest attempt
- if n_tries > TRUNK_MAX_TRIES:
+ if n_tries > extra_params['trunk_max_tries'] and extra_params['trunk_max_tries'] > 1:
print('max tries reached with', neurite_type)
wrong_tips = False

@@ -151,8 +185,7 @@ def diametrize_model_apical(neuron, params, neurite_types, extra_params):
# sample a trunk diameter
trunk_diam = trunk_diam_frac * get_trunk_diameter(neurite, params['trunk_diameter'][neurite_type])
# try to diametrize the neurite
- wrong_tips = diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='threshold', mode_rall='threshold',
- sibling_threshold=extra_params['threshold'][neurite_type], rall_threshold=extra_params['threshold'][neurite_type], with_asymmetry=True, no_taper=True)
+ wrong_tips = diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='threshold', mode_rall='threshold', sibling_threshold=extra_params['threshold'][neurite_type], rall_threshold=extra_params['threshold'][neurite_type], with_asymmetry=True, no_taper=True, reduction_factor_max=1.0)

# if we can't get a good model, reduce the trunk diameter progressively
n_tries += 1
@@ -161,11 +194,10 @@ def diametrize_model_apical(neuron, params, neurite_types, extra_params):
n_tries_step += 1

# don't retry too many times; keep the latest attempt
- if n_tries > TRUNK_MAX_TRIES:
+ if n_tries > extra_params['trunk_max_tries'] and extra_params['trunk_max_tries'] > 1:
print('max tries reached with', neurite_type)
wrong_tips = False


def get_sibling_ratio(section, params, mode='generic', tot_length=1., threshold=0.3):
"""return a sampled sibling ratio"""
if mode == 'generic':
@@ -240,11 +272,9 @@ def get_taper(section, params, no_taper=False):
return taper


- def get_daughter_diameters(section, terminal_diam, params, neurite_type, mode_sibling='generic', mode_rall='generic', tot_length=1, sibling_threshold=0.3, rall_threshold=0.3, with_asymmetry=False):
+ def get_daughter_diameters(section, terminal_diam, params, neurite_type, mode_sibling='generic', mode_rall='generic', tot_length=1, sibling_threshold=0.3, rall_threshold=0.3, with_asymmetry=False, reduction_factor_max=3.0):
""" return daughter diamters from parent d0 """

# first find a reduction factor
- reduction_factor_max = 1.0 # if set to larger than 1, we allow increase of diameters
reduction_factor = reduction_factor_max + 1.0
while reduction_factor > reduction_factor_max: # try until we get a reduction of diameter in the branching

@@ -288,7 +318,7 @@ def get_daughter_diameters(section, terminal_diam, params, neurite_type, mode_si
return diams
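
For context, the relation between a parent diameter and its daughters can be sketched with two of the quantities sampled above: a sibling ratio s = d2/d1 and a Rall-type rule d0^p = d1^p + d2^p. The exponent and the exact rule below are illustrative assumptions, not necessarily the formulas behind the rall_deviation model; the real get_daughter_diameters also resamples until the reduction factor d1/d0 stays below reduction_factor_max.

def daughter_diameters(d0, sibling_ratio, rall_power=1.5):
    """Illustrative only: split a parent diameter d0 into two daughters
    with d2/d1 = sibling_ratio, assuming d0**p = d1**p + d2**p."""
    d1 = d0 / (1.0 + sibling_ratio ** rall_power) ** (1.0 / rall_power)
    d2 = sibling_ratio * d1
    return d1, d2

# a 2 um parent with a nearly symmetric branch point
print(daughter_diameters(2.0, sibling_ratio=0.8))
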


- def diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='generic', mode_rall='generic', sibling_threshold=0.3, rall_threshold=0.3, with_asymmetry=False, no_taper=False):
+ def diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='generic', mode_rall='generic', sibling_threshold=0.3, rall_threshold=0.3, with_asymmetry=False, no_taper=False, reduction_factor_max=1.0):
""" diametrize a single tree """

# initialise status variables
@@ -313,12 +343,11 @@ def diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='gen
taper = get_taper(neurite, params['taper'][neurite_type], no_taper=no_taper)

# diametrize a section
- diametrize_section(section, init_diam, taper=taper, min_diam=terminal_diam)
+ diametrize_section(section, init_diam, taper=taper, min_diam=terminal_diam, max_diam=trunk_diam)

# if the branching point has children, keep looping
if len(section.children) > 0:
- diams = get_daughter_diameters(section, terminal_diam, neurite_type=neurite_type, params=params, mode_sibling=mode_sibling,
- mode_rall=mode_rall, tot_length=tot_length, sibling_threshold=sibling_threshold, with_asymmetry=with_asymmetry)
+ diams = get_daughter_diameters(section, terminal_diam, neurite_type=neurite_type, params=params, mode_sibling=mode_sibling, mode_rall=mode_rall, tot_length=tot_length, sibling_threshold=sibling_threshold, with_asymmetry=with_asymmetry, reduction_factor_max=reduction_factor_max)

# set diameters
for i, ch in enumerate(section.children):
@@ -332,24 +361,30 @@ def diametrize_tree(neurite, params, neurite_type, trunk_diam, mode_sibling='gen
return wrong_tips


- def diametrize_section(section, initial_diam, taper, min_diam=0.07, max_diam=100.):
+ def diametrize_section(section, initial_diam, taper, min_diam=0.07, max_diam=10.):
'''Corrects the diameters of a section'''

#max_diam = np.clip(np.random.normal(1.7, 1.0), 1.2, 2.5)
#min_diam = np.clip(np.random.normal(0.6, 0.5), 0.3, 0.8)

diams = [initial_diam]

# if the initial diameter is not in the range of the sampled terminal diameters, just reset it
#fact = 1.0
#fact2 = 1.0

if initial_diam < min_diam:
min_diam = initial_diam

- if initial_diam > max_diam:
- max_diam = initial_diam
+ #if initial_diam > max_diam:
+ # max_diam = initial_diam

# the length of each segment will be used to scale the tapering
lengths = [0] + utils.section_lengths(section)

diams = polynomial.polyval(lengths, [initial_diam, taper])
- diams[diams < min_diam] = min_diam
- diams[diams > max_diam] = max_diam
+ diams = np.clip(diams, min_diam, max_diam)

set_diameters(section, np.array(diams, dtype=np.float32))
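
As a side note, the per-section rule applied here is just a linear taper in path length, evaluated with numpy's polynomial module and clipped to [min_diam, max_diam]. The stand-alone sketch below reproduces it under the assumption that the per-point path distances along the section are known (the real code derives them from utils.section_lengths).

import numpy as np
from numpy.polynomial import polynomial

def taper_section_diameters(path_lengths, initial_diam, taper, min_diam=0.07, max_diam=10.0):
    """diam(l) = initial_diam + taper * l, clipped to the allowed range."""
    lengths = np.asarray([0.0] + list(path_lengths))
    diams = polynomial.polyval(lengths, [initial_diam, taper])
    # as above, never raise the lower bound past the starting diameter itself
    return np.clip(diams, min(min_diam, initial_diam), max_diam)

# toy section with points at 2, 5 and 9 um from the section start, tapering down
print(taper_section_diameters([2.0, 5.0, 9.0], initial_diam=1.5, taper=-0.05))
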


110 changes: 106 additions & 4 deletions diameter_synthesis/build_models.py
@@ -300,7 +300,108 @@ def sampling_model_trunk_path(morphologies, neurite_types, extra_params, tqdm_di
tapers_models[neurite_type]['sequential'] = tapers_sequential
tapers_models[neurite_type]['params'] = fit_distribution(
tapers[neurite_type], tapers_models[neurite_type]['distribution'], seq=tapers_sequential, extra_params=extra_params)
- # min_sample_num = extra_params['trunk_min_sample_num'][neurite_type])

# collect all models in one dictionary
all_models = {
'sibling_ratio': sibling_ratio_models,
'rall_deviation': rall_deviation_models,
'terminal_diameter': terminal_diameters_models,
'trunk_diameter': trunk_diameters_models,
'taper': tapers_models
}

all_data = {
'sibling_ratio': sibling_ratios,
'rall_deviation': rall_deviations,
'terminal_diameter': terminal_diameters,
'trunk_diameter': trunk_diameters,
'taper': tapers
}

return all_models, all_data
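
For orientation, every sampling_model_* function returns the same pair of nested dictionaries: all_models maps each quantity ('sibling_ratio', 'rall_deviation', 'terminal_diameter', 'trunk_diameter', 'taper') and neurite type to a fitted distribution, while all_data holds the raw samples. The shape is sketched below with placeholder numbers; 'basal' stands in for whichever neurite types are requested.

example_models = {
    'trunk_diameter': {
        'basal': {
            'distribution': 'exponnorm',
            'sequential': None,
            'params': {'a': 1.2, 'loc': 0.8, 'scale': 0.3,
                       'min': 0.4, 'max': 2.5, 'num_value': 120},
        },
    },
}
example_data = {'trunk_diameter': {'basal': [0.9, 1.1, 1.4]}}
print(example_models['trunk_diameter']['basal']['params']['scale'])
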


def sampling_model_astrocyte(morphologies, neurite_types, extra_params, tqdm_disable=False):
""" test for sampling models """

sibling_sequential = None
rall_deviation_sequential = None
terminal_diameters_sequential = None
trunk_diameters_sequential = None
tapers_sequential = None

# initialise dictionaries for collecting morphological quantities
sibling_ratios = {}
rall_deviations = {}
terminal_diameters = {}
trunk_diameters = {}
tapers = {}
for neurite_type in neurite_types:
sibling_ratios[neurite_type] = []
rall_deviations[neurite_type] = []
terminal_diameters[neurite_type] = []
trunk_diameters[neurite_type] = []
tapers[neurite_type] = []

# loop first over all morphologies (TODO: could be parallelized)
i = 0
for neuron in tqdm(morphologies, disable=tqdm_disable):
# for each neurite in the neuron
for neurite in neuron.neurites:
# for each type of neurite we consider
for neurite_type in neurite_types:

if neurite.type == STR_TO_TYPES[neurite_type]:

# compute here all the morphological values from the neurite
sibling_ratios[neurite_type] += morph_funcs.sibling_ratios(neurite, seq=sibling_sequential)
rall_deviations[neurite_type] += morph_funcs.rall_deviations(neurite, seq=rall_deviation_sequential, bounds = [0, 10])
terminal_diameters[neurite_type] += morph_funcs.min_diameter(neurite)
trunk_diameters[neurite_type] += morph_funcs.max_diameter(neurite)
tapers[neurite_type] += morph_funcs.taper(neurite, params=extra_params['taper'], seq=tapers_sequential)

# fit each of the collected morphological quantities
sibling_ratio_models = {}
rall_deviation_models = {}
terminal_diameters_models = {}
trunk_diameters_models = {}
tapers_models = {}
for neurite_type in neurite_types:

# sibling ratio
sibling_ratio_models[neurite_type] = {}
sibling_ratio_models[neurite_type]['distribution'] = 'expon_rev'
sibling_ratio_models[neurite_type]['sequential'] = sibling_sequential
sibling_ratio_models[neurite_type]['params'] = fit_distribution(
sibling_ratios[neurite_type], sibling_ratio_models[neurite_type]['distribution'], seq=sibling_sequential, extra_params=extra_params)

# Rall deviation
rall_deviation_models[neurite_type] = {}
rall_deviation_models[neurite_type]['distribution'] = 'exponnorm'
rall_deviation_models[neurite_type]['sequential'] = rall_deviation_sequential
rall_deviation_models[neurite_type]['params'] = fit_distribution(
rall_deviations[neurite_type], rall_deviation_models[neurite_type]['distribution'], seq=rall_deviation_sequential, extra_params=extra_params)

# terminal diameters
terminal_diameters_models[neurite_type] = {}
terminal_diameters_models[neurite_type]['distribution'] = 'exponnorm'
terminal_diameters_models[neurite_type]['sequential'] = terminal_diameters_sequential
terminal_diameters_models[neurite_type]['params'] = fit_distribution(
terminal_diameters[neurite_type], terminal_diameters_models[neurite_type]['distribution'], seq=terminal_diameters_sequential, extra_params=extra_params)

# trunk diameters
trunk_diameters_models[neurite_type] = {}
trunk_diameters_models[neurite_type]['distribution'] = 'exponnorm'
trunk_diameters_models[neurite_type]['sequential'] = trunk_diameters_sequential
trunk_diameters_models[neurite_type]['params'] = fit_distribution(
trunk_diameters[neurite_type], trunk_diameters_models[neurite_type]['distribution'], seq=trunk_diameters_sequential, extra_params=extra_params)

# taper
tapers_models[neurite_type] = {}
tapers_models[neurite_type]['distribution'] = 'exponnorm'
tapers_models[neurite_type]['sequential'] = tapers_sequential
tapers_models[neurite_type]['params'] = fit_distribution(
tapers[neurite_type], tapers_models[neurite_type]['distribution'], seq=tapers_sequential, extra_params=extra_params)

# collect all models in one dictionary
all_models = {
@@ -403,7 +504,6 @@ def sampling_model_generic(morphologies, neurite_types, extra_params, tqdm_disab
tapers_models[neurite_type]['sequential'] = tapers_sequential
tapers_models[neurite_type]['params'] = fit_distribution(
tapers[neurite_type], tapers_models[neurite_type]['distribution'], seq=tapers_sequential, extra_params=extra_params)
- # min_sample_num = extra_params['trunk_min_sample_num'][neurite_type])

# collect all models in one dictionary
all_models = {
@@ -432,10 +532,12 @@ def build_models(models, morphologies, neurite_types, extra_params, fig_folder='
for model in models:
if model == 'M0':
all_models[model] = sampling_model_generic
- if model == 'M1':
+ elif model == 'M1':
all_models[model] = sampling_model_sibling_asymmetry
- if model == 'M2':
+ elif model == 'M2':
all_models[model] = sampling_model_sibling_asymmetry_trunk
+ elif model == 'M3':
+ all_models[model] = sampling_model_astrocyte

tqdm_1, tqdm_2 = utils.tqdm_disable(morphologies)  # to have a single progress bar

6 changes: 3 additions & 3 deletions diameter_synthesis/distribution_fitting.py
@@ -195,7 +195,7 @@ def fit_distribution_single(data, distribution, p=5):
return {'a': 0., 'loc': 0., 'scale': 0., 'min': 0., 'max': 0.1, 'num_value': len(data)}


- def fit_distribution(data, distribution, min_sample_num=20, p=5, n_bins=10, seq=None, extra_params=None, name = 'test', threshold = 1.):
+ def fit_distribution(data, distribution, min_sample_num=10, p=5, n_bins=10, seq=None, extra_params=None, name = 'test', threshold = 1.):
""" generic function to fit a distribution with scipy """
if not isinstance(seq, str): # if no sequential fitting needed
return fit_distribution_single(data, distribution, p=p)
@@ -223,12 +223,12 @@ def fit_distribution(data, distribution, min_sample_num=20, p=5, n_bins=10, seq=
for i in range(len(bins) - 1):
data_tpe = values[(tpes >= bins[i]) & (tpes < bins[i + 1])]  # select the values by their type
params[np.round((bins[i + 1] + bins[i]) / 2., ROUND)] = fit_distribution_single(data_tpe, distribution, p=p)
- return update_params_fit_distribution(params, orders=extra_params['orders'])
+ return update_params_fit_distribution(params)
else:
return {'a': 0., 'loc': 0., 'scale': 0., 'min': 0., 'max': 0.1, 'num_value': len(data), 'params_data': {'a': 0., 'loc': 0., 'scale': 0., 'min': 0., 'max': 0.1, 'num_value': len(data)}}
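
To make the fitting step concrete, here is a hedged, stand-alone approximation of what a single 'exponnorm' fit could produce, mirroring the result dictionary used throughout this module (keys 'a', 'loc', 'scale', 'min', 'max', 'num_value'). Treating p as a percentile bound is an assumption; the real fit_distribution_single may compute the bounds differently.

import numpy as np
import scipy.stats as st

def fit_exponnorm(values, p=5):
    """Fit an exponnorm distribution and record percentile bounds of the data."""
    values = np.asarray(values, dtype=float)
    a, loc, scale = st.exponnorm.fit(values)
    return {'a': a, 'loc': loc, 'scale': scale,
            'min': float(np.percentile(values, p)),
            'max': float(np.percentile(values, 100 - p)),
            'num_value': len(values)}

rng = np.random.default_rng(0)
print(fit_exponnorm(rng.exponential(1.0, 500) + rng.normal(2.0, 0.3, 500)))
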


- def update_params_fit_distribution(params_data, orders={'a': 1, 'loc': 1, 'scale': 1, 'min': 1, 'max': 1}):
+ def update_params_fit_distribution(params_data):
""" linear fit to model parameters as a function of a given quantity tpes_model
and update the model dictionary with the fits of parameters """
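
When a sequential quantity is requested, fit_distribution (above) bins the data by that quantity, fits each bin separately, and update_params_fit_distribution then fits each distribution parameter against the bin centre. A rough, self-contained sketch of that two-stage idea, assuming an exponnorm fit per bin and a plain least-squares line per parameter:

import numpy as np
import scipy.stats as st

def fit_params_vs_quantity(values, tpes, n_bins=10, min_sample_num=10):
    """Per-bin exponnorm fits followed by a linear fit of each parameter."""
    bins = np.linspace(tpes.min(), tpes.max(), n_bins + 1)
    centres, fits = [], []
    for lo, hi in zip(bins[:-1], bins[1:]):
        sel = values[(tpes >= lo) & (tpes < hi)]
        if len(sel) >= min_sample_num:
            centres.append(0.5 * (lo + hi))
            fits.append(st.exponnorm.fit(sel))
    fits = np.asarray(fits)
    # one (slope, intercept) pair per distribution parameter
    return {name: np.polyfit(centres, fits[:, i], 1)
            for i, name in enumerate(['a', 'loc', 'scale'])}

rng = np.random.default_rng(0)
tpes = rng.uniform(0.0, 5.0, 2000)
values = rng.normal(1.0 + 0.2 * tpes, 0.1)
print(fit_params_vs_quantity(values, tpes))
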

