Skip to content

Commit

Permalink
Merge branch 'master' into pyup-pin-nbsphinx-0.3.3
Browse files Browse the repository at this point in the history
  • Loading branch information
schlegelp committed Jul 4, 2018
2 parents fb1e606 + 3cbaa41 commit bf05709
Show file tree
Hide file tree
Showing 11 changed files with 308 additions and 67 deletions.
4 changes: 2 additions & 2 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,9 @@
# built documents.
#
# The short X.Y version.
version = '0.82'
version = '0.84'
# The full version, including alpha/beta/rc tags.
release = '0.82'
release = '0.84'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down
2 changes: 1 addition & 1 deletion docs/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
###### Requirements without Version Specifiers ######
numpydoc
nbsphinx==0.3.3
numpydoc==0.8.0
ipykernel
2 changes: 1 addition & 1 deletion pymaid/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "0.82"
__version__ = "0.84"

from pymaid import config

Expand Down
62 changes: 58 additions & 4 deletions pymaid/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,20 @@ def __init__(self, x, remote_instance=None, meta_data=None):
except BaseException:
pass

def __dir__(self):
    """Custom __dir__ that advertises lazily-computed neuron attributes.

    These attributes are resolved on demand by ``__getattr__`` and would
    otherwise be invisible to ``dir()`` and tab-completion. The result is
    deduplicated via ``set`` before returning.

    Returns
    -------
    list
        Union of the default ``dir()`` entries and the extra attribute
        names.
    """
    # NOTE: fixed a duplicate 'root' entry in this list (harmless due to
    # the set() below, but redundant).
    add_attributes = ['n_open_ends', 'n_branch_nodes', 'n_end_nodes',
                      'cable_length', 'root', 'neuron_name',
                      'nodes', 'annotations', 'partners', 'review_status',
                      'connectors', 'presynapses', 'postsynapses',
                      'gap_junctions', 'soma', 'tags',
                      'n_presynapses', 'n_postsynapses', 'n_connectors',
                      'bbox']

    return list(set(super().__dir__() + add_attributes))

def __getattr__(self, key):
# This is to catch empty neurons (e.g. after pruning)
if 'nodes' in self.__dict__ and \
Expand Down Expand Up @@ -335,6 +349,8 @@ def __getattr__(self, key):
elif key == 'tags':
self.get_skeleton()
return self.tags
elif key == 'sampling_resolution':
return self.n_nodes / self.cable_length
elif key == 'n_open_ends':
if 'nodes' in self.__dict__:
closed = self.tags.get('ends', []) \
Expand All @@ -344,36 +360,43 @@ def __getattr__(self, key):
+ self.tags.get('soma', [])
return len([n for n in self.nodes[self.nodes.type == 'end'].treenode_id.tolist() if n not in closed])
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'n_branch_nodes':
if 'nodes' in self.__dict__:
return self.nodes[self.nodes.type == 'branch'].shape[0]
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'n_end_nodes':
if 'nodes' in self.__dict__:
return self.nodes[self.nodes.type == 'end'].shape[0]
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'n_nodes':
if 'nodes' in self.__dict__:
return self.nodes.shape[0]
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'n_connectors':
if 'connectors' in self.__dict__:
return self.connectors.shape[0]
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'n_presynapses':
if 'connectors' in self.__dict__:
return self.connectors[self.connectors.relation == 0].shape[0]
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'n_postsynapses':
if 'connectors' in self.__dict__:
return self.connectors[self.connectors.relation == 1].shape[0]
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'cable_length':
if 'nodes' in self.__dict__:
Expand All @@ -384,6 +407,13 @@ def __getattr__(self, key):
w = nx.get_edge_attributes(self.graph, 'weight').values()
return sum(w) / 1000
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
elif key == 'bbox':
if 'nodes' in self.__dict__:
return self.nodes.describe().loc[['min','max'],['x','y','z']].values.T
else:
logger.info('No skeleton data available. Use .get_skeleton() to fetch.')
return 'NA'
else:
raise AttributeError('Attribute "%s" not found' % key)
Expand Down Expand Up @@ -470,7 +500,9 @@ def _clear_temp_attr(self, exclude=[]):
for a in [at for at in temp_att if at not in exclude]:
try:
delattr(self, a)
logger.debug('Neuron {}: {} cleared'.format(self.skeleton_id, a))
except BaseException:
logger.debug('Neuron {}: Unable to clear temporary attribute "{}"'.format(self.skeleton_id, a))
pass

temp_node_cols = ['flow_centrality', 'strahler_index']
Expand Down Expand Up @@ -844,10 +876,11 @@ def prune_distal_to(self, node, inplace=True):

for n in node:
prox = graph_utils.cut_neuron(x, n, ret='proximal')
# Reinitialise with proximal data
x.__init__(prox, x._remote_instance, x.meta_data)

# Clear temporary attributes is done by cut_neuron
# x._clear_temp_attr()
# Remove potential "left over" attributes (happens if we use a copy)
x._clear_temp_attr(exclude=['graph', 'igraph', 'type',
'classify_nodes'])

if not inplace:
return x
Expand Down Expand Up @@ -879,7 +912,11 @@ def prune_proximal_to(self, node, inplace=True):

for n in node:
dist = graph_utils.cut_neuron(x, n, ret='distal')
# Reinitialise with distal data
x.__init__(dist, x._remote_instance, x.meta_data)
# Remove potential "left over" attributes (happens if we use a copy)
x._clear_temp_attr(exclude=['graph', 'igraph', 'type',
'classify_nodes'])

# Clear temporary attributes is done by cut_neuron
# x._clear_temp_attr()
Expand Down Expand Up @@ -1431,13 +1468,28 @@ def __len__(self):
"""Use skeleton ID here, otherwise this is terribly slow."""
return len(self.skeleton_id)

def __dir__(self):
    """Custom __dir__ that advertises lazily-computed list attributes.

    These attributes are resolved on demand by ``__getattr__`` (most of
    them fan out to the contained neurons) and would otherwise be
    invisible to ``dir()`` and tab-completion. The result is deduplicated
    via ``set`` before returning.

    Returns
    -------
    list
        Union of the default ``dir()`` entries and the extra attribute
        names.
    """
    # NOTE: fixed a duplicate 'root' entry in this list (harmless due to
    # the set() below, but redundant).
    add_attributes = ['n_open_ends', 'n_branch_nodes', 'n_end_nodes',
                      'cable_length', 'root', 'neuron_name',
                      'nodes', 'annotations', 'partners', 'review_status',
                      'connectors', 'presynapses', 'postsynapses',
                      'gap_junctions', 'soma', 'tags',
                      'n_presynapses', 'n_postsynapses', 'n_connectors',
                      'skeleton_id', 'empty', 'shape', 'bbox']

    return list(set(super().__dir__() + add_attributes))

def __getattr__(self, key):
if key == 'shape':
return (self.__len__(),)
elif key in ['n_nodes', 'n_connectors', 'n_presynapses',
'n_postsynapses', 'n_open_ends', 'n_end_nodes',
'cable_length', 'tags', 'igraph', 'soma', 'root',
'segments', 'graph', 'n_branch_nodes', 'dps']:
'segments', 'graph', 'n_branch_nodes', 'dps',
'sampling_resolution']:
self.get_skeletons(skip_existing=True)
return np.array([getattr(n, key) for n in self.neurons])
elif key == 'neuron_name':
Expand All @@ -1455,6 +1507,8 @@ def __getattr__(self, key):
this_n['skeleton_id'] = n.skeleton_id
data.append(this_n)
return pd.concat(data, axis=0, ignore_index=True)
elif key == 'bbox':
return self.nodes.describe().loc[['min','max'],['x','y','z']].values.T
elif key == '_remote_instance':
all_instances = [
n._remote_instance for n in self.neurons if n._remote_instance != None]
Expand Down
5 changes: 2 additions & 3 deletions pymaid/fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -1578,9 +1578,8 @@ def get_connector_links(x, with_tags=False, chunk_size=50, remote_instance=None)
Parameters
----------
x : list of connector IDs | CatmaidNeuron | CatmaidNeuronList
Connector ID(s) to retrieve details for. If
CatmaidNeuron/List, will use their connectors.
x : int | CatmaidNeuron | CatmaidNeuronList
Neurons/Skeleton IDs to retrieve link details for.
with_tags : bool, optional
If True will also return dictionary of connector tags.
chunk_size : int, optional
Expand Down
22 changes: 14 additions & 8 deletions pymaid/graph_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -887,7 +887,12 @@ def reroot_neuron(x, new_root, inplace=False):


def cut_neuron(x, cut_node, ret='both'):
""" Split neuron at given point. Returns two new neurons.
""" Split neuron at given point and returns two new neurons.
Note
----
Split is performed between cut node and its parent node. However, cut node
will still be present in both resulting neurons.
Parameters
----------
Expand Down Expand Up @@ -970,23 +975,22 @@ def _cut_igraph(x, cut_node, ret):
g = x.igraph.copy()

# Get vertex index
cut_node = g.vs.find(node_id=cut_node).index
cut_ix = g.vs.find(node_id=cut_node).index

# Get edge to parent
e = g.es.find(_source=cut_node)
e = g.es.find(_source=cut_ix)

# Remove edge
g.delete_edges(e)

# Make graph undirected -> otherwise .decompose() throws an error
# This issue is fixed in the up-to-date branch of igraph-python
# (which is not on PYPI...)
# (which is not on PyPI O_o )
g.to_undirected(combine_edges='first')

# Get subgraph
# Get subgraph -> fastest way to get sets of nodes for subsetting
a, b = g.decompose(mode='WEAK')

# Important: a,b are now UNDIRECTED graphs -> we must not keep using them.
# IMPORTANT: a,b are now UNDIRECTED graphs -> we must not keep using them!

if x.root[0] in a.vs['node_id']:
dist_graph, prox_graph = b, a
Expand All @@ -1004,7 +1008,7 @@ def _cut_igraph(x, cut_node, ret):
dist._clear_temp_attr(exclude=['igraph', 'type', 'classify_nodes'])

if ret == 'proximal' or ret == 'both':
prox = subset_neuron(x, prox_graph.vs['node_id'], clear_temp=False)
prox = subset_neuron(x, prox_graph.vs['node_id'] + [cut_node], clear_temp=False)

# Change new root for dist
prox.nodes.loc[prox.nodes.treenode_id == cut_node, 'type'] = 'end'
Expand Down Expand Up @@ -1104,6 +1108,8 @@ def subset_neuron(x, subset, clear_temp=True, remove_disconnected=True,
Cut neuron at specific point.
"""
if isinstance(x, core.CatmaidNeuronList) and len(x) == 1:
x = x[0]

if not isinstance(x, core.CatmaidNeuron):
raise TypeError('Can only process data of type "CatmaidNeuron", not\
Expand Down

0 comments on commit bf05709

Please sign in to comment.