improve docstrings & code clean-up
schlegelp committed Mar 17, 2019
1 parent cbf19c4 commit 87fe2fb
Showing 6 changed files with 51 additions and 44 deletions.
4 changes: 2 additions & 2 deletions pymaid/b3d.py
@@ -151,9 +151,9 @@ def __getattr__(self, key):
elif key == 'abutting':
return object_list(self._cn_selection_helper(3))
elif key == 'all':
- return self.neurons + self.connectors + self.soma
+ return self.neurons + self.connectors + self.soma
else:
- try:
+ try:
return getattr(self.all, key)
except:
raise AttributeError('Unknown attribute ' + key)
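The fallback in ``__getattr__`` above delegates any unknown attribute to the combined object list. A minimal, self-contained sketch of that pattern (hypothetical classes, not pymaid's actual implementation; the bare ``except`` is narrowed to ``AttributeError`` here):

```python
class ObjectList:
    def __init__(self, items):
        self.items = items

class Handler:
    @property
    def all(self):
        # stand-in for self.neurons + self.connectors + self.soma
        return ObjectList(['neuron_1', 'connector_1'])

    def __getattr__(self, key):
        # only called for attributes not found the normal way
        try:
            return getattr(self.all, key)
        except AttributeError:
            raise AttributeError('Unknown attribute ' + key)

h = Handler()
print(h.items)  # ['neuron_1', 'connector_1'] - delegated to h.all
```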
13 changes: 7 additions & 6 deletions pymaid/core.py
@@ -2594,8 +2594,8 @@ def to_selection(self, save_to='selection.json'):
Parameters
----------
save_to : str | None, optional
- Filename to save selection to. If not provided, will
- return the json data.
+ Filename to save selection to. If ``None``, will
+ return the json data instead.
"""

data = [dict(skeleton_id=int(n.skeleton_id),
@@ -2608,7 +2608,7 @@ def to_selection(self, save_to='selection.json'):
with open(save_to, 'w') as outfile:
json.dump(data, outfile)

- logger.info('Selection saved as {}.'.format(fname))
+ logger.info('Selection saved as {}.'.format(save_to))
else:
return data

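A possible usage sketch for the corrected ``to_selection()`` behaviour (hypothetical skeleton IDs, assuming a configured ``CatmaidInstance``):

```python
import pymaid

# Fetch some neurons (IDs are illustrative)
nl = pymaid.get_neuron([16, 57])

nl.to_selection('my_neurons.json')    # writes a CATMAID selection file
data = nl.to_selection(save_to=None)  # returns the JSON data instead
```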
@@ -2768,10 +2768,11 @@ def __getitem__(self, skid):
sel = [n for n in self.obj if str(n.skeleton_id) in skid]

# Reorder to keep in the order requested
- sel = sorted(sel, key=lambda x : np.where(skid == str(x.skeleton_id))[0][0])
+ sel = sorted(sel, key=lambda x: np.where(skid == str(x.skeleton_id))[0][0])

if len(sel) == 0:
- raise ValueError('No neuron with skeleton ID(s) {0}'.format(skid))
+ raise ValueError('No neuron(s) with given skeleton ID(s):'
+                  ' {0}'.format(skid))
elif len(sel) == 1:
return sel[0]
else:
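The reordering above makes ``__getitem__`` return neurons in the order their skeleton IDs were requested. A minimal sketch of the same logic on plain lists (hypothetical IDs):

```python
import numpy as np

skid = np.array(['57', '16'])  # requested order
sel = ['16', '57']             # order as returned by some query

# Sort results to match the requested order
sel = sorted(sel, key=lambda x: np.where(skid == str(x))[0][0])
print(sel)  # ['57', '16']
```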
@@ -3251,4 +3252,4 @@ def add_edge(edges, edge_points, coords, i, j):

def _convert_helper(x):
""" Helper function to convert x to CatmaidNeuron."""
- return CatmaidNeuron(x[0], remote_instance=x[1])
+ return CatmaidNeuron(x[0], remote_instance=x[1])
42 changes: 24 additions & 18 deletions pymaid/fetch.py
@@ -281,7 +281,7 @@ def make_global(self):
logger.info('Global CATMAID instance set. Caching is OFF.')

def fetch(self, url, post=None, desc='Fetching', callback=None, files=None,
- disable_pbar=False, leave_pbar=True, return_type='json'):
+ disable_pbar=False, leave_pbar=True, return_type='json'):
""" Requires the url to connect to and the variables for POST,
if any, in a dictionary.
@@ -415,14 +415,14 @@ def __repr__(self):

@property
def catmaid_version(self):
""" Version of CATMAID your server is running. """
""" Version of CATMAID your server is running. """

- return self.fetch(self._get_catmaid_version())['SERVER_VERSION']
+ return self.fetch(self._get_catmaid_version())['SERVER_VERSION']

@property
def available_projects(self):
""" List of projects hosted on your server. Depends on your user's
permission! """
permission! """

return pd.DataFrame(self.fetch(self._get_projects_url())).sort_values('id')

@@ -437,7 +437,7 @@ def image_stacks(self):
s.update(d)

# Return as DataFrame
- return pd.DataFrame(stacks).set_index('id')
+ return pd.DataFrame(stacks).set_index('id')

def _get_catmaid_version(self, **GET):
""" Use to parse url for retrieving CATMAID server version"""
@@ -1012,9 +1012,9 @@ def get_neuron(x, remote_instance=None, connector_flag=1, tag_flag=1,
return df

if df.shape[0] > 1:
- return core.CatmaidNeuronList(df, remote_instance=remote_instance,)
+ return core.CatmaidNeuronList(df, remote_instance=remote_instance)
else:
- return core.CatmaidNeuron(df.loc[0], remote_instance=remote_instance,)
+ return core.CatmaidNeuron(df.loc[0], remote_instance=remote_instance)


# This is for legacy reasons -> will remove eventually
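The two return statements above mean the return type depends on how many rows the resulting DataFrame has. A sketch, assuming a configured global ``CatmaidInstance`` and existing skeleton IDs:

```python
import pymaid

n = pymaid.get_neuron(16)         # single ID  -> CatmaidNeuron
nl = pymaid.get_neuron([16, 57])  # several IDs -> CatmaidNeuronList
```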
@@ -1481,7 +1481,7 @@ def get_partners(x, remote_instance=None, threshold=1, min_size=2, filt=[],
d]] + list(x), remote_instance)

df = pd.DataFrame(columns=['neuron_name', 'skeleton_id',
- 'num_nodes', 'relation'] + list(x))
+ 'num_nodes', 'relation'] + list(x))

# Number of synapses is returned as list of links with 0-5 confidence:
# {'skid': [0, 1, 2, 3, 4, 5]}
@@ -2586,7 +2586,7 @@ def remove_meta_annotations(remove_from, to_remove, remote_instance=None):
an = get_annotation_list(remote_instance=remote_instance)

# Get annotation IDs
- remove_from = utils._make_iterable(remove_from)
+ remove_from = utils._make_iterable(remove_from)
rm = an[an.annotation.isin(remove_from)]
if rm.shape[0] != len(remove_from):
missing = set(remove_from).difference(rm.annotation.values)
@@ -3143,7 +3143,7 @@ def get_annotated(x, remote_instance=None, include_sub_annotations=False,
logger.info('..... and NOT: {}'.format(','.join([str(s) for s in neg_ids])))

urls = remote_instance._get_annotated_url()

resp = remote_instance.fetch(urls, post=post, desc='Fetching')

return pd.DataFrame(resp['entities'])
@@ -3238,6 +3238,8 @@ def get_skids_by_annotation(annotations, remote_instance=None,
Use to retrieve neurons by combining various
search criteria. For example names, reviewers,
annotations, etc.
+ :func:`pymaid.get_annotated`
+     Use to retrieve entities (neurons and annotations).
"""

remote_instance = utils._eval_remote_instance(remote_instance)
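A possible usage sketch contrasting the two functions referenced in the new See Also entry (hypothetical annotation name, assuming a configured ``CatmaidInstance`` ``rm``):

```python
import pymaid

# rm = pymaid.CatmaidInstance(...)  # credentials omitted
skids = pymaid.get_skids_by_annotation('glomerulus DA1',
                                       remote_instance=rm)
# get_annotated returns entities - neurons *and* annotations:
entities = pymaid.get_annotated('glomerulus DA1', remote_instance=rm)
```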
@@ -3896,7 +3898,7 @@ def get_contributor_statistics(x, remote_instance=None, separate=False,
remote_instance : CATMAID instance, optional
If not passed directly, will try using global.
separate : bool, optional
- If true, stats are given per neuron.
+ If True, stats are given per neuron.
max_threads : int, optional
Maximum parallel data requests. Overrides
``CatmaidInstance.max_threads``.
@@ -4347,7 +4349,7 @@ def _constructor_helper(data, key, days):
@cache.undo_on_error
def get_nodes_in_volume(left, right, top, bottom, z1, z2, remote_instance=None,
coord_format='NM', resolution=(4, 4, 50)):
""" Retrieve treenodes in given bounding box.
""" Retrieve treenodes and connectors in given bounding box.
Parameters
----------
@@ -4706,7 +4708,7 @@ def find_neurons(names=None, annotations=None, volumes=None, users=None,
return

logger.info(
- 'Get all neurons with at least {0} nodes'.format(min_size))
+ 'Get all neurons with > {0} nodes'.format(min_size))
get_skeleton_list_GET_data = {'nodecount_gt': min_size}
remote_get_list_url = remote_instance._get_list_skeletons_url()
remote_get_list_url += '?%s' % urllib.parse.urlencode(
@@ -4778,7 +4780,7 @@ def get_neurons_in_volume(volumes, min_nodes=2, min_cable=1, intersect=False,
volumes : str | core.Volume | list of either
Single or list of CATMAID volumes.
min_nodes : int, optional
- Minimum node count for a neuron within given
+ Minimum node count for a neuron within given
volume(s).
min_cable : int, optional
Minimum cable length [nm] for a neuron within
@@ -4882,7 +4884,7 @@ def get_neurons_in_bbox(bbox, unit='NM', min_nodes=1, min_cable=1,
resolution of 35nm is assumed. Pass 'xy_res' and
'z_res' as ``**kwargs`` to override this.
min_nodes : int, optional
- Minimum node count for a neuron within given
+ Minimum node count for a neuron within given
bounding box.
min_cable : int, optional
Minimum cable length [nm] for a neuron within
@@ -4897,7 +4899,7 @@ def get_neurons_in_bbox(bbox, unit='NM', min_nodes=1, min_cable=1,
"""

- remote_instance = utils._eval_remote_instance(remote_instance)
+ remote_instance = utils._eval_remote_instance(remote_instance)

if isinstance(bbox, dict):
bbox = np.array([[bbox['left'], bbox['right']],
@@ -5089,7 +5091,9 @@ def get_paths(sources, targets, remote_instance=None, n_hops=2, min_synapses=1,
response, remote_instance=remote_instance, threshold=min_synapses)

# Get all paths between sources and targets
- all_paths = [p for s in sources for t in targets for p in nx.all_simple_paths(g, s, t, cutoff=max(n_hops)) if len(p) - 1 in n_hops]
+ all_paths = [p for s in sources for t in targets for p in
+              nx.all_simple_paths(g, s, t,
+                                  cutoff=max(n_hops)) if len(p) - 1 in n_hops]

if not return_graph:
return all_paths
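The comprehension above enumerates simple paths of up to ``max(n_hops)`` edges and keeps those whose hop count (``len(p) - 1``) is in the allowed set. A minimal sketch of the same filtering on a toy graph:

```python
import networkx as nx

g = nx.DiGraph([('a', 'b'), ('b', 'c'), ('a', 'c')])
n_hops = [2]

paths = [p for p in nx.all_simple_paths(g, 'a', 'c', cutoff=max(n_hops))
         if len(p) - 1 in n_hops]
print(paths)  # [['a', 'b', 'c']] - the direct 1-hop path is filtered out
```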
@@ -5150,7 +5154,7 @@ def get_volume(volume_name=None, remote_instance=None,
if isinstance(volume_name, type(None)):
logger.info('Retrieving list of available volumes.')
elif not isinstance(volume_name, (int, str, list, np.ndarray)):
- raise TypeError('Volume name must be str or list of str.')
+ raise TypeError('Volume name must be id (int), str or list of either, not {}.'.format(type(volume_name)))

volume_names = utils._make_iterable(volume_name)

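A possible usage sketch matching the accepted input types (hypothetical volume names, assuming a configured ``CatmaidInstance``):

```python
import pymaid

vol = pymaid.get_volume('LH_R')             # single volume by name
vols = pymaid.get_volume(['LH_R', 'AL_R'])  # several volumes at once
lst = pymaid.get_volume()                   # no name -> list of volumes
```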
@@ -5484,6 +5488,7 @@ def rename_neurons(x, new_names, remote_instance=None, no_prompt=False):

return

+
@cache.undo_on_error
def get_node_location(x, remote_instance=None):
""" Retrieves location for a set of tree- or connector nodes.
@@ -5519,6 +5524,7 @@ def get_node_location(x, remote_instance=None):

return df

+
@cache.undo_on_error
def get_label_list(remote_instance=None):
""" Retrieves all labels (TREENODE tags only) in a project.
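A possible usage sketch for ``get_node_location`` (hypothetical node IDs, assuming a configured global ``CatmaidInstance``):

```python
import pymaid

# Works for treenode and connector IDs alike
locs = pymaid.get_node_location([2342, 2343])
# -> DataFrame with one row per node, including x/y/z coordinates
```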
4 changes: 2 additions & 2 deletions pymaid/graph.py
@@ -335,7 +335,7 @@ def neuron2igraph(x):

def nx2neuron(g, neuron_name=None, skeleton_id=None, root=None):
""" Generate neuron object from NetworkX Graph.
This function will try to generate a neuron-like tree structure from
the Graph. Therefore the graph may not contain loops!
@@ -410,7 +410,7 @@ def nx2neuron(g, neuron_name=None, skeleton_id=None, root=None):
radii = nx.get_node_attributes(g, 'radius')
tn_table['radius'] = tn_table.index.map(lambda x: radii.get(x, -1))

- # Turn this into a Series
+ # Turn this into a Series
n = pd.Series({'skeleton_id': skeleton_id,
'neuron_name': neuron_name,
'nodes': tn_table.reset_index(),
8 changes: 4 additions & 4 deletions pymaid/graph_utils.py
@@ -561,13 +561,13 @@ def find_main_branchpoint(x, reroot_to_soma=False):
g = graph.neuron2nx(x)

# First, find longest path
- longest = nx.dag_longest_path(g)
+ longest = nx.dag_longest_path(g, weight='weight')

# Remove longest path
g.remove_nodes_from(longest)

# Find second longest path
- sc_longest = nx.dag_longest_path(g)
+ sc_longest = nx.dag_longest_path(g, weight='weight')

# Parent of the last node in sc_longest is the common branch point
bp = list(x.graph.successors(sc_longest[-1]))[0]
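The explicit ``weight='weight'`` makes clear that the two longest paths are measured by summed edge weight (i.e. cable length) rather than hop count. A hedged usage sketch, assuming a configured ``CatmaidInstance`` and an existing skeleton ID:

```python
import pymaid

n = pymaid.get_neuron(16)  # hypothetical skeleton ID
bp = pymaid.find_main_branchpoint(n, reroot_to_soma=True)
# `bp` is the treenode where the two longest branches diverge - e.g. a
# candidate point for splitting the neuron into its two main arbors:
# dist, prox = pymaid.cut_neuron(n, bp)
```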
@@ -824,11 +824,11 @@ def reroot_neuron(x, new_root, inplace=False):
return

if x.igraph and config.use_igraph:
- # Prevent warnings in the following code - querying paths between
+ # Prevent warnings in the following code - querying paths between
# unreachable nodes will otherwise generate a runtime warning
with warnings.catch_warnings():
warnings.simplefilter("ignore")

# Find paths to all roots
path = x.igraph.get_shortest_paths(x.igraph.vs.find(node_id=new_root),
[x.igraph.vs.find(node_id=r) for r in x.root])
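The suppressed warnings come from igraph complaining about unreachable vertex pairs, which are expected when querying paths to multiple roots. A toy reproduction (hypothetical graph):

```python
import warnings
import igraph as ig

g = ig.Graph(directed=True)
g.add_vertices(2)  # two vertices, no edge between them

with warnings.catch_warnings():
    warnings.simplefilter('ignore')
    print(g.get_shortest_paths(0, 1))  # [[]] - unreachable, warning silenced
```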
24 changes: 12 additions & 12 deletions pymaid/morpho.py
@@ -1127,7 +1127,7 @@ def stitch_neurons(*x, method='LEAFS', master='SOMA', tn_to_stitch=None):
""" Stitch multiple neurons together.
Uses minimum spanning tree to determine a way to connect all fragments
- while minimizing length (euclidean distance) of the new edges. Nodes
+ while minimizing length (euclidean distance) of the new edges. Nodes
that have been stitched will get a "stitched" tag.
Important
@@ -1146,11 +1146,11 @@ def stitch_neurons(*x, method='LEAFS', master='SOMA', tn_to_stitch=None):
(2) 'ALL': All treenodes are considered.
(3) 'NONE': Node and connector tables will simply
be combined without generating any new edges.
- The resulting neuron will have multiple roots.
+ The resulting neuron will have multiple roots.
master : 'SOMA' | 'LARGEST' | 'FIRST', optional
Sets the master neuron:
- (1) 'SOMA': The largest fragment with a soma
-     becomes the master neuron. If no neuron with
+ (1) 'SOMA': The largest fragment with a soma
+     becomes the master neuron. If no neuron with
soma, will pick the largest.
(2) 'LARGEST': The largest fragment becomes the
master neuron.
@@ -1159,7 +1159,7 @@ def stitch_neurons(*x, method='LEAFS', master='SOMA', tn_to_stitch=None):
tn_to_stitch : List of treenode IDs, optional
If provided, these treenodes will be preferentially
used to stitch neurons together. Overrides methods
- ``'ALL'`` or ``'LEAFS'``.
+ ``'ALL'`` or ``'LEAFS'``.
Returns
-------
@@ -1183,13 +1183,13 @@ def stitch_neurons(*x, method='LEAFS', master='SOMA', tn_to_stitch=None):
"""
method = str(method).upper()
- master = str(master).upper()
+ master = str(master).upper()

if method not in ['LEAFS', 'ALL', 'NONE']:
raise ValueError('Unknown method: %s' % str(method))

if master not in ['SOMA', 'LARGEST', 'FIRST']:
- raise ValueError('Unknown master: %s' % str(master))
+ raise ValueError('Unknown master: %s' % str(master))

# Compile list of individual neurons
x = utils._unpack_neurons(x)
@@ -1283,8 +1283,8 @@ def stitch_neurons(*x, method='LEAFS', master='SOMA', tn_to_stitch=None):
# calculating the minimum spanning tree
nx.set_edge_attributes(g, 0, 'weight')

- # If two nodes occupy the same position (e.g. after if fragments are the
- # result of cutting), they will have a distance of 0. Hence, we won't be
+ # If two nodes occupy the same position (e.g. if fragments are the
+ # result of cutting), they will have a distance of 0. Hence, we won't be
# able to simply filter by distance
nx.set_edge_attributes(g, False, 'new')

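The edge weighting above is what lets the minimum spanning tree pick the cheapest new connections: existing edges cost nothing, candidate edges cost their euclidean length. A minimal sketch of the idea (hypothetical fragments and coordinates):

```python
from itertools import combinations

import networkx as nx
import numpy as np

frags = {'A': {'a1': (0, 0, 0), 'a2': (10, 0, 0)},
         'B': {'b1': (12, 0, 0)}}

g = nx.Graph()
for nodes in frags.values():           # existing edges cost nothing
    ns = list(nodes)
    g.add_edges_from(zip(ns, ns[1:]), weight=0)
for fa, fb in combinations(frags, 2):  # candidate edges between fragments
    for na, ca in frags[fa].items():
        for nb, cb in frags[fb].items():
            d = np.linalg.norm(np.array(ca) - np.array(cb))
            g.add_edge(na, nb, weight=d)

mst = nx.minimum_spanning_tree(g)
print(sorted(mst.edges))  # [('a1', 'a2'), ('a2', 'b1')] - a2-b1 is cheapest
```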
@@ -1329,7 +1329,7 @@ def stitch_neurons(*x, method='LEAFS', master='SOMA', tn_to_stitch=None):
# Keep track of original master root
master_root = master.root[0]

- # Generate one big neuron
+ # Generate one big neuron
master.nodes = x.nodes
master.connectors = x.connectors
for n in x:
@@ -2091,8 +2091,8 @@ def time_machine(x, target, inplace=False, remote_instance=None):
nodes = pd.DataFrame(data[0], columns=['treenode_id', 'parent_id',
'user_id', 'x', 'y', 'z', 'radius',
'confidence', 'creation_timestamp',
- 'modified_timestamp'])
- nodes.parent_id = nodes.parent_id.astype(object)
+ 'modified_timestamp', 'ordering_by'])
+ nodes.loc[:, 'parent_id'] = nodes.parent_id.values.astype(object)
nodes.loc[~nodes.parent_id.isnull(), 'parent_id'] = nodes.loc[~nodes.parent_id.isnull(), 'parent_id'].map(int)
nodes.loc[nodes.parent_id.isnull(), 'parent_id'] = None

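The reworked ``parent_id`` handling keeps the column as dtype ``object`` so root nodes can carry a real ``None`` while all other parent IDs become plain ints. A minimal sketch (toy table; plain column assignment is used instead of ``.loc[:, ...]`` for robustness across pandas versions):

```python
import numpy as np
import pandas as pd

nodes = pd.DataFrame({'treenode_id': [1, 2, 3],
                      'parent_id': [np.nan, 1, 2]})  # node 1 is the root

# Cast to object so the column can mix ints and None
nodes['parent_id'] = nodes.parent_id.astype(object)
has_parent = nodes.parent_id.notnull()
nodes.loc[has_parent, 'parent_id'] = nodes.loc[has_parent, 'parent_id'].map(int)
nodes.loc[~has_parent, 'parent_id'] = None

print(nodes.parent_id.tolist())  # [None, 1, 2]
```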
