"""
CATMAID to Blender Import Script - connects to CATMAID servers and retrieves
skeleton data
Copyright (C) 2014 Philipp Schlegel
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import ssl
# Uncomment this if you're having problems with SSL certificate of your CATMAID server
# NOT recommended!
#ssl._create_default_https_context = ssl._create_unverified_context
import bpy, blf
import os
import re
import random
import time
import datetime
import urllib
import json
import math
import colorsys
import copy
import http.cookiejar as cj
import threading
import concurrent.futures
import asyncio
import mathutils
import sys
import numpy as np
try:
    from scipy.spatial import distance
    from scipy import cluster
except ImportError:
    print('Unable to import SciPy. Some functions will not work!')
try:
    import matplotlib.pyplot as plt
    import pylab
except ImportError:
    print('Unable to import matplotlib. Some functions will not work!')
import base64
import statistics
from bpy.types import Operator, AddonPreferences
from bpy_extras.io_utils import ImportHelper, ExportHelper
from bpy.props import FloatVectorProperty, FloatProperty, StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty
remote_instance = None
connected = False
#bl_info holds plugin info
bl_info = {
"name": "CATMAIDImport",
"author": "Philipp Schlegel",
"version": (6, 1, 0),
"for_catmaid_version": '2018.07.19-1ad1035b96',
"blender": (2, 7, 9),
"location": "Properties > Scene > CATMAID Import",
"description": "Imports Neuron from CATMAID server, Analysis tools, Export to SVG",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": "Object"}
class CATMAIDimportPanel(bpy.types.Panel):
"""Creates Import Menu in Properties -> Scene """
bl_label = "CATMAID Import"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "scene"
def draw(self, context):
layout = self.layout
#Version check panel
config = bpy.data.scenes[0].CONFIG_VersionManager
layout.label(text="Your Blender Script Version: %s" % str(round(config.current_version,3)))
if config.latest_version == 0:
layout.label(text="On Github: Please Connect...")
else:
layout.label(text="On Github: %s" % str(round(config.latest_version,3)))
layout.label(text="Tested for CATMAID Version: %s" % config.tested_catmaid_version)
if config.your_catmaid_server == "":
layout.label(text="Your CATMAID Server: Please Connect...")
else:
layout.label(text="Your CATMAID Server: %s" % config.your_catmaid_server)
if config.last_stable_version > config.current_version:
layout.label(text="Your are behind the last working", icon = 'ERROR')
layout.label(text=" version of the Script!")
layout.label(text="Please Download + Replace with the")
layout.label(text="latest Version of CATMAIDImport.py:")
layout.label(text="https://github.com/schlegelp/CATMAID-to-Blender")
elif config.latest_version > config.current_version and config.new_features != '':
layout.label(text="New Features in Latest Version: %s" % config.new_features)
if config.your_catmaid_server != 'Please connect...' and config.your_catmaid_server != config.tested_catmaid_version:
layout.label(text="Your server is running a version of CATMAID", icon = 'ERROR')
layout.label(text=" that may not be supported!")
if config.message != '':
print('Message from Github: %s' % config.message)
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("check.version", text = "Check Versions", icon ='VISIBLE_IPO_ON')
layout.label('CATMAID Import:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("connect.to_catmaid", text = "Connect 2 CATMAID", icon = 'PLUGIN')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("retrieve.neuron", text = "Import Neuron(s)", icon = 'ARMATURE_DATA')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("retrieve.partners", text = "Retrieve Partners", icon = 'AUTOMERGE_ON')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("retrieve.by_pairs", text = "Retrieve Paired", icon = 'MOD_ARRAY')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'retrieve.by_pairs'
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("retrieve.in_volume", text = "Retrieve in Volume", icon = 'BBOX')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("reload.neurons", text = "Reload Neurons", icon = 'FILE_REFRESH')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("retrieve.connectors", text = "Retrieve Connectors", icon = 'PMARKER_SEL')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'retrieve.connectors'
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("retrieve.tags", text = "Retrieve Tags", icon = 'SYNTAX_OFF')
layout.label('Materials:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("change.material", text = "Change Materials", icon ='COLOR_BLUE')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'change.material'
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("random.all_materials", text = "Randomize Color", icon ='COLOR')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("color.by_spatial", text = "By Spatial Distr.", icon ='ROTATECENTER')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'color.by_spatial'
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("color.by_annotation", text = "By Annotation", icon ='SORTALPHA')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("color.by_synapse_count", text = "By Synapse Count", icon ='IPO_QUART')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("color.by_pairs", text = "By Pairs", icon ='MOD_ARRAY')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'color.by_pairs'
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("color.by_strahler", text = "By Strahler Index", icon ='MOD_ARRAY')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'color.by_strahler'
layout.label(text="Export to SVG:")
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("exportall.to_svg", text = 'Export Morphology', icon = 'EXPORT')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("connectors.to_svg", text = 'Export Connectors', icon = 'EXPORT')
layout.label('Select:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("select.by_annotation", text = 'By Annotation', icon = 'BORDER_RECT')
layout.label('Analyze:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("analyze.statistics", text = 'Get Statistics', icon = 'FILE_TICK')
layout.label('Calculate Similarity:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator_context = 'INVOKE_DEFAULT'
row.operator("calc.similarity_modal", text = "Start Calculation", icon ='PARTICLE_PATH')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("calc.similarity_modal_settings", text = "Settings", icon ='MODIFIER')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'color.by_similarity'
layout.label('Volumes:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("export.volume", text = 'Export Mesh', icon = 'EXPORT')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("import.volume", text = 'Import Volume', icon = 'IMPORT')
layout.label('Animate:')
row = layout.row(align=True)
row.alignment = 'EXPAND'
row.operator("animate.history", text = 'History', icon = 'OUTLINER_DATA_CAMERA')
row.operator("display.help", text = "", icon ='QUESTION').entry = 'animate.history'
class VersionManager(bpy.types.PropertyGroup):
"""Class to hold version related properties
"""
current_version = bpy.props.FloatProperty(name="Your Script Version", default=0,min=0, description="Current Version of the Script you are using")
latest_version = bpy.props.FloatProperty(name="Latest Version", default=0,min=0, description="Latest Version on Github")
last_stable_version = bpy.props.FloatProperty(name="Last Stable Version", default=0,min=0, description="Last Stable Version of the Script")
message = bpy.props.StringProperty(name="Message", default="", description="Message from Github")
new_features = bpy.props.StringProperty(name="New Features", default="", description="New features in latest Version of the Script on Github")
your_catmaid_server = bpy.props.StringProperty(name="Your CATMAID Server Version", default='', description="Your CATMAID Server's Version")
tested_catmaid_version = bpy.props.StringProperty(name="Last tested CATMAID Version", default='', description="Last Version confirmed to Work with this Blender")
class get_version_info(Operator):
"""
Operator for Checking Addon Version on Github. Will be called when connection to CATMAID servers is attempted or when button 'check version' is invoked.
"""
bl_idname = "check.version"
bl_label = "Check Version on Github"
def execute(self,context):
self.check_version()
return{'FINISHED'}
    def check_version(self):
#Read current version from bl_info and convert from tuple into float
print('Checking Version on Github...')
current_version = str(bl_info['version'][0]) + '.'
for i in range(len(bl_info['version'])-1):
current_version += str(bl_info['version'][i+1])
current_version = float(current_version)
print('Current version of the Script: ', current_version)
try:
update_url = 'https://raw.githubusercontent.com/schlegelp/CATMAID-to-Blender/master/update.txt'
update_file = urllib.request.urlopen(update_url)
file_content = update_file.read().decode("utf-8")
latest_version = re.search('current_version.*?{(.*?)}',file_content).group(1)
last_stable = re.search('last_stable.*?{(.*?)}',file_content).group(1)
new_features = re.search('new_features.*?{(.*?)}',file_content).group(1)
message = re.search('message.*?{(.*?)}',file_content).group(1)
print('Latest version on Github: ', latest_version)
except:
print('Error fetching info on latest version')
self.report({'ERROR'},'Error fetching latest info')
latest_version = 0
last_stable = 0
new_features = ''
message = ''
tested_catmaid_version = str(bl_info['for_catmaid_version'])
print('This Script was tested with CATMAID Server Version: ', tested_catmaid_version)
try:
your_catmaid_server = remote_instance.fetch( remote_instance.djangourl('/version') )['SERVER_VERSION']
print('You are running CATMAID Server Version: ', your_catmaid_server)
except:
your_catmaid_server = 'Please connect...'
config = bpy.data.scenes[0].CONFIG_VersionManager
config.current_version = current_version
config.latest_version = float(latest_version)
config.last_stable_version = float(last_stable)
config.message = message
config.new_features = new_features
config.tested_catmaid_version = tested_catmaid_version
config.your_catmaid_server = your_catmaid_server
class CatmaidInstance:
""" A class giving access to a CATMAID instance.
"""
def __init__(self, server, authname, authpassword, authtoken):
self.server = server
self.authname = authname
self.authpassword = authpassword
self.authtoken = authtoken
self.opener = urllib.request.build_opener(urllib.request.HTTPRedirectHandler())
def djangourl(self, path):
""" Expects the path to lead with a slash '/'. """
return self.server + path
def auth(self, request):
if self.authname:
            base64string = base64.encodebytes(('%s:%s' % (self.authname, self.authpassword)).encode()).decode().replace('\n', '')
request.add_header("Authorization", "Basic %s" % base64string)
if self.authtoken:
request.add_header("X-Authorization", "Token {}".format(self.authtoken))
def fetch(self, url, post=None):
""" Requires the url to connect to and the variables for POST, if any, in a dictionary. """
if post:
data = urllib.parse.urlencode(post)
data = data.encode('utf-8')
            #If experiencing [SSL: CERTIFICATE_VERIFY_FAILED] errors, see the
            #(insecure!) ssl._create_unverified_context workaround at the top of this file.
            #Note: the 'unverifiable' flag below concerns cookie handling, not SSL verification.
            request = urllib.request.Request(url, data = data, unverifiable = False)
else:
request = urllib.request.Request(url)
self.auth(request)
response = self.opener.open(request)
return json.loads(response.read().decode("utf-8"))
#Use to parse url for retrieving stack infos
def get_stack_info_url(self, pid, sid):
return self.djangourl("/" + str(pid) + "/stack/" + str(sid) + "/info")
#Use to parse url for retrieving skeleton nodes (no info on parents or synapses, does need post data)
def get_skeleton_nodes_url(self, pid):
return self.djangourl("/" + str(pid) + "/treenode/table/list")
#Use to parse url for retrieving connectivity (does need post data)
def get_connectivity_url(self, pid):
return self.djangourl("/" + str(pid) + "/skeletons/connectivity" )
#Use to parse url for retrieving info connectors (does need post data)
def get_connector_details_url(self, pid):
return self.djangourl("/" + str(pid) + "/connector/skeletons" )
#Use to parse url for retrieving info connectors (does need GET data)
def get_connectors_url(self, pid):
return self.djangourl("/" + str(pid) + "/connectors/" )
#Use to parse url for names for a list of skeleton ids (does need post data: pid, skid)
def get_neuronnames(self, pid):
return self.djangourl("/" + str(pid) + "/skeleton/neuronnames" )
#Get user list for project
def get_user_list_url(self):
return self.djangourl("/user-list" )
#Use to parse url for a SINGLE neuron (will also give you neuronid)
def get_single_neuronname(self, pid, skid):
return self.djangourl("/" + str(pid) + "/skeleton/" + str(skid) + "/neuronname" )
#Use to get skeletons review status
def get_review_status(self, pid):
return self.djangourl("/" + str(pid) + "/skeletons/review-status" )
#Use to get annotations for given neuron. DOES need skid as postdata
def get_neuron_annotations(self, pid):
return self.djangourl("/" + str(pid) + "/annotations/table-list" )
"""
ATTENTION!!!!: This does not seem to work anymore as of 20/10/2015 -> although it still exists in CATMAID code
use get_annotations_for_skid_list2
"""
#Use to get annotations for given neuron. DOES need skid as postdata
def get_annotations_for_skid_list(self, pid):
return self.djangourl("/" + str(pid) + "/annotations/skeletons/list" )
"""
!!!!
"""
#Does need postdata
def list_skeletons(self, pid):
return self.djangourl("/" + str(pid) + "/skeletons" )
#Use to get annotations for given neuron. DOES need skid as postdata
def get_annotations_for_skid_list2(self, pid):
return self.djangourl("/" + str(pid) + "/skeleton/annotationlist" )
#Use to parse url for retrieving list of all annotations (and their IDs!!!) (does NOT need post data)
def get_annotation_list(self, pid):
return self.djangourl("/" + str(pid) + "/annotations/" )
#Use to parse url for retrieving contributor statistics for given skeleton (does NOT need post data)
def get_contributions_url(self, pid, skid):
return self.djangourl("/" + str(pid) + "/skeleton/" + str(skid) + "/contributor_statistics" )
#Use to parse url for retrieving neurons with given annotation or name (does need post data)
def get_annotated_url(self, pid):
#return self.djangourl("/" + str(pid) + "/neuron/query-by-annotations" )
return self.djangourl("/" + str(pid) + "/annotations/query-targets" )
#Use to parse url for retrieving list of nodes (needs post data)
def get_node_list(self, pid):
return self.djangourl("/" + str(pid) + "/node/list" )
#Use to parse url for retrieving all info the 3D viewer gets (does NOT need post data)
#Returns, in JSON, [[nodes], [connectors], [tags]], with connectors and tags being empty when 0 == with_connectors and 0 == with_tags, respectively
def get_compact_skeleton_url(self, pid, skid, connector_flag = 1, tag_flag = 1):
return self.djangourl("/" + str(pid) + "/" + str(skid) + "/" + str(connector_flag) + "/" + str(tag_flag) + "/compact-skeleton")
def get_compact_details_url(self, pid, skid):
""" Similar to compact-skeleton but if 'with_history':True is passed as GET request, returned data will include all positions a nodes/connector has ever occupied plus the creation time and last modified.
"""
return self.djangourl("/" + str(pid) + "/skeletons/" + str(skid) + "/compact-detail")
#The difference between this function and the compact_skeleton function is that
#the connectors contain the whole chain from the skeleton of interest to the
#partner skeleton: contains [treenode_id, confidence_to_connector, connector_id, confidence_from_connector, connected_treenode_id, connected_skeleton_id, relation1, relation2]
#relation1 = 1 means presynaptic (this neuron is upstream), 0 means postsynaptic (this neuron is downstream)
def get_compact_arbor_url(self, pid, skid, nodes_flag = 1, connector_flag = 1, tag_flag = 1):
return self.djangourl("/" + str(pid) + "/" + str(skid) + "/" + str(nodes_flag) + "/" + str(connector_flag) + "/" + str(tag_flag) + "/compact-arbor")
#Use to parse url for retrieving edges between given skeleton ids (does need postdata)
#Returns list of edges: [source_skid, target_skid, weight]
def get_edges_url(self, pid):
return self.djangourl("/" + str(pid) + "/skeletongroup/skeletonlist_confidence_compartment_subgraph" )
def search_url(self,tag,pid):
return self.djangourl("/" + str(pid) + "/search?pid=" + str(pid) + "&substring=" + str(tag) )
#Use to get all skeletons of a given neuron (neuron_id)
def get_skeletons_from_neuron_id(self,neuron_id,pid):
return self.djangourl("/" + str(pid) + "/neuron/" + str(neuron_id) + '/get-all-skeletons' )
#Use to parse url for adding volumes
def add_volume(self, pid):
return self.djangourl("/" + str(pid) + "/volumes/add")
#Get list of all volumes in project
def get_volumes(self, pid):
return self.djangourl("/" + str(pid) + "/volumes/")
#Get details on a given volume
def get_volume_details(self, pid, volume_id):
return self.djangourl("/" + str(pid) + "/volumes/" + str(volume_id) )
def get_list_skeletons_url(self, pid):
""" Use to parse url for names for a list of skeleton ids (works with GET).
"""
return self.djangourl("/" + str(pid) + "/skeletons/")
def get_review_details_url(self, pid, skid):
""" Use to retrieve review status for every single node of a skeleton.
For some reason this needs to be fetched as POST (even though actual POST data is not necessary)
Returns list of arbors, the nodes contained and who has been reviewing them at what time
"""
return self.djangourl("/" + str(pid) + "/skeletons/" + str(skid) + "/review")
def get_review_details(x, remote_instance=None, max_threads=None):
""" Retrieve review status (reviewer + timestamp) for each node
of a given skeleton. Uses the review API.
Parameters
-----------
x : {int, str, CatmaidNeuron, CatmaidNeuronList, DataFrame}
Your options are either::
1. int or list of ints will be assumed to be skeleton IDs
2. str or list of str:
- if convertible to int, will be interpreted as x
- elif start with 'annotation:' will be assumed to be
annotations
- else, will be assumed to be neuron names
3. For CatmaidNeuron/List or pandas.DataFrames will try
to extract skeleton_id parameter
Returns
-------
dict
{ 'skid1' : [ (node_id,
most_recent_reviewer_login,
most_recent_review_datetime),
...],
'skid2' : ... }
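    Examples
    --------
    A sketch, assuming a connected global remote_instance (skid 12345 is a placeholder)::
        details = get_review_details( 12345 )
        for node_id, reviewer_login, review_time in details[12345]:
            print( node_id, reviewer_login, review_time )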
"""
if remote_instance is None:
if 'remote_instance' in globals():
remote_instance = globals()['remote_instance']
else:
print('Please either pass a CATMAID instance or define globally as "remote_instance" ')
return
if not isinstance(x, (list, np.ndarray, set)):
x = [x]
urls = []
post_data = []
for s in x:
urls.append(remote_instance.get_review_details_url(project_id, s))
# For some reason this needs to fetched as POST (even though actual
# POST data is not necessary)
post_data.append({'placeholder': 0})
rdata = get_urls_threaded(urls, post_data, max_threads)
user_list = remote_instance.fetch( remote_instance.get_user_list_url() )
user_list = { k['id'] : k for k in user_list}
last_reviewer = {}
for i, neuron in enumerate(rdata):
this_neuron = []
for arbor in neuron:
this_neuron += [ (n['id'],
user_list[n['rids'][0][0]]['login'],
datetime.datetime.strptime(n['rids'][0][1][:16], '%Y-%m-%dT%H:%M'))
for n in arbor['sequence'] if n['rids']]
last_reviewer[x[i]] = this_neuron
return last_reviewer
def eval_skids(x):
""" Wrapper to evaluate parameters passed as skeleton IDs. Will turn
annotations and neuron names into skeleton IDs.
Parameters
----------
x : {int, str, CatmaidNeuron, CatmaidNeuronList, DataFrame}
Your options are either::
1. int or list of ints will be assumed to be skeleton IDs
2. str or list of str:
- if convertible to int, will be interpreted as x
- elif start with 'annotation:' will be assumed to be
annotations
- else, will be assumed to be neuron names
3. For CatmaidNeuron/List or pandas.DataFrames will try
to extract skeleton_id parameter
remote_instance : CatmaidInstance, optional
Returns
-------
list of str
list containing skeleton IDs as strings
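    Examples
    --------
    A sketch, assuming a connected global remote_instance (identifiers are placeholders)::
        eval_skids( 12345 )                      # -> ['12345']
        eval_skids( 'annotation:sensory' )       # resolves an annotation
        eval_skids( ['12345', 'name:DA1 PN'] )   # mixed identifiers work too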
"""
    # Only strings can hold comma-separated lists - ints would raise a TypeError here
    if isinstance(x, str) and ',' in x:
        x = x.split(',')
if isinstance(x, (int, np.int64, np.int32, np.int)):
return [ str(x) ]
elif isinstance(x, (str, np.str)):
try:
int(x)
return [ str(x) ]
except:
if x.startswith('annotation:'):
return search_annotations(x[11:])
elif x.startswith('name:'):
return search_neuron_names(x[5:],allow_partial=False).skeleton_id.tolist()
else:
return search_neuron_names(x, allow_partial=False).skeleton_id.tolist()
elif isinstance(x, (list, np.ndarray)):
skids = []
for e in x:
temp = eval_skids(e)
if isinstance(temp, (list, np.ndarray)):
skids += temp
else:
skids.append(temp)
return list(set(skids))
    else:
        # CatmaidInstance in this script has no .logger attribute - print instead
        print('Unable to extract skeleton IDs from type %s' % str(type(x)))
        raise TypeError('Unable to extract skeleton IDs from type %s' % str(type(x)))
def search_neuron_names(tag, allow_partial = True):
""" Searches for neuron names. Returns a list of skeleton ids.
"""
search_url = remote_instance.get_annotated_url( project_id )
annotation_post = { 'name': str(tag) , 'rangey_start': 0, 'range_length':500, 'with_annotations':False }
results = remote_instance.fetch( search_url, annotation_post )
match = []
for e in results['entities']:
if allow_partial and e['type'] == 'neuron' and tag.lower() in e['name'].lower():
match += e['skeleton_ids']
if not allow_partial and e['type'] == 'neuron' and e['name'] == tag:
match += e['skeleton_ids']
return list( set(match) )
def search_annotations(annotations_to_retrieve, allow_partial=False, intersect=False):
""" Searches for annotations, returns list of skeleton IDs
"""
### Get annotation IDs
osd.show("Looking for Annotations...")
print('Looking for Annotations:', annotations_to_retrieve)
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
print('Retrieving list of Annotations...')
an_list = remote_instance.fetch( remote_instance.get_annotation_list( project_id ) )
print('List of %i annotations retrieved.' % len(an_list['annotations']))
annotation_ids = []
annotation_names = []
if not allow_partial:
annotation_ids = [ x['id'] for x in an_list['annotations'] if x['name'] in annotations_to_retrieve ]
annotation_names = [ x['name'] for x in an_list['annotations'] if x['name'] in annotations_to_retrieve ]
else:
        annotation_ids = [ x['id'] for x in an_list['annotations'] if any( y.lower() in x['name'].lower() for y in annotations_to_retrieve ) ]
        annotation_names = [ x['name'] for x in an_list['annotations'] if any( y.lower() in x['name'].lower() for y in annotations_to_retrieve ) ]
if not annotation_ids:
return []
#Now retrieve annotated skids
print('Looking for Annotation(s) | %s | (id: %s)' % ( str(annotation_names), str(annotation_ids) ) )
#annotation_post = {'neuron_query_by_annotation': annotation_id, 'display_start': 0, 'display_length':500}
if intersect:
annotation_post = { 'rangey_start': 0, 'range_length':500, 'with_annotations':False }
for i,e in enumerate(annotation_ids):
key = 'annotated_with[%i]' % i
annotation_post[key] = e
remote_annotated_url = remote_instance.get_annotated_url( project_id )
neuron_list = [ str(n['skeleton_ids'][0]) for n in remote_instance.fetch( remote_annotated_url, annotation_post )['entities'] if n['type'] == 'neuron' ]
else:
neuron_list = []
for e in annotation_ids:
annotation_post = { 'annotated_with[0]': e, 'rangey_start': 0, 'range_length':500, 'with_annotations':False }
remote_annotated_url = remote_instance.get_annotated_url( project_id )
neuron_list += [ str(n['skeleton_ids'][0]) for n in remote_instance.fetch( remote_annotated_url, annotation_post )['entities'] if n['type'] == 'neuron' ]
annotated_skids = list(set(neuron_list))
print('Annotation(s) found for %i neurons' % len(annotated_skids))
neuron_names = get_neuronnames(annotated_skids)
return annotated_skids
def retrieve_skeleton_list( user=None, node_count=1, start_date=[], end_date=[], reviewed_by = None ):
""" Wrapper to retrieves a list of all skeletons that fit given parameters (see variables). If no parameters are provided, all existing skeletons are returned.
Parameters:
----------
    remote_instance : class
                      Your CATMAID instance; uses the globally defined 'remote_instance'.
user : integer
A single user_id.
node_count : integer
Minimum number of nodes.
start_date : list of integers [year, month, day]
Only consider neurons created after.
end_date : list of integers [year, month, day]
Only consider neurons created before.
Returns:
-------
skid_list : list of skeleton ids
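    Example
    -------
    A sketch, assuming a connected global remote_instance (user id 12 is a placeholder)::
        skids = retrieve_skeleton_list( user = 12, node_count = 500,
                                        start_date = [2018, 1, 1],
                                        end_date = [2018, 12, 31] )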
"""
get_skeleton_list_GET_data = {'nodecount_gt':node_count}
if user:
get_skeleton_list_GET_data['created_by'] = user
if reviewed_by:
get_skeleton_list_GET_data['reviewed_by'] = reviewed_by
if start_date and end_date:
get_skeleton_list_GET_data['from'] = ''.join( [ str(d) for d in start_date ] )
get_skeleton_list_GET_data['to'] = ''.join( [ str(d) for d in end_date ] )
    remote_get_list_url = remote_instance.get_list_skeletons_url( project_id )
remote_get_list_url += '?%s' % urllib.parse.urlencode(get_skeleton_list_GET_data)
skid_list = remote_instance.fetch ( remote_get_list_url)
return skid_list
def get_annotations_from_list (skids, remote_instance):
""" Takes list of skids and retrieves their annotations. Note: It seems like this URL does not process more than 250 skids at a time!
Parameters
----------
skids : list of skeleton ids
remote_instance : CATMAID instance; either pass directly to function or define globally as 'remote_instance'
Returns
-------
dict: annotation_list = {skid1 : [annotation1,annotation2,....], skid2: []}
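    Example (a sketch, assuming a connected global remote_instance; skids are placeholders)::
        an = get_annotations_from_list( ['12345', '67890'], remote_instance )
        print( an.get('12345', []) )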
"""
remote_get_annotations_url = remote_instance.get_annotations_for_skid_list2( project_id )
get_annotations_postdata = {'metaannotations':0,'neuronnames':0}
for i in range(len(skids)):
key = 'skeleton_ids[%i]' % i
get_annotations_postdata[key] = str(skids[i])
    print('Asking for annotations of %i skeletons (Project ID: %i)' % (len(skids),project_id), end = ' ')
annotation_list_temp = remote_instance.fetch( remote_get_annotations_url , get_annotations_postdata )
annotation_list = {}
for skid in annotation_list_temp['skeletons']:
annotation_list[skid] = []
for entry in annotation_list_temp['skeletons'][skid]['annotations']:
annotation_id = entry['id']
annotation_list[skid].append(annotation_list_temp['annotations'][str(annotation_id)])
print('Annotations for %i neurons retrieved' % len(annotation_list))
return(annotation_list)
def retrieve_connectivity (skids, remote_instance = None, threshold = 1):
""" Wrapper to retrieve the synaptic partners to neurons of interest
Parameters:
----------
skids : list of skeleton ids
remote_instance : CATMAID instance; either pass directly to function or define globally as 'remote_instance'
    threshold : the CATMAID API appears to ignore this parameter, so it is filtered client-side after retrieval. The threshold is applied to the total number of synapses. (optional, default = 1)
Returns:
-------
filtered connectivity: {'incoming': { skid1: { 'num_nodes': XXXX, 'skids':{ 'skid3':n_snypases, 'skid4': n_synapses } } , skid2:{}, ... }, 'outgoing': { } }
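    Example (a sketch, assuming a connected global remote_instance; the skid is a placeholder)::
        cn = retrieve_connectivity( ['12345'], threshold = 3 )
        for skid in cn['incoming']:
            print( skid, cn['incoming'][skid]['skids'] )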
"""
if remote_instance is None:
if 'remote_instance' in globals():
remote_instance = globals()['remote_instance']
else:
print('Please either pass a CATMAID instance or define globally as "remote_instance" ')
return
    remote_connectivity_url = remote_instance.get_connectivity_url( project_id )
connectivity_post = {}
connectivity_post['boolean_op'] = 'OR'
    for i, skid in enumerate(skids):
        tag = 'source_skeleton_ids[%i]' % i
        connectivity_post[tag] = skid
connectivity_data = remote_instance.fetch( remote_connectivity_url , connectivity_post )
#As of 08/2015, # of synapses is returned as list of nodes with 0-5 confidence: {'skid': [0,1,2,3,4,5]}
#This is being collapsed into a single value before returning it:
for direction in ['incoming','outgoing']:
pop = []
for entry in connectivity_data[direction]:
if sum( [ sum(connectivity_data[direction][entry]['skids'][n]) for n in connectivity_data[direction][entry]['skids'] ] ) >= threshold:
for skid in connectivity_data[direction][entry]['skids']:
connectivity_data[direction][entry]['skids'][skid] = sum(connectivity_data[direction][entry]['skids'][skid])
else:
pop.append(entry)
for n in pop:
connectivity_data[direction].pop(n)
return(connectivity_data)
def get_partners (skids, remote_instance, hops, upstream=True, downstream=True):
""" Retrieves partners of given skids over several hops.
Parameters:
----------
skids : list of skeleton ids
remote_instance : CATMAID instance
either pass directly to function or define globally as 'remote_instance'
hops : integer
number of hops from the original skeleton to check
upstream/downstream : boolean
If true, this direction will be checked. I.e. hops = 2 and downstream = False will return inputs and inputs of inputs
Returns:
-------
partners : dict
{ 'incoming': list[ [hop1 connectivity data],[hop 2 connectivity data], ... ] , 'outgoing': list[ [hop1 connectivity data],[hop 2 connectivity data], ... ] }
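    Example (a sketch, assuming a connected global remote_instance; the skid is a placeholder)::
        partners = get_partners( ['12345'], remote_instance, hops = 2,
                                 upstream = True, downstream = False )
        first_hop_inputs = partners['incoming'][0]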
"""
    #By separating up- and downstream retrieval we make sure that we don't circle back in the second hop
#I.e. we only want inputs of inputs and NOT inputs+outputs of inputs
skids_upstream_to_retrieve = skids
skids_downstream_to_retrieve = skids
partners = {}
partners['incoming'] = []
partners['outgoing'] = []
skids_already_seen = {}
remote_connectivity_url = remote_instance.get_connectivity_url( project_id )
for hop in range(hops):
upstream_partners_temp = {}
connectivity_post = {}
#connectivity_post['threshold'] = 1
connectivity_post['boolean_op'] = 'OR'
if upstream is True:
for i in range(len(skids_upstream_to_retrieve)):
tag = 'source_skeleton_ids[%i]' % i
connectivity_post[tag] = skids_upstream_to_retrieve[i]
print( "Retrieving Upstream Partners for %i neurons [%i. hop]..." % (len(skids_upstream_to_retrieve),hop+1))
connectivity_data = []
connectivity_data = remote_instance.fetch( remote_connectivity_url , connectivity_post )
print("Done.")
new_skids_upstream_to_retrieve = []
for skid in connectivity_data['incoming']:
upstream_partners_temp[skid] = connectivity_data['incoming'][skid]
#Make sure we don't do circles (connection is still added though!):
                #Unnecessary if we are already at the last hop
if skid not in skids_upstream_to_retrieve:
new_skids_upstream_to_retrieve.append(skid)
if skid in skids_already_seen:
print('Potential circle detected! %s between hops: %s and %i upstream' % (skid,skids_already_seen[skid],hop))
                    skids_already_seen[skid] += ' and ' + str(hop) + ' upstream'
else:
skids_already_seen[skid] = str(hop) + ' upstream'
#Set skids to retrieve for next hop
skids_upstream_to_retrieve = new_skids_upstream_to_retrieve
partners['incoming'].append(upstream_partners_temp)
connectivity_post = {}
connectivity_post['threshold'] = 1
connectivity_post['boolean_op'] = 'OR'
downstream_partners_temp = {}
if downstream is True:
for i in range(len(skids_downstream_to_retrieve)):
tag = 'source_skeleton_ids[%i]' % i
connectivity_post[tag] = skids_downstream_to_retrieve[i]
print( "Retrieving Downstream Partners for %i neurons [%i. hop]..." % (len(skids_downstream_to_retrieve),hop+1))
connectivity_data = []
connectivity_data = remote_instance.fetch( remote_connectivity_url , connectivity_post )
print("Done!")
new_skids_downstream_to_retrieve = []
for skid in connectivity_data['outgoing']:
downstream_partners_temp[skid] = connectivity_data['outgoing'][skid]
#Make sure we don't do circles (connection is still added though!):
                #Unnecessary if we are already at the last hop
if skid not in skids_downstream_to_retrieve:
new_skids_downstream_to_retrieve.append(skid)
if skid in skids_already_seen:
print('Potential circle detected! %s between hops: %s and %i downstream' % (skid,skids_already_seen[skid],hop))
                    skids_already_seen[skid] += ' and ' + str(hop) + ' downstream'
else:
skids_already_seen[skid] = str(hop) + ' downstream'
#Set skids to retrieve for next hop
skids_downstream_to_retrieve = new_skids_downstream_to_retrieve
partners['outgoing'].append(downstream_partners_temp)
return(partners)
def get_user_ids(users):
""" Wrapper to retrieve user ids for a list of logins
Parameters:
-----------
    users : list of strings
            user logins or numeric user ids
Returns:
-------
user_ids : list of integers
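    Example (the login is a placeholder; numeric strings are passed through as ids)::
        user_ids = get_user_ids( ['jdoe', '42'] )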
"""
user_ids = []
user_list = remote_instance.fetch ( remote_instance.get_user_list_url() )
for u in users:
try:
user_ids.append( int(u) )
except:
user = [ us['id'] for us in user_list if us['login'] == u ]
if len(user) > 1:
                print('Multiple users with login %s found. Adding all.' % u)
user_ids += user
elif len(user) == 0:
print('No match found for', u)
else:
user_ids.append( user[0] )
return user_ids
def get_neuronnames(skids):
"""Retrieves and Returns a list of names for a list of neurons
Parameters:
----------
skids : list of strings or integers
Skeleton ids for which to retrieve neuron names
Returns:
--------
neuron_names : dict
{ skid: neuron_name }
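    Example (a sketch, assuming a connected global remote_instance; skids are placeholders)::
        names = get_neuronnames( ['12345', '67890'] )
        print( names['12345'] )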
"""
### Get URL to neuronnames function
remote_get_names = remote_instance.get_neuronnames( project_id )
### Create postdata out of given skeleton IDs
get_names_postdata = {}
    get_names_postdata['pid'] = project_id
i = 0
for skid in skids:
if str(skid).isdigit():
key = 'skids[%i]' % i
get_names_postdata[key] = skid
i += 1
else:
print('Skipped illegal skid in retrieving neuron names: ', skid)
### Retrieve neuron names: {'skid': 'neuron_name' , ... }
neuron_names = remote_instance.fetch( remote_get_names , get_names_postdata )
return(neuron_names)
def get_neurons_in_volume ( left, right, top, bottom, z1, z2, remote_instance ):
""" Retrieves neurons with processes within a defined volume. Because the API returns only a limited number of neurons at a time, the defined volume has to be chopped into smaller pieces for crowded areas - may thus take some time!
Parameters
----------
    left, right, top, bottom, z1, z2 : Coordinates defining the volume. Need to be in nm, not pixels.
remote_instance : CATMAID instance; either pass directly to function or define globally as 'remote_instance'
"""
def retrieve_nodes( left, right, top, bottom, z1, z2, remote_instance, incursion ):
print(incursion,':',left, right, top, bottom, z1, z2)
        remote_nodes_list = remote_instance.get_node_list( project_id )
        # Hard-coded scaling factor applied to the x/y boundaries in the POST data below
        x_y_resolution = 3.8
#Atnid seems to be related to fetching the active node too (will be ignored if atnid = -1)
node_list_postdata = { 'left':left * x_y_resolution,
'right':right * x_y_resolution,
'top': top * x_y_resolution,
'bottom': bottom * x_y_resolution,
'z1': z1,
'z2': z2,
'atnid':-1,
'labels': False
}
node_list = remote_instance.fetch( remote_nodes_list , node_list_postdata )
if node_list[3] is True:
print('Incursing')
incursion += 1
node_list = list()
#Front left top
node_list += retrieve_nodes( left,
left + (right-left)/2,
top,
top + (bottom-top)/2,
z1,
z1 + (z2-z1)/2,
remote_instance, incursion )
#Front right top
node_list += retrieve_nodes( left + (right-left)/2,
right,
top,
top + (bottom-top)/2,
z1,
z1 + (z2-z1)/2,
remote_instance, incursion )
#Front left bottom
node_list += retrieve_nodes( left,
left + (right-left)/2,
top + (bottom-top)/2,
bottom,
z1,
z1 + (z2-z1)/2,
remote_instance, incursion )
#Front right bottom
node_list += retrieve_nodes( left + (right-left)/2,
right,
top + (bottom-top)/2,
bottom,
z1,
z1 + (z2-z1)/2,
remote_instance, incursion )
#Back left top
node_list += retrieve_nodes( left,
left + (right-left)/2,
top,
top + (bottom-top)/2,
z1 + (z2-z1)/2,
z2,
remote_instance, incursion )
#Back right top
node_list += retrieve_nodes( left + (right-left)/2,
right,
top,
top + (bottom-top)/2,
z1 + (z2-z1)/2,
z2,
remote_instance, incursion )
#Back left bottom
node_list += retrieve_nodes( left,
left + (right-left)/2,
top + (bottom-top)/2,
bottom,
z1 + (z2-z1)/2,
z2,
remote_instance, incursion )
#Back right bottom
node_list += retrieve_nodes( left + (right-left)/2,
right,
top + (bottom-top)/2,
bottom,
z1 + (z2-z1)/2,
z2,
remote_instance, incursion )
else:
#If limit not reached, node list is still an array of 4
print("Incursion finished.",len(node_list[0]))
return node_list[0]
print("Incursion finished.",len(node_list))
return node_list
print('Retrieving Nodes in Volume...')
node_list = retrieve_nodes( left, right, top, bottom, z1, z2, remote_instance, 1 )
skeletons = set()
for node in node_list:
skeletons.add(str(node[7]))
    print(len(skeletons), 'skeletons found in volume')
return list(skeletons)
async def getURLasync(urls, post_data=None, max_threads=None):
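    """ Retrieves a batch of URLs concurrently: a thin asyncio wrapper that
    dispatches remote_instance.fetch() calls to a thread pool. Not meant to be
    called directly - use the synchronous wrapper get_urls_threaded() below.
    """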
responses = []
with concurrent.futures.ThreadPoolExecutor(max_workers=max_threads) as executor:
loop = asyncio.get_event_loop()
futures = [
loop.run_in_executor(
executor,
remote_instance.fetch,
u, p
)
for u, p in zip(urls, post_data)
]
for r in await asyncio.gather(*futures):
responses.append(r)
return responses
def get_urls_threaded(urls, post_data=[], max_threads=None):
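    """ Synchronous entry point for concurrent URL fetching. Expects one POST
    dictionary per URL (or empty/None post_data for GET-only requests) and
    returns the decoded JSON responses in the same order as the URLs.
    Example (a sketch, assuming a connected global remote_instance; skids
    are placeholders)::
        urls = [ remote_instance.get_compact_skeleton_url( project_id, s )
                 for s in ['12345', '67890'] ]
        responses = get_urls_threaded( urls )
    """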
if not post_data:
post_data = [None] * len(urls)
elif len(post_data) != len(urls):
raise ValueError('Must provide POST data for every URL.')
loop = asyncio.get_event_loop()
responses = loop.run_until_complete(getURLasync(urls, post_data, max_threads=max_threads))
return responses
def retrieveSkeletonData(skid_list, time_out=20, skip_existing=True, get_abutting=False, with_history=False, max_threads=None):
""" Retrieves 3D skeleton data from CATMAID server using threads.
Parameters:
-----------
skid_list : list of skeleton ids to retrieve
time_out : integer (optional, default is set in plugin properties)
                Sometimes the CATMAID server does not respond to a request - the time out prevents an infinite freeze.
skip_existing : boolean (default = True)
                    This prevents existing neurons from being reloaded.
get_abutting : boolean (default = False)
if True, will retrieve abutting connectors
For some reason they are not part of /compact-json/, so we have to retrieve them
via /connectors/ and add them to compact-json -> will give them connector type 3!
with_history : boolean (default = False)
If true, will retrieve skeleton data with timestamps
Returns:
-------
    skdata : dict containing 3D skeletons
{ skid: [ [node_data], [connector_data], [tags] ], skid2: ...}
errors : string
Errors that occurred during import, if any
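    Example
    -------
    A sketch, assuming a connected global remote_instance (skids are placeholders)::
        skdata, errors = retrieveSkeletonData( ['12345', '67890'],
                                               get_abutting = False )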
"""
skdata = {}
errors = None
#Check if neurons are already in scene - if so, skip
existing_skids = [ ob['skeleton_id'] for ob in bpy.data.objects if 'skeleton_id' in ob ]
    if skip_existing:
        already_loaded = [ s for s in skid_list if str(s) in existing_skids ]
        if already_loaded:
            print( 'Skipping existing neurons:', already_loaded )
            skid_list = [ s for s in skid_list if str(s) not in existing_skids ]
#Reinitialize/clear header display
ahd.reinitalize()
osd.show("Retrieving %i neurons" % len(skid_list))
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=5)
    urls = [remote_instance.get_compact_details_url(project_id , skid) \
            + '?%s' % urllib.parse.urlencode( {'with_history': str(with_history).lower(),
                                               'with_tags' : 'true',
                                               'with_connectors' : 'true',
                                               'with_merge_history': 'false'} )
            for skid in skid_list]
response = get_urls_threaded(urls, None, max_threads)
print('Data for {} neurons retrieved'.format(len(response)))
skdata = { skid_list[i] : r for i, r in enumerate(response)}
#If we want abutting connectors too, we will have to get them via /connectors/
if get_abutting:
get_connectors_GET_data = {'with_tags': 'false'}
cn_abutting = []
#Retrieve abutting connectors
for i,s in enumerate(skid_list):
tag = 'skeleton_ids[%i]' % i
get_connectors_GET_data[tag] = str( s )
get_connectors_GET_data['relation_type']='abutting'
remote_get_connectors_url = remote_instance.get_connectors_url( project_id ) + '?%s' % urllib.parse.urlencode(get_connectors_GET_data)
ab_data = remote_instance.fetch( remote_get_connectors_url )['links']
#ab_data format: [skeleton_id, connector_id, x, y, z, confidence, creator, treenode_id, creation_date ]
#Now sort to skeleton data -> give abutting connectors relation type 3 (0 = pre, 1 = post, 2 = gap)
#and put into standard compact-skeleton format: [ treenode_id, connector_id, relation_type, x, y, z ]
for s in skid_list:
skdata[s][1] += [ [ e[7], e[1], 3, e[2], e[3], e[4] ] for e in ab_data if str(e[0]) == str(s) ]
if errors is None:
osd.show("3D skeletons retrieved.")
else:
osd.show(errors)
ahd.clear()
return skdata, errors
class RetrieveNeuron(Operator):
""" Wrapper that retrieves Skeletons from CATMAID database """
bl_idname = "retrieve.neuron"
bl_label = "Enter Search Parameters"
names = StringProperty(name="Name(s)",
description = "Search by neuron names. Separate multiple names by commas."
)
partial_match = BoolProperty( name="Allow partial matches?",
default = False,
description = "Allow partial matches for neuron names and annotations! Will also become case-insensitive.")
annotations = StringProperty(name="Annotations(s)",
description = "Search by skeleton IDs. Multiple annotations comma-sparated."
)
intersect = BoolProperty( name="Intersect",
default = False,
description = "If true, all identifiers (e.g. two annotations or name + annotation) have to be true for a neuron to be loaded")
skeleton_ids = StringProperty(name="Skeleton ID(s)",
description = "Search by skeleton IDs. Multiple skids comma-sparated. Attention: Does not accept more than 400 characters!"
)
by_user = StringProperty(name="User(s)",
description = "Search by user logins or user_ids. Multiple users comma-separated!"
)
minimum_cont = IntProperty(name="Minimum contribution",
default = 1,
min = 1,
description = "Minimum node contribution per user to be loaded.")
minimum_nodes = IntProperty(name="Minimum node count",
default = 1,
min = 1,
description = "Neurons with fewer nodes will be ignored.")
import_synapses = BoolProperty( name="Synapses",
default = True,
description = "Import chemical synapses (pre- and postsynapses), similarly to 3D Viewer in CATMAID")
import_gap_junctions = BoolProperty( name="Gap Junctions",
default = False,
description = "Import gap junctions, similarly to 3D Viewer in CATMAID")
import_abutting = BoolProperty( name="Abutting Connectors",
default = False,
description = "Import abutting connectors.")
resampling = IntProperty(name="Downsampling Factor",
default = 2,
min = 1,
max = 20,
description = "Will reduce number of nodes by given factor. Root, ends and forks are preserved!")
truncate_neuron = EnumProperty(name="Truncate Neuron?",
items = (('none','No','Load full neuron'),
('main_neurite','Main Neurite','Truncate Main Neurite'),
('strahler_index','Strahler Index','Truncate Based on Strahler index')
),
default = "none",
description = "Choose if neuron should be truncated.")
truncate_value = IntProperty( name="Truncate by Value",
min=-10,
max=10,
default = 1,
description = "Defines length of truncated neurite or steps in Strahler Index from root node!"
)
interpolate_virtual = BoolProperty( name="Interpolate Virtual Nodes",
default = False,
description = "If true virtual nodes will be interpolated. Only important if you want the resolution of all neurons to be the same. Will slow down import!")
use_radius = BoolProperty( name="Use node radii",
default = False,
description = "If true, neuron will use node radii for thickness. If false, radius is assumed to be 70nm (for visibility).")
neuron_mat_for_connectors = BoolProperty( name="Connector color as neuron",
default = False,
description = "If true, connectors will have the same color as the neuron.")
color_by_user = BoolProperty( name="Color by user",
default = False,
description = "If True, color neuron by relevant users.")
skip_existing = BoolProperty( name="Skip existing",
default = True,
description = "If True, will not add neurons that are already in the scene.")
    # ATTENTION:
    # using check() in an operator that uses threads will lead to segmentation faults!
def check(self, context):
return True
def draw(self, context):
layout = self.layout
box = layout.box()
row = box.row(align=False)
row.prop(self, "names")
row = box.row(align=False)
row.prop(self, "annotations")
row = box.row(align=False)
row.prop(self, "by_user")
if self.by_user:
row.prop(self, "minimum_cont")
row = box.row(align=False)
row.prop(self, "skeleton_ids")
if self.names or self.annotations:
row = box.row(align=False)
row.prop(self, "partial_match")
row.prop(self, "intersect")
row = box.row(align=False)
row.prop(self, "minimum_nodes")
layout.label(text="Import Options")
box = layout.box()
row = box.row(align=False)
row.prop(self, "import_synapses")
row.prop(self, "import_gap_junctions")
row.prop(self, "import_abutting")
if self.import_synapses or self.import_gap_junctions or self.import_abutting:
row = box.row(align=False)
row.prop(self, "neuron_mat_for_connectors")
row = box.row(align=False)
row.prop(self, "resampling")
row = box.row(align=False)
row.prop(self, "truncate_neuron")
if self.truncate_neuron != 'none':
row = box.row(align=False)
row.prop(self, "truncate_value")
row = box.row(align=False)
row.prop(self, "interpolate_virtual")
row.prop(self, "use_radius")
row.prop(self, "skip_existing")
row.prop(self, "color_by_user")
def execute(self, context):
global remote_instance
errors = []
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
retrieve_by_annotations = []
retrieve_by_names = []
retrieve_by_skids = []
retrieve_by_user = []
if self.names:
osd.show("Looking for Names...")
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
for n in [x.strip() for x in self.names.split(',')]:
retrieve_by_names += search_neuron_names( n, allow_partial = self.partial_match )
retrieve_by_names = [str(e) for e in retrieve_by_names]
if not retrieve_by_names:
print('WARNING: Search tag(s) not found! Import stopped')
self.report({'ERROR'},'Search tag(s) not found! Import stopped')
osd.show("WARNING: Search tag(s) not found! Import stopped")
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
return{'FINISHED'}
if self.skeleton_ids:
retrieve_by_skids = [x.strip() for x in self.skeleton_ids.split(',')]
if self.annotations:
annotations_to_retrieve = [x.strip() for x in self.annotations.split(',')]
retrieve_by_annotations = search_annotations(annotations_to_retrieve, allow_partial=self.partial_match, intersect=self.intersect )
if not retrieve_by_annotations:
                print('ERROR: No matching annotation(s) found! Import stopped')
                self.report({'ERROR'},'No matching annotation(s) found! Import stopped')
                osd.show("ERROR: No matching annotation(s) found! Import stopped")
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
return{'FINISHED'}
if self.by_user:
users_to_retrieve = [x.strip() for x in self.by_user.split(',')]
user_ids = get_user_ids( users_to_retrieve )
retrieve_by_user = []
for u in user_ids:
retrieve_by_user += retrieve_skeleton_list( user=u , node_count = self.minimum_nodes , start_date=[], end_date=[], reviewed_by = None )
retrieve_by_user = list ( set( retrieve_by_user ) )
if self.intersect:
#Find starting point
if self.annotations:
skeletons_to_retrieve = retrieve_by_annotations
elif self.names:
skeletons_to_retrieve = retrieve_by_names
elif self.by_user:
skeletons_to_retrieve = retrieve_by_user
#Now intersect
if self.annotations:
skeletons_to_retrieve = [ n for n in skeletons_to_retrieve if n in retrieve_by_annotations ]
if self.names:
skeletons_to_retrieve = [ n for n in skeletons_to_retrieve if n in retrieve_by_names ]
            if self.by_user:
                skeletons_to_retrieve = [ n for n in skeletons_to_retrieve if n in retrieve_by_user ]
if not skeletons_to_retrieve:
print('WARNING: No neurons left after intersection! Import stopped')
self.report({'ERROR'},'Intersection empty! Import stopped')
osd.show("WARNING: Intersection empty! Import stopped")
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)
return{'FINISHED'}
else:
skeletons_to_retrieve = list( set( retrieve_by_skids + retrieve_by_names + retrieve_by_annotations + retrieve_by_user ) )
if self.minimum_nodes > 1 and skeletons_to_retrieve:
print('Filtering {0} neurons for size'.format(len(skeletons_to_retrieve)))
review_status_url = remote_instance.get_review_status(project_id)
review_post = {}
for i,skid in enumerate(skeletons_to_retrieve):
key = 'skeleton_ids[%i]' % i
review_post[key] = skid
review_status = remote_instance.fetch(review_status_url, review_post)
skeletons_to_retrieve = [ e for e in skeletons_to_retrieve if review_status[str(e)][0] >= self.minimum_nodes ]
        ### Resolve neuron names for the skeletons to be loaded
print('%i neurons found - resolving names...' % len(skeletons_to_retrieve))
neuron_names = get_neuronnames(skeletons_to_retrieve)
self.count = 1
print("Collecting skeleton data...")
start = time.clock()
skdata, errors = retrieveSkeletonData( list(skeletons_to_retrieve),
time_out = context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
get_abutting = bool(self.import_abutting),
skip_existing=self.skip_existing,
max_threads = context.user_preferences.addons['CATMAIDImport'].preferences.rqs)
if self.minimum_cont > 1 and self.by_user:
above_threshold = {}
            for skid in skdata:
                # 'nd' iterates over node entries - avoids shadowing the skeleton id
                if len([ nd for nd in skdata[skid][0] if nd[2] in user_ids ]) > self.minimum_cont:
                    above_threshold[skid] = skdata[skid]
skdata = above_threshold
print("Creating meshes for %i neurons" % len(skdata))
for skid in skdata:
CATMAIDtoBlender.extract_nodes( skdata[skid], str(skid),
neuron_name = neuron_names[str(skid)],
resampling = self.resampling,
import_synapses = self.import_synapses,
import_gap_junctions = self.import_gap_junctions,
import_abutting = self.import_abutting,
truncate_neuron = self.truncate_neuron,
truncate_value = self.truncate_value,
interpolate_virtual = self.interpolate_virtual,
conversion_factor = self.conversion_factor,
use_radius = self.use_radius,
neuron_mat_for_connectors = self.neuron_mat_for_connectors,
color_by_user = self.color_by_user)
print('Finished Import in', time.clock()-start, 's')
if errors is None:
msg = 'Success! %i neurons imported' % len(skdata)
self.report({'INFO'}, msg)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
else:
self.report({'ERROR'}, errors)
return {'FINISHED'}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self, width = 500)
    @classmethod
    def poll(cls, context):
        return connected
class UpdateNeurons(Operator):
""" Updates existing Neurons in Scene from CATMAID Server.
"""
bl_idname = "reload.neurons"
bl_label = "Update Neurons from CATMAID Server"
bl_options = {'UNDO'}
which_neurons = EnumProperty( name = "Which Neurons?",
items = [('Selected','Selected','Selected'),('All','All','All')],
description = "Choose which neurons to reload." )
keep_resampling = BoolProperty( name = "Keep Old Resampling?", default = True
)
new_resampling = IntProperty( name = "New Downsampling Factor", default = 2, min = 1, max = 20,
description = "Will reduce node count by given factor. Root, ends and forks are preserved!" )
import_synapses = BoolProperty( name="Import Synapses",
default = True,
description = "Import chemical synapses (pre- and postsynapses), similarly to 3D Viewer in CATMAID")
import_gap_junctions = BoolProperty( name="Import Gap Junctions",
default = False,
description = "Import gap junctions, similarly to 3D Viewer in CATMAID")
import_abutting = BoolProperty( name="Import Abutting Connectors",
default = False,
description = "Import abutting connectors.")
neuron_mat_for_connectors = BoolProperty( name="Connector color as neuron",
default = False,
description = "If true, connectors will have the same color as the neuron.")
truncate_neuron = EnumProperty( name = "Truncate Neuron?",
items = ( ('none','No','Load full neuron'),
('main_neurite','Main Neurite','Truncate Main Neurite'),
('strahler_index','Strahler Index','Truncate Based on Strahler index')
),
default = "none",
description = "Choose if neuron should be truncated." )
truncate_value = IntProperty( name = "Truncate by Value",
min = -10,
max = 10,
default = 1,
description = "Defines length of truncated neurite or steps in Strahler Index from root node!" )
interpolate_virtual = BoolProperty( name = "Interpolate Virtual Nodes",
default = False,
description = "If true virtual nodes will be interpolated. Only important if you want the resolution of all neurons to be the same. Will slow down import!" )
use_radius = BoolProperty( name = "Use node radii",
default = False,
description = "If true, neuron will use node radii for thickness. If false, radius is assumed to be 70nm (for visibility)." )
def check(self, context):
return True
def draw(self, context):
layout = self.layout
row = layout.row(align=True)
row.prop(self, "which_neurons")
row = layout.row(align=True)
row.prop(self, "keep_resampling")
if not self.keep_resampling:
row.prop(self, "new_resampling")
for k in ['interpolate_virtual', 'use_radius', 'import_synapses',
'import_gap_junctions','import_abutting']:
row = layout.row(align=True)
row.prop(self, k)
if self.import_synapses or self.import_gap_junctions or self.import_abutting:
row = layout.row(align=True)
row.prop(self, 'neuron_mat_for_connectors')
row = layout.row(align=True)
row.prop(self, 'truncate_neuron')
if self.truncate_neuron != 'none':
row = layout.row(align=True)
row.prop(self, 'truncate_value')
def execute(self,context):
neurons_to_reload = {}
resampling = 1
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
### Gather skeleton IDs
if self.which_neurons == 'All':
to_check = bpy.data.objects
elif self.which_neurons == 'Selected':
to_check = bpy.context.selected_objects
elif self.which_neurons == 'Active':
to_check = [ bpy.context.active_object ]
for neuron in to_check:
if neuron.name.startswith('#'):
try:
skid = re.search('#(.*?) -',neuron.name).group(1)
neurons_to_reload[neuron.name] = {}
neurons_to_reload[neuron.name]['skid'] = skid
if 'resampling' in neuron:
neurons_to_reload[neuron.name]['resampling'] = neuron['resampling']
else:
neurons_to_reload[neuron.name]['resampling'] = 1
except:
print('Unable to process neuron', neuron.name)
        print('Reloading %i neurons' % len(neurons_to_reload))
### Deselect all objects, then select objects to update (Skeletons, Inputs/Outputs)
for object in bpy.data.objects:
object.select = False
if object.name.startswith('#') or object.name.startswith('Outputs of') or object.name.startswith('Inputs of') or object.name.startswith('Soma of'):
for neuron in neurons_to_reload:
if neurons_to_reload[neuron]['skid'] in object.name:
object.select = True
### Delete selected objects
bpy.ops.object.delete(use_global=False)
### Get Neuron Names (in case they changed):
print('Retrieving most recent neuron names from server...')
skids_to_retrieve = []
for neuron in neurons_to_reload:
skids_to_retrieve.append(neurons_to_reload[neuron]['skid'])
neuron_names = get_neuronnames(skids_to_retrieve)
print("Collecting updated skeleton data for %i neurons" % len(neurons_to_reload) )
        skdata = {}
start = time.clock()
resampling_factors = {}
skids_to_reload = []
        for n in neurons_to_reload:
skid = neurons_to_reload[n]['skid']
skids_to_reload.append(skid)
if self.keep_resampling is True:
resampling_factors[skid] = neurons_to_reload[n]['resampling']
else:
resampling_factors[skid] = self.new_resampling
skdata, errors = retrieveSkeletonData( skids_to_reload ,
time_out = context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
get_abutting = self.import_abutting,
max_threads = context.user_preferences.addons['CATMAIDImport'].preferences.rqs )
print("Creating new meshes for %i neurons" % len(skdata))
for skid in skdata:
CATMAIDtoBlender.extract_nodes( skdata[skid], str(skid),
neuron_name = neuron_names[str(skid)],
resampling = resampling_factors[skid],
import_synapses = self.import_synapses,
import_gap_junctions = self.import_gap_junctions,
import_abutting = self.import_abutting,
truncate_neuron = self.truncate_neuron,
truncate_value = self.truncate_value,
interpolate_virtual = self.interpolate_virtual,
conversion_factor = self.conversion_factor,
use_radius = self.use_radius,
neuron_mat_for_connectors = self.neuron_mat_for_connectors)
print('Finished Import in', time.clock()-start, 's')
if errors is None:
msg = 'Success! %i neurons imported' % len(skdata)
self.report({'INFO'}, msg)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
else:
self.report({'ERROR'}, errors)
return{'FINISHED'}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self, width = 800)
@classmethod
def poll(cls, context):
return connected
class RetrievePairs (Operator):
""" Imports neurons with given annotations.
This is based on pairs of neurons having reciprocal 'paired with #<skid>' annotations.
E.g. if neuron A (skid 123) is paired with neuron B (skid 456), then A carries the annotation 'paired with #456' and B carries 'paired with #123'.
"""
bl_idname = "retrieve.by_pairs"
bl_label = "Retrieve paired Neurons of existing Neurons"
which_neurons = EnumProperty( name = "For which Neuron(s)?",
items = [('Active','Active','Active'),('Selected','Selected','Selected'),('All','All','All')],
default = 'All',
description = "Choose for which neurons to load paired partners.")
import_synapses = BoolProperty( name="Import Synapses",
default = True,
description = "Import chemical synapses (pre- and postsynapses), similarly to 3D Viewer in CATMAID")
import_gap_junctions = BoolProperty( name="Import Gap Junctions",
default = False,
description = "Import gap junctions, similarly to 3D Viewer in CATMAID")
import_abutting = BoolProperty( name="Import Abutting Connectors",
default = False,
description = "Import abutting connectors.")
resampling = IntProperty( name = "Downsampling Factor",
default = 2,
min = 1,
max = 20,
description = "Will reduce number of nodes by given factor n. Root, ends and forks are preserved!")
truncate_neuron = EnumProperty( name = "Truncate Neuron?",
items = ( ('none','No','Load full neuron'),
('main_neurite','Main Neurite','Truncate Main Neurite'),
('strahler_index','Strahler Index','Truncate Based on Strahler index')
),
default = "none",
description = "Choose if neuron should be truncated.")
truncate_value = IntProperty( name = "Truncate by Value",
min = -10,
max = 10,
default = 1,
description = "Defines length of truncated neurite or steps in Strahler Index from root node!"
)
interpolate_virtual = BoolProperty( name = "Interpolate Virtual Nodes",
default = False,
description = "If true virtual nodes will be interpolated. Only important if you want the resolution of all neurons to be the same. Will slow down import!")
use_radius = BoolProperty( name = "Use node radii",
default = False,
description = "If true, neuron will use node radii for thickness. If false, radius is assumed to be 70nm (for visibility).")
def execute(self, context):
global remote_instance
neurons = []
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
if self.which_neurons == 'Active':
if bpy.context.active_object != None:
if bpy.context.active_object.name.startswith('#'):
try:
neurons.append(re.search('#(.*?) -',bpy.context.active_object.name).group(1))
except:
pass
else:
self.report({'ERROR'},'ERROR: Active object not a neuron')
print('ERROR: Active object not a neuron!')
else:
self.report({'ERROR'},'ERROR: No active Object')
print('ERROR: No active Object')
elif self.which_neurons == 'Selected':
for neuron in bpy.context.selected_objects:
if neuron.name.startswith('#'):
try:
neurons.append(re.search('#(.*?) -',neuron.name).group(1))
except:
pass
elif self.which_neurons == 'All':
for neuron in bpy.data.objects:
if neuron.name.startswith('#'):
try:
neurons.append(re.search('#(.*?) -',neuron.name).group(1))
except:
pass
annotations = get_annotations_from_list (neurons, remote_instance)
#Determine pairs
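#E.g. a neuron carrying the annotation 'paired with #456' adds skid '456' to the list of partners to load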
paired = []
for neuron in annotations:
paired_skid = None
try:
for annotation in annotations[neuron]:
if annotation.startswith('paired with #'):
skid = annotation[13:]
#Filter for errors in annotation:
if skid == neuron:
print('Warning - Neuron %s paired with itself' % str(neuron))
self.report({'ERROR'},'Error(s) occurred: see console')
continue
if paired_skid != None:
print('Warning - Multiple paired Annotations found for neuron %s! Neuron skipped!' % str(neuron))
self.report({'ERROR'},'Error(s) occurred: see console')
paired_skid = None
continue
paired_skid = skid
except:
pass
if paired_skid != None:
if paired_skid in paired:
print('Warning - Neuron %s annotated as paired in multiple Neurons!' % str(paired_skid))
self.report({'ERROR'},'Error(s) occurred: see console')
else:
paired.append(paired_skid)
if len(paired) != 0:
self.retrieve_paired(paired)
return{'FINISHED'}
def retrieve_paired(self, paired):
neuron_names = get_neuronnames(paired)
if len(neuron_names) < len(paired):
print('Warning! Incorrectly annotated skid(s) found among pairs!')
self.report({'ERROR'},'Error(s) occurred: see console')
for skid in paired:
if skid not in neuron_names:
print('Did not retrieve name for skid', skid)
neuron_names[skid] = 'ERROR - SKID does not exist'
print("Collection skeleton data for:", paired)
start = time.clock()
skdata, errors = retrieveSkeletonData( paired,
time_out = bpy.context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
get_abutting = self.import_abutting,
max_threads = bpy.context.user_preferences.addons['CATMAIDImport'].preferences.rqs )
print("Creating meshes for %i neurons" % len(skdata))
for skid in skdata:
try:
CATMAIDtoBlender.extract_nodes( skdata[skid], str(skid),
neuron_name = neuron_names[str(skid)],
resampling = self.resampling,
import_synapses = self.import_synapses,
import_gap_junctions = self.import_gap_junctions,
import_abutting = self.import_abutting,
truncate_neuron = self.truncate_neuron,
truncate_value = self.truncate_value,
interpolate_virtual = self.interpolate_virtual,
conversion_factor = self.conversion_factor,
use_radius = self.use_radius)
except:
print('Error importing skid %s - wrongly annotated skid?' % skid)
self.report({'ERROR'},'Error(s) occurred: see console')
print('Finished Import in', time.clock()-start, 's')
if errors is None:
msg = 'Success! %i neurons imported' % len(skdata)
self.report({'INFO'}, msg)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
else:
self.report({'ERROR'}, errors)
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self, width = 800)
@classmethod
def poll(cls, context):
return connected
class RetrieveInVolume(Operator):
""" Import neurons that have neurites in given volume.
"""
bl_idname = "retrieve.in_volume"
bl_label = "Retrieve Neurons in Volume"
top = IntProperty( name = "Top", default = 40000, min = 1)
bot = IntProperty( name = "Bottom", default = 50000, min = 1)
left = IntProperty( name = "Left", default = 90000, min = 1)
right = IntProperty( name = "Right", default = 100000, min = 1)
z1 = IntProperty( name = "Z1", default = 75000, min = 1,
description = "Not Slices!")
z2 = IntProperty( name = "Z2", default = 76000, min = 1,
description = "Not Slices!")
resampling = IntProperty( name = "Downsampling Factor",
default = 2,
min = 1,
max = 20,
description = "Will reduce number of nodes by given factor n. Root, ends and forks are preserved!")
minimum_nodes = IntProperty( name = 'Minimum node count',
default = 1,
description = 'Only neurons with more than defined nodes will be loaded.')
import_synapses = BoolProperty( name="Import Synapses",
default = True,
description = "Import chemical synapses (pre- and postsynapses), similarly to 3D Viewer in CATMAID")
import_gap_junctions = BoolProperty( name="Import Gap Junctions",
default = False,
description = "Import gap junctions, similarly to 3D Viewer in CATMAID")
import_abutting = BoolProperty( name="Import Abutting Connectors",
default = False,
description = "Import abutting connectors.")
truncate_neuron = EnumProperty( name = "Truncate Neuron",
items = ( ('none','No','Load full neuron'),
('main_neurite','Main Neurite','Truncate Main Neurite'),
('strahler_index','Strahler Index','Truncate Based on Strahler index')
),
default = "none",
description = "Choose if neuron should be truncated.")
truncate_value = IntProperty( name = "Truncate by Value",
min = -10,
max = 10,
default = 1,
description = "Defines length of truncated neurite or steps in Strahler Index from root node!"
)
interpolate_virtual = BoolProperty( name = "Interpolate Virtual Nodes",
default = False,
description = "If true virtual nodes will be interpolated. Only important if you want the resolution of all neurons to be the same. Will slow down import!")
use_radius = BoolProperty( name= "Use node radii",
default = False,
description = "If true, neuron will use node radii for thickness. If false, radius is assumed to be 70nm (for visibility).")
def execute(self, context):
global remote_instance
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
#Get Neurons in Volume:
skid_list = get_neurons_in_volume ( self.left, self.right, self.top, self.bot, self.z1, self.z2, remote_instance )
if not skid_list:
osd.show("No neurons in given volume found! Make sure to provide CATMAID coordinates")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
return{'FINISHED'}
if self.minimum_nodes > 1 and skid_list:
print('Filtering neurons by node count:', skid_list)
review_status_url = remote_instance.get_review_status(project_id)
review_post = {}
for i,skid in enumerate(skid_list):
key = 'skeleton_ids[%i]' % i
review_post[key] = skid
review_status = remote_instance.fetch(review_status_url, review_post)
print(review_status)
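# Each review status entry is assumed to be [total_node_count, reviewed_node_count]; index 0 is used below as the node count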
skid_list = [e for e in skid_list if review_status[str(e)][0] >= self.minimum_nodes]
neuron_names = get_neuronnames(skid_list)
print(skid_list)
print(neuron_names)
print("Collection skeleton data for %i neurons" % len(skid_list))
start = time.clock()
skdata, errors = retrieveSkeletonData( skid_list ,
time_out = context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
get_abutting = self.import_abutting,
max_threads = context.user_preferences.addons['CATMAIDImport'].preferences.rqs )
print("Creating meshes for %i neurons" % len(skdata))
for skid in skdata:
CATMAIDtoBlender.extract_nodes( skdata[skid], str(skid),
neuron_name = neuron_names[str(skid)],
resampling = self.resampling,
import_synapses = self.import_synapses,
import_gap_junctions = self.import_gap_junctions,
import_abutting = self.import_abutting,
truncate_neuron = self.truncate_neuron,
truncate_value = self.truncate_value,
interpolate_virtual = self.interpolate_virtual,
conversion_factor = self.conversion_factor,
use_radius = self.use_radius)
print('Finished Import in', time.clock()-start, 's')
if errors is None:
msg = 'Success! %i neurons imported' % len(skdata)
self.report({'INFO'}, msg)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
else:
self.report({'ERROR'}, errors)
return{'FINISHED'}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self, width = 800)
def draw(self, context):
layout = self.layout
layout.label(text="Define Bounding Box (CATMAID Coordinates):")
row = layout.row(align=True)
row.prop(self, "top")
row = layout.row(align=True)
row.prop(self, "bot")
row = layout.row(align=True)
row.prop(self, "left")
row = layout.row(align=True)
row.prop(self, "right")
row = layout.row(align=True)
row.prop(self, "z1")
row = layout.row(align=True)
row.prop(self, "z2")
layout.label(text="Import Options:")
row = layout.row(align=True)
row.prop(self, "resampling")
row = layout.row(align=True)
row.prop(self, "import_synapses")
row = layout.row(align=True)
row.prop(self, "import_gap_junctions")
row = layout.row(align=True)
row.prop(self, "import_abutting")
row = layout.row(align=True)
row.prop(self, "minimum_nodes")
row = layout.row(align=True)
row.prop(self, "truncate_neuron")
row = layout.row(align=True)
row.prop(self, "truncate_value")
row = layout.row(align=True)
row.prop(self, "interpolate_virtual")
row = layout.row(align=True)
row.prop(self, "use_radius")
@classmethod
def poll(cls, context):
return connected
class RetrieveTags(Operator):
"""Retrieves Tags of active/selected/all Neuron from CATMAID database"""
bl_idname = "retrieve.tags"
bl_label = "Retrieve Tags"
which_neurons = EnumProperty( name = "For which Neuron(s)?",
items = [('Selected','Selected','Selected'),('All','All','All')],
description = "Choose for which neurons to retrieve tags.")
color_prop = EnumProperty( name = "Colors",
items = [('Black','Black','Black'),('Mesh color','Mesh color','Mesh color'),('By tag','By tag','By Tag')],
default = 'By tag',
description = "How to color the tags.")
basic_radius = FloatProperty( name="Size",
default = 0.03,
description = "Set size of spheres representing tags.")
layer = IntProperty( name="Create in Layer",
default = 2,
min = 0,
max = 19,
description = "Set layer in which to create tags.")
filter_str = StringProperty( name="Filter Tags",
description='Filter tags.')
@classmethod
def poll(cls, context):
return connected
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self, width = 800)
def execute(self, context):
global remote_instance
bpy.context.scene.layers[self.layer] = True
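# Blender 2.7x layers are a 20-element boolean mask; enable only the target layer below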
layers = [i == self.layer for i in range(20)]
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
if self.which_neurons == 'All':
to_search = bpy.data.objects
elif self.which_neurons == 'Selected':
to_search = bpy.context.selected_objects
filtered_skids = []
colormap = {}
for ob in to_search:
if ob.name.startswith('#'):
skid = re.search('#(.*?) -',ob.name).group(1)
filtered_skids.append(skid)
colormap[skid] = tuple( ob.active_material.diffuse_color )
if not filtered_skids:
print('Error - no neurons found! Cancelled')
self.report({'ERROR'},'No neurons found!')
return {'FINISHED'}
start = time.clock()
print("Retrieving connector data for %i neurons" % len(filtered_skids))
skdata, errors = retrieveSkeletonData( filtered_skids,
time_out = context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
skip_existing = False,
max_threads = context.user_preferences.addons['CATMAIDImport'].preferences.rqs )
if self.color_prop == 'By tag':
all_tags = set( [ t for n in skdata for t in skdata[n][2] ] )
colors = ColorCreator.random_colors( len( all_tags ) )
colormap = { t : colors[i] for i,t in enumerate(all_tags) }
for n in skdata:
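# Node rows follow CATMAID's compact-skeleton format: [treenode_id, parent_id, user_id, x, y, z, radius, confidence];
# Blender coordinates are (x, z, -y), scaled by the conversion factor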
coords = { node[0] : ( node[3]/self.conversion_factor, node[5]/self.conversion_factor, node[4]/-self.conversion_factor ) for node in skdata[n][0] }
if self.color_prop == 'Black':
color = (0,0,0)
elif self.color_prop == 'Mesh color':
color = colormap[n]
for tag in skdata[n][2]:
if self.filter_str and self.filter_str not in tag:
continue
if self.color_prop == 'By tag':
color = colormap[tag]
for tn in skdata[n][2][tag]:
tag_ob = bpy.ops.mesh.primitive_ico_sphere_add( subdivisions=2, view_align=False, enter_editmode=False, \
location=coords[tn], size = self.basic_radius, \
layers=layers)
bpy.context.active_object.name = '%s (#%s)' % ( tag, n )
bpy.ops.object.shade_smooth()
if self.color_prop == 'Black':
mat_name = 'Tag_mat'
elif self.color_prop == 'Mesh color':
mat_name = 'Tag_mat of #%s' % n
elif self.color_prop == 'By tag':
mat_name = 'Tag_mat for %s' % tag
Create_Mesh.assign_material (bpy.context.active_object, mat_name , color[0] , color[1] , color[2])
if errors is None:
self.report({'INFO'},'Import successful. Look in layer %i' % self.layer)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
else:
self.report({'ERROR'}, errors)
return {'FINISHED'}
class RetrieveConnectors(Operator):
"""Retrieves Connectors of active/selected/all Neuron from CATMAID database"""
bl_idname = "retrieve.connectors"
bl_label = "Retrieve Connectors"
which_neurons = EnumProperty( name = "For which Neuron(s)?",
items = [('Selected','Selected','Selected'),('All','All','All')],
description = "Choose for which neurons to retrieve connectors.")
color_prop = EnumProperty( name = "Colors",
items = [('Black','Black','Black'),('Mesh-color','Mesh-color','Mesh-color'),('Random','Random','Random')],
description = "How to color the connectors.")
create_as = EnumProperty( name = "Create as",
items = [('Spheres','Spheres','Spheres'),('Curves','Curves','Curves')],
description = "As what to create them. Curves suggested for large numbers.")
basic_radius = FloatProperty( name="Basic Radius",
default = 0.01,
description = "Set to -1 to not weigh connectors")
layer = IntProperty( name="Create in Layer",
default = 2,
min = 0,
max = 19,
description = "Set layer in which to create connectors")
get_inputs = BoolProperty( name="Retrieve Inputs", default = True)
get_outputs = BoolProperty( name="Retrieve Outputs", default = True)
weight_outputs = BoolProperty( name="Weight Outputs",
description = "If True, presynaptic sites will be scaled relative to the number of postsynaptically connected neurons.",
default = True)
restr_sources = StringProperty( name="Restrict to sources",
description='Use e.g. "12345,6789" or "annotation:glomerulus DA1" to restrict connectors to those that target this set of neurons')
restr_targets = StringProperty( name="Restrict to targets",
description='Use e.g. "12345,6789" or "annotation:glomerulus DA1" to restrict connectors to those coming from this set of neurons')
separate_connectors = BoolProperty(
name="Separate connectors (slow!)",
description = "If True, each connector will be generate as separate object. Beware: this is very slow.",
default = False )
@classmethod
def poll(cls, context):
return connected
def execute(self, context):
global remote_instance
bpy.context.scene.layers[self.layer] = True
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
if self.which_neurons == 'All':
to_search = bpy.data.objects
elif self.which_neurons == 'Selected':
to_search = bpy.context.selected_objects
if self.restr_sources:
self.source_skids = eval_skids(self.restr_sources)
if self.restr_targets:
self.target_skids = eval_skids(self.restr_targets)
filtered_ob_list = []
filtered_skids = []
for ob in to_search:
if ob.name.startswith('#'):
skid = re.search('#(.*?) -',ob.name).group(1)
filtered_ob_list.append(ob)
filtered_skids.append(skid)
if not filtered_skids:
print('Error - no neurons found! Cancelled')
self.report({'ERROR'},'No neurons found!')
return {'FINISHED'}
start = time.clock()
print("Retrieving connector data for %i neurons" % len(filtered_ob_list))
skdata, errors = retrieveSkeletonData( filtered_skids,
time_out = context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
skip_existing = False,
max_threads = context.user_preferences.addons['CATMAIDImport'].preferences.rqs )
cndata, neuron_names = self.get_all_connectors( skdata )
for i,neuron in enumerate(filtered_ob_list):
print('Creating Connectors for Neuron %i [of %i]' % ( i, len(filtered_ob_list) ) )
skid = re.search('#(.*?) -',neuron.name).group(1)
self.get_connectors(skid, skdata[skid], cndata, neuron_names, neuron.active_material.diffuse_color[0:3])
#bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP',iterations = 1)
if errors is None:
self.report({'INFO'},'Import successful. Look in layer %i' % self.layer)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
else:
self.report({'ERROR'}, errors)
return {'FINISHED'}
def get_all_connectors(self, skdata):
connector_id_list = set()
for skid in skdata:
for c in skdata[skid][1]:
if self.get_outputs is True and c[2] == 0:
connector_id_list.add(c[1])
if self.get_inputs is True and c[2] == 1:
connector_id_list.add(c[1])
connector_id_list = list(connector_id_list)
# Retrieve connectors in chunks
chunk_size = 5000
temp_data = []
for k, ch in enumerate( range(0,len(connector_id_list), chunk_size)):
connector_postdata = {}
print('Retrieving connectors chunk {0} of {1}'.format( k+1, math.ceil( len(connector_id_list)/chunk_size ) ))
for i, c in enumerate( connector_id_list[ch:ch+chunk_size] ):
connector_tag = 'connector_ids[%i]' % i
connector_postdata[connector_tag] = c
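# The resulting POST data maps indexed keys to connector IDs, e.g. {'connector_ids[0]': 123, 'connector_ids[1]': 456, ...}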
remote_connector_url = remote_instance.get_connector_details_url( project_id )
temp_data += remote_instance.fetch( remote_connector_url , connector_postdata )
skids_to_check = []
cn_data = {}
for c in temp_data:
cn_data[ c[0] ] = c[1]
if c[1]['presynaptic_to'] != None:
skids_to_check.append(c[1]['presynaptic_to'])
for target_skid in c[1]['postsynaptic_to']:
if target_skid != None:
skids_to_check.append(target_skid)
neuron_names = get_neuronnames( list ( set( skids_to_check + list(skdata) ) ) )
return cn_data, neuron_names
def get_connectors(self, active_skeleton, node_data, cndata, neuron_names ,mesh_color = None):
connector_ids = []
i_pre = 0
i_post = 0
connector_post_postdata = {}
connector_pre_postdata = {}
connector_post_coords = {}
connector_pre_coords = {}
connector_data_pre = []
connector_data_post = []
print('Extracting coordinates..')
### Get coordinates, split into pre-/postsynapses and convert to Blender space: (x, y, z) -> (x, z, -y), scaled by the conversion factor
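### E.g. a CATMAID node at (x, y, z) = (10000, 20000, 5000) nm with a conversion factor of 10000 maps to Blender (1.0, 0.5, -2.0)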
for connection in node_data[1]:
if connection[2] == 0 and self.get_outputs is True:
connector_pre_coords[connection[1]] = {}
connector_pre_coords[connection[1]]['id'] = connection[1]
connector_pre_coords[connection[1]]['parent_node'] = connection[0]
connector_pre_coords[connection[1]]['coords'] = (connection[3]/self.conversion_factor,connection[5]/self.conversion_factor,connection[4]/-self.conversion_factor)
#connector_tag = 'connector_ids[%i]' % i_pre
#connector_pre_postdata[connector_tag] = connection[1]
#i_pre += 1
connector_data_pre.append ( [connection[1] , cndata[ connection[ 1 ] ] ] )
if connection[2] == 1 and self.get_inputs is True:
connector_post_coords[connection[1]] = {}
connector_post_coords[connection[1]]['id'] = connection[1]
connector_post_coords[connection[1]]['parent_node'] = connection[0]
connector_post_coords[connection[1]]['coords'] = (connection[3]/self.conversion_factor,connection[5]/self.conversion_factor,connection[4]/-self.conversion_factor)
#connector_ids.append(connection[1])
#connector_tag = 'connector_ids[%i]' % i_post
### Add connector_id of this synapse to postdata
#connector_post_postdata[connector_tag] = connection[1]
#i_post += 1
connector_data_post.append ( [connection[1] , cndata[ connection[ 1 ] ] ] )
print('%s outputs (presynaptic) / %s inputs (postsynaptic) for skid %s found' % (len(connector_pre_coords), len(connector_post_coords), active_skeleton))
if connector_data_post or connector_data_pre:
number_of_targets = {}
neurons_included = []
if self.restr_targets:
#Filter Downstream Targets
connectors_to_delete = {}
for connector in connector_data_pre:
connectors_to_delete[connector[0]] = True
for target_skid in connector[1]['postsynaptic_to']:
if str(target_skid) in self.target_skids:
connectors_to_delete[connector[0]] = False
neurons_included.append(neuron_names[str(target_skid)])
for connector_id in connectors_to_delete:
if connectors_to_delete[connector_id] is True:
connector_post_coords.pop(connector_id)
print('Postsynaptic neurons remaining after filtering: ',list(set(neurons_included)))
if self.restr_sources:
#Filter Upstream Targets
connectors_to_delete = {}
for connector in connector_data_post:
connectors_to_delete[connector[0]] = True
if str(connector[1]['presynaptic_to']) in self.source_skids:
connectors_to_delete[connector[0]] = False
neurons_included.append(neuron_names[str(connector[1]['presynaptic_to'])])
for connector_id in connectors_to_delete:
if connectors_to_delete[connector_id] is True:
connector_pre_coords.pop(connector_id)
print('Presynaptic neurons remaining after filtering: ',list(set(neurons_included)))
if len(connector_data_pre) > 0:
### Extract number of postsynaptic targets for connectors
for connector in connector_data_pre:
number_of_targets[connector[0]] = max( 1, len(connector[1]['postsynaptic_to']) )
#print('Number of postsynapses/connector:', number_of_targets)
### Create a sphere for every connector - presynapses will be scaled based on number of postsynaptic targets
if self.color_prop == 'Black':
connector_color = (0,0,0)
elif self.color_prop == 'Random':
connector_color = [random.randrange(0,100)/100 for e in [0,0,0]]
elif self.color_prop == 'Mesh-color':
connector_color = mesh_color
Create_Mesh.make_connector_objects (active_skeleton, connector_post_coords, connector_pre_coords,
node_data, number_of_targets, connector_color,
self.create_as ,self.basic_radius, self.layer,
self.weight_outputs, self.conversion_factor,
self.separate_connectors)
else:
print('No connector data retrieved')
return {'FINISHED'}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self, width = 800)
def availableObjects(self, context):
"""
Polls for objects available as density source for SVG export (skeletons as well as connectors)
"""
available_objects = []
for obj in bpy.data.objects:
name = obj.name
available_objects.append((name,name,name))
if connected:
available_objects.append(('synapses','Synapses','Use neuron synapses (fetched from server).'))
else:
available_objects.append(('connect_for_more', 'Connect for more', 'Connect for more options.'))
return available_objects
class ConnectorsToSVG(Operator, ExportHelper):
"""Retrieves Connectors of active Neuron from CATMAID database and outputs SVG"""
bl_idname = "connectors.to_svg"
bl_label = "Export Connectors (=Synapses) to SVG"
# ExportHelper mixin class uses this
filename_ext = ".svg"
which_neurons = EnumProperty(name = "Which Neurons?",
items = [('Active','Active','Active'),('Selected','Selected','Selected'),('All','All','All')],
description = "Choose for which neurons to export connectors.")
random_colors = BoolProperty(name="Use Random Colors", default = False)
use_mesh_colors = BoolProperty(name="Use Mesh Colors", default = False,
description = "Neurons are exported with their Blender material diffuse color")
#gray_colors = BoolProperty(name="Use Gray Colors", default = False)
merge = BoolProperty(name="Merge into One", default = True,
description = "All neurons to process are rendered into the same brain.")
color_by_input = BoolProperty(name="Color by Input", default = False,
description = "Postsynapses from the same presynaptic neuron are given the same color.")
color_by_strength = BoolProperty(name="Color Presynapses by # of Postsynapses", default = False)
color_by_connections = StringProperty(name="Color by Connections to Neuron (Skid)", default = '',
description="Count connections of neuron to process and given neuron -> colors connectors appropriately. Attention: whether up- and or downstream partners are counted is set by [export inputs] and [export outputs]")
color_by_density = BoolProperty(name = "Color by Density",
default = False,
description = "Colors Connectors by # of Nodes of given [Object for Density] within [Proximity Threshold]")
object_for_density = EnumProperty(name = "Object for Density",
items = availableObjects,
description = "Choose Object for Coloring Connetors by Density")
proximity_radius_for_density = FloatProperty(name="Proximity Threshold (Blender Units!)",
default = 0.25,
description = "Maximum allowed distance between Connector and a Node")
export_inputs = BoolProperty(name="Export Synaptic Inputs", default = True )
export_outputs = BoolProperty(name="Export Synaptic Outputs", default = True )
export_gaps = BoolProperty(name="Export Gap Junctions", default = True )
export_abutting = BoolProperty(name="Export Abutting Connectors", default = False )
scale_outputs = BoolProperty(name="Scale Presynapses", default = False,
description = "Size of Presynapses based on number of postsynaptically connected neurons")
basic_radius = FloatProperty(name="Base Radius", default = 0.5)
export_as = EnumProperty(name="Export as:",
items = (("Circles","Circles","Circles"),
("Arrows","Arrows","Arrows"),
("Lines","Lines","Lines")
),
default = "Circles",
description = "Choose symbol that connectors will be exported as.")
export_brain_outlines = BoolProperty(name="Export Brain Outlines",
default = True,
description = "Adds Outlines of Brain to SVG (Drosophila L1 dataset)")
export_ring_gland = BoolProperty(name="Export Ring Gland",
default = True,
description = "Adds Outlines of Ring Gland to SVG (Drosophila L1 dataset)")
export_neuron = BoolProperty(name="Include Neuron", default = True,
description = "Export neurons skeletons as well")
barplot = BoolProperty(name="Add Barplot", default = False,
description = "Export Barplot along X/Y axis to show synapse distribution")
filter_connectors = StringProperty(name="Filter Connector:", default = '',
description="Filter Connectors by edges from/to neuron name(s)! (syntax: to exclude start with ! / to set synapse threshold start with > / applies to neuron names / case INsensitive / comma-separated -> ORDER MATTERS! ) ")
#filter_downstream = StringProperty(name="Filter Outputs:", default = '')
x_persp_offset = FloatProperty(name="Horizontal Perspective", default = 0.9, max = 2, min = -2)
y_persp_offset = FloatProperty(name="Vertical Perspective", default = -0.01, max = 2, min = -2)
views_to_export = EnumProperty(name="Views to export",
items = (("Front/Top/Lateral/Perspective-Dorsal","Front/Top/Lateral/Perspective-Dorsal","Front/Top/Lateral/Perspective-Dorsal"),
("Front/Top/Lateral","Front/Top/Lateral","Front/Top/Lateral"),
("Front","Front","Front"),
("Top","Top","Top"),
("Lateral","Lateral","Lateral"),
("Perspective-Front","Perspective-Front","Perspective-Front"),
("Perspective-Dorsal","Perspective-Dorsal","Perspective-Dorsal")
),
default = "Front/Top/Lateral/Perspective-Dorsal",
description = "Choose which views should be included in final SVG")
add_legend = BoolProperty(name="Add legend", default = True,
description = "Add legend to figure")
neuron_names = {}
connections_for_color = {}
mesh_color = {}
@classmethod
def poll(cls, context):
return connected
def execute(self, context):
global remote_instance
print('\nConnector export started:')
connector_data = {}
neurons_to_export = []
skids_to_export = []
# Make sure to reset variables
self.mesh_color = {}
self.conversion_factor = context.user_preferences.addons['CATMAIDImport'].preferences.conversion_factor
if self.which_neurons == 'Active':
if bpy.context.active_object is None:
print('No Object Active')
self.report({'ERROR'},'No Active Object!')
return{'CANCELLED'}
elif '#' not in bpy.context.active_object.name:
print('Active Object not a Neuron')
self.report({'ERROR'},'Active Object not a Neuron!')
return{'CANCELLED'}
active_skid = re.search('#(.*?) -',bpy.context.active_object.name).group(1)
skids_to_export.append(active_skid)
neurons_to_export.append(bpy.context.active_object)
if self.use_mesh_colors:
self.mesh_color[active_skid] = bpy.context.active_object.active_material.diffuse_color
elif self.which_neurons == 'Selected':
for neuron in bpy.context.selected_objects:
if neuron.name.startswith('#'):
skid = re.search('#(.*?) -',neuron.name).group(1)
skids_to_export.append(skid)
neurons_to_export.append(neuron)
if self.use_mesh_colors:
self.mesh_color[skid] = neuron.active_material.diffuse_color
elif self.which_neurons == 'All':
for neuron in bpy.data.objects:
if neuron.name.startswith('#'):
skid = re.search('#(.*?) -',neuron.name).group(1)
skids_to_export.append(skid)
neurons_to_export.append(neuron)
if self.use_mesh_colors:
self.mesh_color[skid] = neuron.active_material.diffuse_color
print("Retrieving connector data for %i neurons" % len(skids_to_export))
skdata,errors = retrieveSkeletonData( skids_to_export,
time_out = context.user_preferences.addons['CATMAIDImport'].preferences.time_out,
skip_existing = False,
get_abutting = self.export_abutting,
max_threads = context.user_preferences.addons['CATMAIDImport'].preferences.rqs
)
#Cndata is a dictionary containing details of all connectors
cndata = self.get_all_connectors( skdata )
if cndata is None:
return {'CANCELLED'}
if errors is not None:
self.report({'ERROR'},errors)
for skid in skids_to_export:
connector_data[skid] = self.get_connectors(skid, skdata[skid], cndata)
if self.color_by_connections:
#If outputs are exported then count only upstream connections (upstream sources of these outputs)
#If inputs are exported then count only downstream connections (downstream targets of these inputs)
#-> just use them invertedly for use_inputs/outputs when calling get_connectivity
self.connections_for_color = self.get_connectivity( skids_to_export,
self.export_outputs,
self.export_inputs
)
if self.export_neuron is True:
neurons_svg_string = self.create_svg_for_neuron(neurons_to_export)
else:
neurons_svg_string = {}
#Sort skids_to_export by color
if self.use_mesh_colors:
color_strings = { skid:str(color) for (skid,color) in self.mesh_color.items() }
skids_to_export = list( sorted( skids_to_export, key = color_strings.__getitem__ ) )
self.export_to_svg( skids_to_export, connector_data, neurons_svg_string)
osd.show("Done.")
osd_timed = ClearOSDAfter(3)
osd_timed.start()
return {'FINISHED'}
def get_all_connectors(self, skdata):
""" Get details for connectors for all neuronss
"""
connector_id_list = []
connector_postdata = {}
for skid in skdata:
for c in skdata[skid][1]:
if self.export_outputs is True and c[2] == 0:
connector_id_list.append(c[1])
if self.export_inputs is True and c[2] == 1:
connector_id_list.append(c[1])
if self.export_gaps is True and c[2] == 2:
connector_id_list.append(c[1])
if self.export_abutting is True and c[2] == 3:
connector_id_list.append(c[1])
if not connector_id_list:
print('ERROR: no connectors found!')
self.report({'ERROR'},'Export aborted: No connectors found.')
return None
for i, c in enumerate( list( set( connector_id_list ) ) ):
connector_tag = 'connector_ids[%i]' % i
connector_postdata[ connector_tag ] = c
remote_connector_url = remote_instance.get_connector_details_url( project_id )
"""
Format of temp_data = [ [ cn_id, { 'connector_id' : int(),
'presynaptic_to': skid,
'postsynaptic_to' : [skid, skid, ...],
'presynaptic_to_node' : tn_id,
'postsynaptic_to_node': [tn_id, tn_id, ...] }
] ]
"""
temp_data = remote_instance.fetch( remote_connector_url , connector_postdata )
skids_to_check = []
cn_data = { c[0] : c[1] for c in temp_data }
skids_to_check = [ c[1]['presynaptic_to'] for c in temp_data if c[1]['presynaptic_to'] != None ] + [ s for c in temp_data for s in c[1]['postsynaptic_to'] if s != None ]
self.check_ancestry ( list ( set( skids_to_check + list(skdata) ) ) )
#Format of cn_data = { connector_id : {} }
return cn_data
def get_connectors(self, active_skeleton, node_data, cndata ):
""" Get a list of connectors for each neuron. Apply filters if necessary
"""
if self.filter_connectors:
filter_list = self.filter_connectors.split(',')
#Check if filter is based on inclusion, exclusion or both:
filter_exclusion = False
filter_inclusion = False
for entry in filter_list:
if entry.startswith('!') or entry.startswith('>'):
filter_exclusion = True
else:
filter_inclusion = True
connector_post_coords = {}
connector_pre_coords = {}
connector_data_post = []
connector_data_pre = []
print('Extracting coordinates..')
### Convert coordinates to Blender
nodes_list = { n[0] : ( float(n[3])/self.conversion_factor,
float(n[5])/self.conversion_factor,
float(n[4])/-self.conversion_factor
) for n in node_data[0] }
connector_coords = { cn[1] : (cn[3]/self.conversion_factor,cn[5]/self.conversion_factor,cn[4]/-self.conversion_factor) for cn in node_data[1] }
for cn in node_data[1]:
if cn[2] == 1 and self.export_inputs is True:
### For inputs, the treenodes that the connector targets are listed.
### Reason: a single connector can synapse onto the same neuron multiple times (at different treenodes)!
### Attention: a treenode can be linked to multiple connectors (up- and downstream)
if cn[0] not in connector_pre_coords:
connector_pre_coords[cn[0]] = {}
#Format: connector_pre_coord[target_treenode_id][upstream_connector_id] = coords of target treenode
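#E.g. connector_pre_coords[123][456]['coords'] holds the coordinates of treenode 123, which receives input from connector 456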
connector_pre_coords[cn[0]][cn[1]] = {}
connector_pre_coords[cn[0]][cn[1]]['coords'] = nodes_list[cn[0]] #these are treenode coords, NOT connector coords
connector_data_pre.append( [ cn[1] , cndata[ cn[1] ] ] )
if cn[2] == 0 and self.export_outputs is True:
connector_post_coords[cn[1]] = {}
connector_post_coords[cn[1]]['id'] = cn[1]
connector_post_coords[cn[1]]['coords'] = (cn[3]/self.conversion_factor,cn[5]/self.conversion_factor,cn[4]/-self.conversion_factor)
connector_data_post.append( [ cn[1] , cndata[ cn[1] ] ] )
print('%s output connectors / %s input treenodes for skid %s found' % (len(connector_post_coords), len(connector_pre_coords), active_skeleton))
remote_connector_url = remote_instance.get_connector_details_url( project_id )
if connector_data_pre or connector_data_post:
print("Connectors successfully retrieved")
number_of_targets = { }
presynaptic_to = {}
postsynaptic_to = {}
### Only proceed if neuron actually has Outputs (e.g. motor neurons)
if len(connector_post_coords) > 0:
skids_to_check = []
total_synapse_count = {}
### Count all neurons postsynaptic to the connector
for connector in connector_data_post:
number_of_targets[connector[0]] = len(connector[1]['postsynaptic_to'])
for entry in connector[1]['postsynaptic_to']:
skids_to_check.append(entry)
### Count number of connections for each postsynaptic neuron
if entry not in total_synapse_count:
total_synapse_count[entry] = 1
else:
total_synapse_count[entry] += 1
print('Retrieving Ancestry of all downstream neurons...')
self.check_ancestry( skids_to_check )
print('Done')
neurons_included = []
entries_to_delete = {}
neurons_included = []
### Create list of targets for all source treenodes:
### connector_post_coords[connector_id]
for connector in connector_data_post:
connector_id = connector[0]
if connector_id in connector_post_coords:
connector_post_coords[connector_id]['postsynaptic_to'] = connector[1]['postsynaptic_to']
if connector_id not in postsynaptic_to:
postsynaptic_to[connector_id] = []
entries_to_delete[ connector_id ] = True
if self.filter_connectors:
print('Filtering Connector %i (postsynaptic to: %s) for: < %s >' % (connector[0], str(connector[1]['postsynaptic_to']), self.filter_connectors))
if len(connector[1]['postsynaptic_to']) == 0 or None in connector[1]['postsynaptic_to']:
print('Connector w/o postsynaptic connection found: %s - will NOT be exported' % connector[0] )
### Connector_data_XXX is a list NOT a dictionary, so we have to cycle through it
for target_skid in connector[1]['postsynaptic_to']:
if self.filter_connectors:
#Decide default inclusion: with an inclusion filter, connectors start excluded until a match is found; with an exclusion-only filter, they start included
if filter_inclusion is True:
include_connector = False
else:
include_connector = True
for tag in filter_list:
### Check for match with filter:
### If filter startswith '!' then those neurons will be excluded
if tag.startswith('!'):
if target_skid != None and tag[1:].lower() in self.neuron_names[target_skid].lower():
print('Excluded: match with %s - %s (# %s)' % (tag,self.neuron_names[target_skid],target_skid))
include_connector = False
#else:
#If a single target of connector is to be excluded, remove the whole connector from dict[connector_id]
#connector_post_coords.pop(connector_id)
elif tag.startswith('>'):
try:
synapse_threshold = int(tag[1:])
if total_synapse_count[target_skid] >= synapse_threshold:
print('Above threshold: %s - %s (%i)' % (target_skid, self.neuron_names[target_skid], total_synapse_count[target_skid]))
include_connector = True
#else:
#If connector is below threshold: remove him from dict[treenode]
#connector_post_coords.pop(connector_id)
except:
print('Unable to convert filter string to int for synapse threshold!!')
else:
if target_skid != None and tag.lower() in self.neuron_names[target_skid].lower():
print('Included: match with %s - %s (# %s)' % (tag,self.neuron_names[target_skid],target_skid))
include_connector = True
if include_connector is True:
postsynaptic_to[connector_id].append(target_skid)
entries_to_delete[connector_id] = False
neurons_included.append(self.neuron_names[target_skid])
else:
postsynaptic_to[connector_id].append(target_skid)
entries_to_delete[connector_id] = False
#print(entries_to_delete)
### Delete Treenode from connectors list, if no match has been found
count = 0
for connector_id in entries_to_delete:
if entries_to_delete[connector_id] is True:
#print('Deleted entry for treenode %s' % treenode)
connector_post_coords.pop(connector_id)
count += 1
print('%i connectors left (%s removed by Filter)' % (len(connector_post_coords),count))
if self.filter_connectors:
print('Downstream Neurons remaining after filtering:')
print(set(neurons_included))
### Only proceed if neuron actually has Inputs (e.g. sensory neurons)
if len( connector_pre_coords ) > 0:
print('Total of %s connectors for %s input treenodes found' % (str(len(connector_data_pre)), str(len(connector_pre_coords))))
### Retrieve Ancestry(= name for all upstream neurons):
print('Retrieving Ancestry of all upstream neurons...')
skids_to_check = []
total_synapse_count = {}
neurons_included = []
entries_to_delete = {}
for connector in connector_data_pre:
skids_to_check.append(connector[1]['presynaptic_to'])
self.check_ancestry(skids_to_check)
print('Done')
#Create weight map for subsequent threshold filtering
for connector in connector_data_pre:
### Count connections per presynaptic neuron. Keep in mind: a single treenode can receive input from more than one connector!
input = connector[1]['presynaptic_to']
### Count number of connections for each presynaptic neuron
if input not in total_synapse_count:
total_synapse_count[input] = 1
else:
total_synapse_count[input] += 1
#print(total_synapse_count)
### Create list of sources for all target treenodes:
for treenode in connector_pre_coords:
#print('Searching for treenode %s connected to connector %s' % (str(treenode),str(connector_pre_coords[treenode]['connector_id']) ) )
if treenode not in presynaptic_to:
presynaptic_to[treenode] = []
entries_to_delete[treenode] = True
### Connector_data_XXX is a list NOT a dictionary, so we have to cycle through it
for connector in connector_data_pre:
### If connector IDs match. Keep in mind: A single treenode can receive input from more than one connector!!!
#if connector[0] == connector_pre_coords[treenode]['connector_id']:
if connector[0] in connector_pre_coords[treenode]:
connector_pre_coords[treenode][connector[0]]['presynaptic_to'] = connector[1]['presynaptic_to']
if self.filter_connectors:
print('Filtering Connector %s (presynaptic to %s) for: %s' % (connector[0], connector[1]['presynaptic_to'] ,self.filter_connectors))
#Decide default inclusion: with an inclusion filter, connectors start excluded until a match is found; with an exclusion-only filter, they start included
if filter_inclusion is True:
include_connector = False
else:
include_connector = True
if connector[1]['presynaptic_to'] is None:
print('Connector w/o presynaptic connection found: %s - will NOT be exported' % connector[0] )
include_connector = False
for tag in filter_list:
### Check for match with filter:
### If filter startswith '!' then those neurons will be excluded
if tag.startswith('!'):
if connector[1]['presynaptic_to'] != None and tag[1:].lower() in self.neuron_names[connector[1]['presynaptic_to']].lower():
print('Excluded: match with < %s > : %s (# %s)' % (tag,self.neuron_names[connector[1]['presynaptic_to']],connector[1]['presynaptic_to']))
include_connector = False
elif tag.startswith('>'):
try:
synapse_threshold = int(tag[1:])
if total_synapse_count[connector[1]['presynaptic_to']] >= synapse_threshold:
print('Above threshold: -- %s -- : %s (%i)' % (connector[1]['presynaptic_to'],self.neuron_names[connector[1]['presynaptic_to']],total_synapse_count[connector[1]['presynaptic_to']]))
include_connector = True
except:
print('Unable to convert filter string to int')
else:
if connector[1]['presynaptic_to'] != None and tag.lower() in self.neuron_names[connector[1]['presynaptic_to']].lower():
print('Included: match with < %s >: %s (# %s)' % (tag,self.neuron_names[connector[1]['presynaptic_to']],connector[1]['presynaptic_to']))
include_connector = True
if include_connector is True:
presynaptic_to[treenode].append(connector[1]['presynaptic_to'])
entries_to_delete[treenode] = False
neurons_included.append(self.neuron_names[connector[1]['presynaptic_to']])
else:
presynaptic_to[treenode].append(connector[1]['presynaptic_to'])
entries_to_delete[treenode] = False
### Delete Treenode from connectors list, if no match has been found
count = 0
for treenode in entries_to_delete:
if entries_to_delete[treenode] is True:
#print('Deleted entry for treenode %s' % treenode)
connector_pre_coords.pop(treenode)
count += 1
print('%i target treenodes left (%s removed by Filter)' % (len(connector_pre_coords),count))
if self.filter_connectors:
print('Upstream Neurons remaining after filtering:')
print(set(neurons_included))
return( ( number_of_targets, connector_pre_coords, connector_post_coords, presynaptic_to ) )
else:
print('No data retrieved')
return ( {}, {}, {}, {} )
def export_to_svg(self, skids_to_export, connector_data, neurons_svg_string):
print('%i Neurons in Connector data found' % len(connector_data))
svg_header = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1">\n'
svg_end = '\n</svg> '
offsetX = 0
offsetY = 0
offsetY_for_top = 60
offsetX_for_top = 135
offsetY_for_front = -150
offsetX_for_front = 5
offsetY_for_lateral = 0
offsetX_for_lateral = 0
offsetY_for_persp = 150
offsetX_for_persp = 0
offsetY_forMergeLegend = -150
if "Perspective-Dorsal" in self.views_to_export:
#For dorsal perspective change offsets:
y_persp_offset = -1 * self.x_persp_offset
x_persp_offset = 0
#y_center sets the pivot along y axis (0-25) -> all this does is move the object along y axis, does NOT change perspective
y_center = 5
else:
x_persp_offset = self.x_persp_offset
y_persp_offset = self.y_persp_offset
if self.merge is True:
offsetIncrease = 0
else:
offsetIncrease = 250
basic_radius = self.basic_radius
density_gradient = {'start_rgb': (0,255,0),
'end_rgb':(255,0,0)}
density_data = []
brain_shape_top_string = '<g id="brain shape top">\n <polyline points="28.3,-5.8 34.0,-7.1 38.0,-9.4 45.1,-15.5 50.8,-20.6 57.7,-25.4 59.6,-25.6 63.2,-22.8 67.7,-18.7 70.7,-17.2 74.6,-14.3 78.1,-12.8 84.3,-12.6 87.7,-15.5 91.8,-20.9 98.1,-32.4 99.9,-38.3 105.2,-48.9 106.1,-56.4 105.6,-70.1 103.2,-75.8 97.7,-82.0 92.5,-87.2 88.8,-89.1 82.6,-90.0 75.0,-89.9 67.4,-89.6 60.8,-85.6 55.3,-77.2 52.4,-70.2 51.9,-56.7 55.0,-47.0 55.9,-36.4 56.0,-32.1 54.3,-31.1 51.0,-33.4 50.7,-42.5 52.7,-48.6 49.9,-58.4 44.3,-70.8 37.4,-80.9 33.1,-84.0 24.7,-86.0 14.2,-83.9 8.3,-79.1 2.9,-68.3 1.3,-53.5 2.5,-46.9 3.0,-38.3 6.3,-28.2 10.9,-18.7 16.3,-9.7 22.2,-6.4 28.3,-5.8" \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n <polyline points="88.8,-89.1 90.9,-97.7 92.9,-111.3 95.6,-125.6 96.7,-139.4 95.9,-152.0 92.8,-170.2 89.4,-191.0 87.2,-203.7 80.6,-216.6 73.4,-228.3 64.5,-239.9 56.4,-247.3 48.8,-246.9 39.0,-238.3 29.6,-226.9 24.7,-212.0 22.9,-201.2 23.1,-186.9 18.7,-168.3 14.1,-150.4 12.6,-138.0 13.7,-121.5 16.3,-105.1 18.3,-84.8 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
brain_shape_front_string = '<g id="brain shape front"> \n <polyline points="51.5,24.0 52.0,21.3 52.0,17.6 50.2,11.2 46.8,6.5 40.5,2.5 33.8,1.1 25.4,3.4 18.8,8.0 13.2,12.5 8.3,17.9 4.3,23.8 1.8,29.3 1.4,35.6 1.6,42.1 4.7,48.3 7.9,52.5 10.8,56.9 13.1,64.3 14.3,73.2 12.8,81.0 16.2,93.6 20.9,101.5 28.2,107.5 35.3,112.7 42.2,117.0 50.8,119.3 57.9,119.3 67.1,118.0 73.9,114.1 79.0,110.4 91.1,102.7 96.3,94.2 96.3,85.3 94.0,81.4 95.4,74.8 96.6,68.3 97.5,64.7 100.9,59.7 103.8,52.5 105.4,46.7 106.1,38.8 105.4,32.4 103.1,26.4 98.9,21.0 94.1,16.3 88.3,11.1 82.0,6.5 74.8,3.3 67.8,3.1 61.7,5.1 56.8,9.6 53.4,15.2 52.2,19.7 52.3,25.3 51.4,24.1 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n <polyline points="46.6,34.0 45.5,36.1 43.2,38.6 41.1,43.3 39.7,48.7 39.7,51.0 42.6,55.2 51.4,59.5 54.9,60.9 60.8,60.8 62.9,58.2 62.9,52.6 60.3,47.6 57.7,43.9 56.1,40.2 55.1,35.9 55.1,34.4 51.8,33.6 49.1,33.5 46.6,34.0 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
brain_shape_lateral_string = '<g id="brain shape lateral"> \n <polyline points="247.2,91.6 246.8,94.6 246.3,95.5 245.0,96.7 239.8,99.0 225.8,103.4 210.9,107.5 200.8,109.1 186.0,109.9 166.0,110.7 150.8,111.3 135.8,112.8 120.9,114.2 107.3,114.9 98.6,115.7 88.7,117.9 81.3,119.1 66.2,119.2 58.3,118.7 51.6,118.5 46.0,116.4 40.7,114.4 36.6,112.0 34.2,109.6 30.7,104.8 27.3,100.3 25.3,98.2 22.2,91.9 21.1,86.8 19.6,80.6 17.4,73.9 15.2,68.9 11.2,61.8 11.0,52.3 9.1,49.9 7.4,46.4 6.6,42.6 6.3,35.7 7.0,27.1 7.4,24.5 10.2,18.7 15.8,13.2 22.3,8.5 26.2,7.1 32.6,7.0 36.1,6.2 41.2,3.9 47.2,1.8 54.8,1.7 64.5,3.2 73.4,5.3 81.1,11.2 86.7,16.4 89.0,21.1 90.2,33.2 89.3,42.8 86.6,48.7 82.1,53.9 78.8,57.2 77.9,59.2 91.4,61.6 98.5,62.2 116.6,62.4 131.7,61.0 146.1,59.8 161.1,60.1 176.0,61.3 190.8,63.3 206.2,66.0 219.5,70.6 224.5,72.8 239.5,82.1 245.5,86.0 246.9,87.9 247.2,91.6 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
brain_shape_dorsal_perspective_05_string = '<g id="brain shape dorsal perspective" transform="scale(0.21) translate(-511,-30)"> \n <polyline points="255,974 238,968 184,939 174,932 113,880 100,863 92,845 79,793 64,751 46,706 45,685 51,636 72,565 77,536 78,524 73,508 64,462 60,427 52,395 31,370 17,348 9,321 3,284 2,230 7,185 22,153 40,126 59,105 88,82 126,60 145,51 163,47 175,46 201,53 214,62 234,88 243,104 263,90 275,63 280,33 285,27 293,14 308,5 319,2 343,3 389,21 424,44 451,74 469,110 491,145 504,177 508,204 507,235 501,269 482,309 466,334 452,345 445,351 443,377 435,393 429,433 427,462 425,515 436,558 444,571 452,600 451,624 454,655 441,690 429,707 423,729 403,839 382,893 365,913 335,936 271,969 255,974" \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n <polyline points="52,395 90,401 129,392 145,374 153,346" \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n <polyline points="445,351 433,355 417,355 396,346 381,336 382,337" \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round" /> \n <polygon points="257,349 242,348 230,332 216,313 208,300 215,283 228,261 245,234 260,201 265,168 262,143 266,141 270,164 283,192 288,208 303,242 312,265 318,276 305,303 290,323 281,332 268,343" \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
brain_shape_dorsal_perspective_09_string = '<g id="brain shape dorsal perspective" transform="scale(0.173) translate(-620,-112)"> \n <path d="M514 676l5 64 1 92 30 122 9 144 -40 122 -26 223 -29 121 -108 118 -28 20 -26 8 -29 -20 -68 -78 -31 -46 -43 -69 -21 -34 -17 -115 -16 -86 -23 -101 0 -104 33 -235 -4 -146c-3,-22 -5,-31 -7,-42 -1,-12 4,-18 -2,-27 -6,-10 -22,-17 -32,-27 -9,-9 -19,-16 -26,-30 -7,-15 -9,-38 -12,-54 -2,-17 -3,-28 -4,-45 0,-17 0,-34 1,-57 0,-23 2,-64 3,-81 1,-17 0,-14 3,-22 3,-8 3,-8 13,-27 9,-19 33,-67 43,-85 4,-7 28,-41 33,-46 9,-9 28,-24 38,-30 31,-20 63,1 99,17 18,7 23,15 29,19 6,4 2,5 6,6 5,2 13,4 21,2 8,-2 21,-8 27,-15 6,-7 3,-14 6,-23 3,-9 9,-22 13,-31 3,-9 5,-15 9,-24 3,-8 5,-19 10,-26 5,-6 13,-9 20,-13 8,-4 15,-7 23,-9 8,-3 16,-6 27,-6 11,0 21,1 35,8 15,8 37,25 49,35 12,11 16,17 24,29 8,13 15,27 24,47 9,20 25,49 32,71 8,23 9,48 13,64 3,16 6,21 9,31 3,10 7,19 8,31 1,12 -1,28 -1,40 -1,13 -1,22 -3,35 -2,13 -3,30 -7,45 -5,15 -8,22 -18,42 -9,20 -30,60 -40,75 -11,14 -15,0 -20,9 -5,9 -5,19 -7,38 -3,19 -8,50 -8,74l0 2z" \n style="fill:#D9DADA;stroke-width:0" /> \n <path d="M301 495c-9,-17 -19,-33 -28,-50 3,-2 6,-4 9,-6 4,-6 8,-11 12,-17 5,-10 9,-20 13,-30 5,-20 10,-40 15,-60 -2,-14 -4,-28 -6,-41 0,-4 1,-7 2,-11 -1,-10 -2,-21 -4,-31 -2,-3 -4,-7 -6,-10 3,-2 6,-3 8,-5 1,9 1,17 2,25 5,16 11,32 16,48 3,17 7,35 10,52 8,17 17,34 25,50 -9,21 -17,42 -26,63 -8,12 -16,24 -25,36 -5,-4 -11,-9 -17,-13z" \n style="fill:#FEFEFE;stroke-width:0"/> \n </g> \n'
ring_gland_top = '<g id="ring gland top"> \n <polyline points="57.8,-43.9 59.9,-43.8 62.2,-43.3 64.4,-41.1 67.3,-37.7 70.8,-34.0 73.9,-30.7 75.1,-28.3 76.2,-24.8 76.0,-22.1 75.2,-19.7 73.0,-17.3 70.4,-16.1 66.5,-16.1 64.4,-15.2 61.8,-12.3 58.8,-9.5 55.7,-8.6 51.3,-8.1 47.6,-8.3 44.0,-8.7 41.4,-10.3 40.8,-12.6 42.5,-16.1 45.4,-20.7 47.9,-25.5 48.9,-28.9 50.1,-32.3 51.8,-33.0 51.5,-35.1 51.7,-37.9 52.4,-41.2 53.9,-42.8 55.8,-43.8 57.8,-43.9 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
ring_gland_front = '<g id="ring gland front"> \n <polyline points="45.5,11.3 44.3,12.3 41.9,14.2 40.9,16.8 41.3,20.1 42.7,24.7 44.0,27.8 45.9,28.6 49.0,27.7 50.1,27.7 53.0,28.1 56.5,28.4 59.2,28.3 62.2,27.5 64.5,26.6 67.1,26.6 69.7,27.2 70.9,26.9 73.1,25.4 74.8,22.8 75.9,20.3 75.9,17.6 74.8,15.1 72.8,12.8 69.3,10.2 66.7,8.6 64.2,7.7 61.9,7.6 59.0,8.4 57.1,9.4 56.6,11.1 55.1,10.0 53.5,9.2 51.3,8.9 49.6,9.2 45.5,11.3 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
ring_gland_lateral = '<g id="ring gland lateral"> \n <polyline points="9.0,16.8 13.7,13.3 23.4,9.8 27.9,9.1 31.1,9.5 34.8,8.1 38.8,7.7 41.2,8.4 42.6,9.8 44.0,12.7 44.2,16.6 43.5,22.3 41.2,25.1 36.3,26.4 31.6,26.4 26.9,27.2 22.1,26.7 20.2,27.1 15.7,28.6 12.7,28.2 11.0,28.7 9.3,27.7 8.3,24.8 8.3,20.9 9.0,16.8 " \n style="fill:none;stroke:darkslategray;stroke-width:0.5;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
ring_gland_dorsal_perspective_05 = '<g id="ring gland perspective" transform="scale(1.5) translate(-51,-4)"> \n <polygon points="15,18 13,17 11,15 10,13 5,11 3,12 1,10 0,8 1,6 3,4 7,3 10,3 13,2 17,0 20,0 20,0 23,0 24,2 24,5 23,8 22,10 18,10 17,10 17,12 16,14 16,16 " style="fill:#D8D9D9;stroke-width:0;stroke-linecap:round;stroke-linejoin:round"/> \n </g>'
ring_gland_dorsal_perspective_09 = '<g id="ring gland perspective" transform="scale(0.094) translate(-818,-220)"> \n <polygon points="249,25 257,21 266,16 275,13 283,9 292,7 300,5 301,5 302,5 316,2 330,0 343,0 355,1 366,3 375,6 384,11 390,17 394,24 396,33 397,45 395,59 391,77 387,96 381,115 375,132 369,144 363,152 356,157 350,161 343,163 335,163 327,162 318,161 313,159 310,163 303,167 298,170 294,173 293,173 292,177 289,183 285,187 284,187 281,194 280,196 279,199 277,205 274,211 271,218 268,223 264,228 262,229 263,230 262,237 265,241 270,254 273,270 274,287 274,303 271,318 267,332 261,344 261,352 259,366 256,380 252,392 247,403 242,410 235,415 227,415 219,411 215,407 215,407 210,405 205,400 200,394 194,387 189,380 185,374 182,367 179,362 179,361 177,359 171,348 167,339 165,332 165,326 165,326 164,324 162,320 160,316 159,313 158,310 157,308 157,306 158,303 158,303 155,299 151,292 147,289 141,286 135,282 128,278 128,278 125,279 120,279 115,279 111,277 107,274 104,271 101,268 99,264 96,261 95,260 87,256 78,252 68,248 60,244 56,241 54,241 44,236 35,230 28,225 21,218 15,212 10,205 5,197 2,190 1,182 1,177 1,175 0,163 2,151 8,141 16,132 26,123 38,116 51,111 64,106 77,103 88,101 98,101 107,101 115,104 118,105 120,103 131,95 142,86 154,77 167,69 181,61 195,54 210,47 217,44 229,37 243,29 " style="fill:#9D9E9E;stroke-width:0"/> \n </g> \n'
arrows_defs = '<defs> \n <marker id="markerArrow" markerWidth="13" markerHeight="13" refx="2" refy="6" orient="auto"> \n <path d="M2,2 L2,11 L10,6 L2,2" style="fill: #000000;" /> \n </marker> \n </defs>'
print('Writing SVG to file %s' % self.filepath)
f = open(self.filepath, 'w', encoding='utf-8')
f.write(svg_header)
"""
if self.use_arrows is True:
f.write(arrows_defs)
"""
#Create list of nodes for given density object
if self.color_by_density is True:
density_color_map = {}
max_density = 0
try:
for spline in bpy.data.objects[self.object_for_density].data.splines:
for node in spline.points:
#node.co is a 4D vector (x, y, z, w) for curve spline points
if node.co not in density_data:
density_data.append(node.co)
#print(density_data)
except:
print('ERROR: Unable to create density data for object!')
self.report({'ERROR'},'Error(s) occurred: see console')
#Fill density_color_map with density counts first and get max_density
for neuron in connector_data:
#Presynaptic connectors (=Treenodes)
for target_treenode in connector_data[neuron][1]:
for connector in connector_data[neuron][1][target_treenode]:
if connector not in density_color_map:
connector_co = connector_data[neuron][1][target_treenode][connector]['coords']
density_count = 0
for node in density_data:
dist1 = math.sqrt(
(connector_co[0]-node[0])**2 +
(connector_co[1]-node[1])**2 +
(connector_co[2]-node[2])**2
)
if dist1 < self.proximity_radius_for_density:
density_count += 1
if density_count > max_density:
max_density = density_count
density_color_map[connector] = density_count
#Postsynaptic connectors
for connector in connector_data[neuron][2]:
if connector not in density_color_map:
connector_co = connector_data[neuron][2][connector]['coords']
density_count = 0
for node in density_data:
dist1 = math.sqrt(
(connector_co[0]-node[0])**2 +
(connector_co[1]-node[1])**2 +
(connector_co[2]-node[2])**2
)
if dist1 < self.proximity_radius_for_density:
density_count += 1
if density_count > max_density:
max_density = density_count
density_color_map[connector] = density_count
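# A minimal, disabled sketch: the nested distance loops above could be
# vectorized with scipy.spatial.distance.cdist (imported at the top of this
# file if SciPy is available). `all_connector_coords` is a hypothetical list
# holding the connector coordinates gathered above; this is not wired in.
"""
connector_xyz = np.array([list(c)[:3] for c in all_connector_coords])
node_xyz = np.array([list(n)[:3] for n in density_data])
# Pairwise distances: one row per connector, one column per density node
dists = distance.cdist(connector_xyz, node_xyz)
counts = (dists < self.proximity_radius_for_density).sum(axis=1)
max_density = int(counts.max()) if counts.size else 0
"""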
#Convert density_color_map from density counts to colors by linear
#interpolation: e.g. a connector at half of max_density gets the midpoint
#between the start and end RGB of density_gradient
for connector in density_color_map:
density_count = density_color_map[connector]
if max_density > 0 and density_count > 0:
density_color = (
int(density_gradient['start_rgb'][0] + (density_gradient['end_rgb'][0] - density_gradient['start_rgb'][0])/max_density * density_count),
int(density_gradient['start_rgb'][1] + (density_gradient['end_rgb'][1] - density_gradient['start_rgb'][1])/max_density * density_count),
int(density_gradient['start_rgb'][2] + (density_gradient['end_rgb'][2] - density_gradient['start_rgb'][2])/max_density * density_count)
)
else:
#print('No density data within given radius found!')
density_color = (0,0,0)
density_color_map[connector] = density_color
### Create random color map for every input / red is reserved for all outputs
if self.color_by_input is True or self.color_by_strength is True:
input_color_map = {}
input_weight_map = {}
max_values = {}
presynaptic_to = {}
print('Creating input/weight color map...')
for neuron in connector_data:
presynaptic_to[neuron] = connector_data[neuron][3]
#print(presynaptic_to[neuron])
input_weight_map[neuron] = {}
max_values[neuron] = []
for target_treenode in presynaptic_to[neuron]:
for input in presynaptic_to[neuron][target_treenode]:
### Create random color map for all source neurons
if input not in input_color_map:
input_color_map[input] = (random.randrange(0,255), random.randrange(0,255),random.randrange(0,255))
outputs_color = (255, 0, 0)
### ATTENTION: this random input color map is replaced further down by a more evenly distributed version!
### Count number of connections for each presynaptic neuron
if input not in input_weight_map[neuron]:
input_weight_map[neuron][input] = {}
input_weight_map[neuron][input]['connections'] = 1
else:
input_weight_map[neuron][input]['connections'] += 1
### Get min & max values of weight map
for entry in input_weight_map[neuron]:
if entry != None:
max_values[neuron].append(input_weight_map[neuron][entry]['connections'])
#print(input_weight_map)
if self.export_inputs is True and max_values[neuron]:
    half_max = max(max_values[neuron])/2
    print('Half_max = ' + str(half_max))
else:
    half_max = 0
### Create color scheme from green to red based on min/max
for input in input_weight_map[neuron]:
    ### Fall back to green (lowest weight) if nothing was exported:
    ### half_max = 0 would otherwise cause a division by zero below
    if half_max == 0:
        input_weight_map[neuron][input]['color'] = (0, 255, 0)
        continue
    ### If input weight is bigger than half max, gradually reduce the
    ### green channel while the red channel stays at max
    if input_weight_map[neuron][input]['connections'] > half_max:
        red_channel = 255
        green_channel = int(255 - (255/half_max) * (input_weight_map[neuron][input]['connections']/2))
    ### Else keep green at max and gradually increase the red channel
    else:
        green_channel = 255
        red_channel = int((255/half_max) * (input_weight_map[neuron][input]['connections']))
    input_weight_map[neuron][input]['color'] = (red_channel, green_channel, 0)
"""
print('Calculating weight-based color for input %s (%s synapses) of neuron %s: %s' % (str(input), str(input_weight_map[neuron][input]['connections']), \
str(neuron), str(input_weight_map[neuron][input]['color'])))
"""
#Create more evenly distributed input_color_map:
new_input_color_map = ColorCreator.random_colors(len(input_color_map))
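# A minimal sketch of how evenly spaced colors can be generated with
# colorsys (imported above); ColorCreator.random_colors may work differently:
"""
def evenly_spaced_colors(n):
    # Walk around the HSV hue wheel at full saturation and value
    return [tuple(int(255 * v) for v in colorsys.hsv_to_rgb(i/n, 1, 1))
            for i in range(n)]
"""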
shapes = ShapeCreator.create_shapes(2, self.basic_radius)
input_shape_map = {}