Code quality: fix PEP8 violations
clbarnes committed Jan 18, 2018
1 parent 3ab4431 commit 2fa6357
Showing 13 changed files with 94 additions and 56 deletions.
2 changes: 2 additions & 0 deletions synapsesuggestor/admin.py
@@ -18,9 +18,11 @@

admin.site.register(SynapseDetectionTile)


class SynapseSliceAdmin(admin.ModelAdmin):
pass


admin.site.register(SynapseSlice, SynapseSliceAdmin)

admin.site.register(SynapseObject)
30 changes: 16 additions & 14 deletions synapsesuggestor/control/analysis.py
@@ -3,7 +3,6 @@
Methods called by the frontend analysis widget
"""
from __future__ import division
import numpy as np

from django.db import connection
from django.http import JsonResponse
@@ -77,25 +76,27 @@ def get_skeleton_synapses(request, project_id=None):
if ssw_id is None:
pssw = get_most_recent_project_SS_workflow(project_id)
else:
pssw = ProjectSynapseSuggestionWorkflow.objects.get(synapse_suggestion_workflow_id=ssw_id, project_id=project_id)
pssw = ProjectSynapseSuggestionWorkflow.objects.get(
synapse_suggestion_workflow_id=ssw_id, project_id=project_id
)

cursor = connection.cursor()

# todo: why is this casting necessary? unit tests produced strings
cursor.execute('''
SELECT
SELECT
ss_so_synapse_object_id, array_agg(tn_id), tn_skeleton_id,
cast(round(avg(that_ss_xs_centroid)) as int), cast(round(avg(that_ss_ys_centroid)) as int),
cast(round(avg(that_ss_xs_centroid)) as int), cast(round(avg(that_ss_ys_centroid)) as int),
cast(round(avg(tile_z_tile_idx)) as int), array_agg(tile_z_tile_idx),
sum(that_ss_size_px), sum(ss_tn_contact_px), avg(that_ss_uncertainty)
FROM (
SELECT DISTINCT ON (that_ss.id)
SELECT DISTINCT ON (that_ss.id)
ss_so.synapse_object_id, tn.id, tn.skeleton_id,
that_ss.xs_centroid, that_ss.ys_centroid, tile.z_tile_idx,
that_ss.size_px, ss_tn.contact_px, that_ss.uncertainty
FROM treenode tn
INNER JOIN synapse_slice_treenode ss_tn
ON tn.id = ss_tn.treenode_id
ON tn.id = ss_tn.treenode_id
INNER JOIN synapse_slice this_ss
ON ss_tn.synapse_slice_id = this_ss.id
INNER JOIN synapse_slice_synapse_object ss_so
@@ -223,9 +224,9 @@ def _get_intersecting_connectors_edge(cursor=None, **kwargs):
SELECT subq.syn_id, subq.c_id, subq.c_x, subq.c_y, subq.c_z, subq.c_conf, subq.c_user,
array_agg(tn.skeleton_id), array_agg(tn.id), subq.min_dist
FROM (
SELECT
ss_so.synapse_object_id,
c.id, c.location_x, c.location_y, c.location_z, c.confidence, c.user_id,
SELECT
ss_so.synapse_object_id,
c.id, c.location_x, c.location_y, c.location_z, c.confidence, c.user_id,
min(ST_Distance(tce.edge, ss_trans.geom_2d))
FROM synapse_slice_synapse_object ss_so
INNER JOIN unnest(%(obj_ids)s::BIGINT[]) AS syns (id)
@@ -271,8 +272,8 @@ def _get_intersecting_connectors_node(cursor=None, **kwargs):
array_agg(tn.skeleton_id), array_agg(tn.id), subq.min_dist
FROM (
SELECT
ss_so.synapse_object_id,
c.id, c.location_x, c.location_y, c.location_z, c.confidence, c.user_id,
ss_so.synapse_object_id,
c.id, c.location_x, c.location_y, c.location_z, c.confidence, c.user_id,
min(ST_Distance(ST_MakePoint(c.location_x, c.location_y), ss_trans.geom_2d))
FROM synapse_slice_synapse_object ss_so
INNER JOIN unnest(%(obj_ids)s::BIGINT[]) AS syns (id)
@@ -288,7 +289,8 @@ def _get_intersecting_connectors_node(cursor=None, **kwargs):
ON ss_trans.synapse_detection_tile_id = tile.id
INNER JOIN connector c
ON (tile.z_tile_idx + %(offset_zs)s) * %(resolution_z)s = c.location_z
AND ST_DWithin(ST_MakePoint(c.location_x, c.location_y), ss_trans.geom_2d, %(tolerance)s) -- better way to handle geom?
-- better way to handle geom?
AND ST_DWithin(ST_MakePoint(c.location_x, c.location_y), ss_trans.geom_2d, %(tolerance)s)
AND c.project_id = %(project_id)s
GROUP BY ss_so.synapse_object_id, c.id
) AS subq (syn_id, c_id, c_x, c_y, c_z, c_conf, c_user, min_dist)
@@ -346,7 +348,7 @@ def get_synapse_extents(request, project_id=None):
min(ST_YMin(ss.geom_2d)) - %(xy_pad)s, max(ST_YMax(ss.geom_2d)) + %(xy_pad)s,
min(tile.z_tile_idx) - %(z_pad)s, max(tile.z_tile_idx) + %(z_pad)s
FROM synapse_slice_synapse_object ss_so
INNER JOIN synapse_slice ss
INNER JOIN synapse_slice ss
ON ss_so.synapse_slice_id = ss.id
INNER JOIN synapse_detection_tile tile
ON ss.synapse_detection_tile_id = tile.id
@@ -388,7 +390,7 @@ def get_partners(request, project_id=None):
cursor = connection.cursor()

cursor.execute('''
SELECT
SELECT
ss_so.synapse_object_id, array_agg(tn.id), tn.skeleton_id, sum(ss_tn.contact_px)
FROM synapse_slice_synapse_object ss_so
INNER JOIN unnest(%(syns)s::bigint[]) AS syns(id)
13 changes: 7 additions & 6 deletions synapsesuggestor/control/common.py
@@ -36,7 +36,8 @@ def list_into_query(query, arg_lst, fmt='%s'):
Args:
query(str): A string with a single {} in it
arg_lst(list): List of arguments to supply to SQL
fmt(str, optional): Placeholder to use for each element (e.g. use this to wrap stuff in brackets), or to account for tuples (Default value = '%s')
fmt(str, optional): Placeholder to use for each element (e.g. use this to wrap stuff in brackets), or to account
for tuples (Default value = '%s')
Returns:
(str, tuple): The two arguments to pass to cursor.execute
@@ -74,8 +75,8 @@ def list_into_query_multi(query, fmt=None, **kwargs):
--------
>>> query = "INSERT INTO table_name1 (a, b) VALUES ({first}); INSERT INTO table_name2 (a, b) VALUES ({second});"
>>> list_into_query_multi(query, fmt={'second': '(%s, %s)'}, first=[1, 2, 3], second=[[1,2], [3,4]])
>>> ('INSERT INTO table_name1 (a, b) VALUES (%s, %s, %s); INSERT INTO table_name2 (a, b) VALUES ((%s, %s), (%s, %s));',
>>> [1, 2, 3, 1, 2, 3, 4])
('INSERT INTO table_name1 (a, b) VALUES (%s, %s, %s); INSERT INTO table_name2 (a, b) VALUES ((%s, %s), (%s, %s));',
[1, 2, 3, 1, 2, 3, 4])
"""
if fmt is None:
fmt = dict()
@@ -112,9 +113,9 @@ def get_translation_resolution(project_id, ssw_id, cursor=None):
cursor = connection.cursor()

cursor.execute('''
SELECT
(ps.translation).x, (ps.translation).y, (ps.translation).z,
(stack.resolution).x, (stack.resolution).y, (stack.resolution).z
SELECT
(ps.translation).x, (ps.translation).y, (ps.translation).z,
(stack.resolution).x, (stack.resolution).y, (stack.resolution).z
FROM synapse_suggestion_workflow ssw
INNER JOIN synapse_detection_tiling tiling
ON ssw.synapse_detection_tiling_id = tiling.id
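Note: the hunk above touches the docstrings of list_into_query and list_into_query_multi, which describe how a single '{}' placeholder in a query template is expanded into one SQL placeholder per list element. As a hedged illustration only — reconstructed from those docstrings, not copied from synapsesuggestor.control.common — a minimal Python sketch of that behaviour:

# Minimal sketch of the documented list_into_query behaviour; the real helper may differ.
def list_into_query(query, arg_lst, fmt='%s'):
    """Expand the single '{}' in `query` into one `fmt` placeholder per element of `arg_lst`."""
    placeholders = ', '.join(fmt for _ in arg_lst)
    args = []
    for arg in arg_lst:
        # Flatten nested lists/tuples so multi-column fmt strings such as '(%s, %s)'
        # line up with the flattened argument values.
        if isinstance(arg, (list, tuple)):
            args.extend(arg)
        else:
            args.append(arg)
    return query.format(placeholders), tuple(args)

# Example: list_into_query('SELECT * FROM t WHERE id IN ({})', [1, 2, 3])
# returns ('SELECT * FROM t WHERE id IN (%s, %s, %s)', (1, 2, 3))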
2 changes: 1 addition & 1 deletion synapsesuggestor/control/synapse_detection.py
@@ -11,7 +11,7 @@
from django.http import JsonResponse
from rest_framework.decorators import api_view

from catmaid.control.authentication import requires_user_role
# from catmaid.control.authentication import requires_user_role
from catmaid.control.common import get_request_list
from synapsesuggestor.control.common import list_into_query
from synapsesuggestor.models import SynapseDetectionTile, SynapseObject
23 changes: 12 additions & 11 deletions synapsesuggestor/control/treenode_association.py
@@ -9,15 +9,17 @@
from django.http import JsonResponse
from rest_framework.decorators import api_view

from catmaid.control.authentication import requires_user_role
# from catmaid.control.authentication import requires_user_role
from catmaid.control.common import get_request_list
from synapsesuggestor.models import ProjectSynapseSuggestionWorkflow
from synapsesuggestor.control.common import list_into_query, get_most_recent_project_SS_workflow, \
get_project_SS_workflow, get_translation_resolution
from synapsesuggestor.control.common import (
list_into_query, get_most_recent_project_SS_workflow, get_translation_resolution, # get_project_SS_workflow
)


logger = logging.getLogger(__name__)


@api_view(['POST'])
def add_treenode_synapse_associations(request, project_id=None):
"""
@@ -142,11 +144,11 @@ def get_synapse_objects_info(translation, resolution, synapse_object_ids, cursor

cursor.execute('''
SELECT combined.so_id, combined.ss_ids, combined.comb_geom
FROM (SELECT
three_d.so_id AS so_id,
array_agg(three_d.ss_id) AS ss_ids,
FROM (SELECT
three_d.so_id AS so_id,
array_agg(three_d.ss_id) AS ss_ids,
ST_3DUnion(three_d.ss_geom3d) AS comb_geom
FROM (SELECT
FROM (SELECT
ss_so.synapse_object_id AS so_id,
ss.id AS ss_id,
ST_Extrude(
@@ -165,8 +167,7 @@
ON ss.synapse_detection_tile_id = tile.id
WHERE ss_so.id = ANY(%s)) three_d
GROUP BY three_d.so_id) combined
''',
(
''', (
resolution[0], resolution[1],
translation[0], translation[1], translation[2], resolution[2],
synapse_object_ids
@@ -217,7 +218,7 @@ def get_synapse_slices_near_skeletons(request, project_id=None):
if dimensions == 2:

cursor.execute('''
SELECT tn.skeleton_id, tn.id, ss_so2.synapse_object_id, array_agg(DISTINCT ss2.id),
SELECT tn.skeleton_id, tn.id, ss_so2.synapse_object_id, array_agg(DISTINCT ss2.id),
tile2.z_tile_idx, ARRAY[
ST_XMin(ST_Extent(ss2.geom_2d)),
ST_YMin(ST_Extent(ss2.geom_2d)),
@@ -231,7 +232,7 @@
ON tile1.z_tile_idx = (tn.location_z / %s) - %s
AND ST_DWithin(
ST_MakePoint((tn.location_x / %s) - %s, (tn.location_y / %s) - %s),
ss1.geom_2d,
ss1.geom_2d,
(%s / %s) - %s
)
INNER JOIN synapse_slice_synapse_object ss_so1
6 changes: 4 additions & 2 deletions synapsesuggestor/control/workflow.py
@@ -7,7 +7,7 @@
from django.db import connection
from rest_framework.decorators import api_view

from catmaid.control.authentication import requires_user_role
# from catmaid.control.authentication import requires_user_role
from synapsesuggestor.models import (
SynapseSuggestionWorkflow, ProjectSynapseSuggestionWorkflow, SynapseDetectionAlgorithm,
SynapseAssociationAlgorithm, SynapseDetectionTiling
@@ -130,7 +132,9 @@ def _get_valid_workflows(project_id, stack_id):
#
# for row in info:
# detection_algos.add((row['detection_algo_hash'], row['detection_algo_date'], row['detection_algo_notes']))
# association_algos.add((row['association_algo_hash'], row['association_algo_date'], row['association_algo_notes']))
# association_algos.add(
# (row['association_algo_hash'], row['association_algo_date'], row['association_algo_notes'])
# )
# valid_pairs.add((row['detection_algo_hash'], row['association_algo_hash']))
#
# return JsonResponse({
1 change: 1 addition & 0 deletions synapsesuggestor/management/commands/drop_ss_tables.py
@@ -6,6 +6,7 @@

from psycopg2.extensions import AsIs


class Command(BaseCommand):
help = 'Closes the specified poll for voting'

53 changes: 40 additions & 13 deletions synapsesuggestor/migrations/0001_initial.py
@@ -102,7 +102,9 @@ class Migration(migrations.Migration):
('xs_centroid', models.IntegerField(verbose_name='x coord of centroid in stack coordinates')),
('ys_centroid', models.IntegerField(verbose_name='y coord of centroid in stack coordinates')),
('uncertainty', models.FloatField(null=True)),
('synapse_detection_tile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseDetectionTile')),
('synapse_detection_tile', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseDetectionTile'
)),
],
options={
'db_table': 'synapse_slice',
@@ -112,8 +114,12 @@
name='SynapseSliceSynapseObject',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('synapse_object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseObject')),
('synapse_slice', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSlice')),
('synapse_object', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseObject'
)),
('synapse_slice', models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSlice'
)),
],
options={
'db_table': 'synapse_slice_synapse_object',
@@ -123,10 +129,18 @@
name='SynapseSliceTreenode',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contact_px', models.IntegerField(verbose_name='Size in pixels of 1D contact area between neuron and synapse')),
('project_synapse_suggestion_workflow', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.ProjectSynapseSuggestionWorkflow')),
('synapse_slice', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSlice')),
('treenode', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='catmaid.Treenode')),
('contact_px', models.IntegerField(
verbose_name='Size in pixels of 1D contact area between neuron and synapse'
)),
('project_synapse_suggestion_workflow', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.ProjectSynapseSuggestionWorkflow'
)),
('synapse_slice', models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSlice'
)),
('treenode', models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to='catmaid.Treenode'
)),
],
options={
'db_table': 'synapse_slice_treenode',
@@ -136,9 +150,16 @@
name='SynapseSuggestionWorkflow',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('synapse_detection_algorithm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseDetectionAlgorithm')),
('synapse_detection_tiling', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseDetectionTiling')),
('synapse_image_store', models.OneToOneField(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseImageStore')),
('synapse_detection_algorithm', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseDetectionAlgorithm'
)),
('synapse_detection_tiling', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseDetectionTiling'
)),
('synapse_image_store', models.OneToOneField(
default=None, null=True, on_delete=django.db.models.deletion.CASCADE,
to='synapsesuggestor.SynapseImageStore'
)),
],
options={
'db_table': 'synapse_suggestion_workflow',
@@ -147,17 +168,23 @@
migrations.AddField(
model_name='synapsedetectiontile',
name='synapse_suggestion_workflow',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSuggestionWorkflow'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSuggestionWorkflow'
),
),
migrations.AddField(
model_name='projectsynapsesuggestionworkflow',
name='synapse_association_algorithm',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseAssociationAlgorithm'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseAssociationAlgorithm'
),
),
migrations.AddField(
model_name='projectsynapsesuggestionworkflow',
name='synapse_suggestion_workflow',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSuggestionWorkflow'),
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to='synapsesuggestor.SynapseSuggestionWorkflow'
),
),
migrations.AlterUniqueTogether(
name='synapsesuggestionworkflow',
2 changes: 1 addition & 1 deletion synapsesuggestor/models.py
@@ -5,7 +5,7 @@
from django.contrib.gis.db import models as spatial_models
from django.utils.encoding import python_2_unicode_compatible

from catmaid.models import Treenode, Stack, StackMirror, Project
from catmaid.models import Treenode, Stack, Project


@python_2_unicode_compatible
@@ -659,7 +659,7 @@
* @param stop
* @param count
* @param round
* @return {[*]}
* @return {Array.<number>}
*/
const linspace = function(start, stop, count, round) {
const out = [start];
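Note: the JSDoc fix above documents linspace(start, stop, count, round) as returning Array.<number>. For illustration, a Python sketch of what a conventional linspace helper computes — an assumption based on the documented signature, not the widget's actual JavaScript implementation:

# Hypothetical sketch: `count` evenly spaced values from `start` to `stop` inclusive,
# optionally rounded to integers.
def linspace(start, stop, count, round_values=False):
    if count < 2:
        return [start]
    step = (stop - start) / (count - 1)
    out = [start + i * step for i in range(count)]
    if round_values:
        out = [int(round(v)) for v in out]
    return out

# Example: linspace(0, 10, 5) returns [0.0, 2.5, 5.0, 7.5, 10.0]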
4 changes: 1 addition & 3 deletions synapsesuggestor/tests/apis/test_analysis.py
@@ -93,7 +93,7 @@ def create_treenode_connector(
confidence=confidence
)

created_links = create_connector_link(
create_connector_link(
project_id, self.test_user.id, new_treenode.treenode_id, new_treenode.skeleton_id,
[[new_connector.id, relation.id, confidence]]
)
@@ -116,8 +116,6 @@ def get_intersecting_connectors(self, obj_ids=None, workflow_id=None):
self.assertEqual(response.status_code, 200)
parsed_response = json.loads(response.content.decode('utf-8'))

print(parsed_response)

if parsed_response['data']:
self.assertEqual(len(parsed_response['columns']), len(parsed_response['data'][0]))

6 changes: 4 additions & 2 deletions synapsesuggestor/tests/apis/test_synapse_detection.py
@@ -304,8 +304,10 @@ def test_agglomerate_synapse_slices_two_objects(self):
new_ids = self.insert_synapses(0, (0, 1), height=2)
parsed_response = self.agglomerate_synapses(new_ids)

self.assertEqual(len(parsed_response['slice_object_mappings']), 4) # all 4 syn slices are merged
self.assertEqual(len(set(parsed_response['slice_object_mappings'].values())), 1) # they all map to the same synapse object
# check all 4 syn slices are merged
self.assertEqual(len(parsed_response['slice_object_mappings']), 4)
# check they all map to the same synapse object
self.assertEqual(len(set(parsed_response['slice_object_mappings'].values())), 1)

# test old mapping was cleared up
mappings = SynapseSliceSynapseObject.objects.all()
6 changes: 4 additions & 2 deletions synapsesuggestor/tests/apis/test_treenode_association.py
@@ -8,8 +8,10 @@

URL_PREFIX = '/ext/synapsesuggestor/treenode-association'

SYN_SLICE_NEAR_SKEL_COLS = ['skeleton_id', 'treenode_id', 'synapse_object_id', 'synapse_slice_ids', 'synapse_z_s',
'synapse_bounds_s']
SYN_SLICE_NEAR_SKEL_COLS = [
'skeleton_id', 'treenode_id', 'synapse_object_id', 'synapse_slice_ids', 'synapse_z_s', 'synapse_bounds_s'
]


def stack_to_project(translation, resolution, coords_s):
"""Convert a dictionary of stack coordinates into a dictionary of project coordinates"""
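Note: the reformatted constant above sits next to the stack_to_project test helper, whose docstring says it converts a dictionary of stack coordinates into project coordinates. As a hedged sketch only — assuming the usual CATMAID per-axis mapping project = stack * resolution + translation, which this test module may implement differently:

# Hypothetical sketch of stack-to-project conversion; dict-keyed translation and
# resolution are an assumption made for illustration.
def stack_to_project(translation, resolution, coords_s):
    return {dim: coords_s[dim] * resolution[dim] + translation[dim] for dim in coords_s}

# Example: stack_to_project({'x': 0, 'y': 0, 'z': 60}, {'x': 5, 'y': 5, 'z': 9}, {'x': 10, 'y': 20, 'z': 3})
# returns {'x': 50, 'y': 100, 'z': 87}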
