Commit

Merge branch 'master' of github.com:alleninstitute/aisynphys
campagnola committed Jan 27, 2023
2 parents 05f9a04 + 2a3da1c commit b5cb279
Showing 8 changed files with 74 additions and 53 deletions.
10 changes: 0 additions & 10 deletions README

This file was deleted.

26 changes: 26 additions & 0 deletions README.md
@@ -0,0 +1,26 @@
Allen Institute Synaptic Physiology
-----------------------------------

This package provides data access and analysis tools for the Allen Institute Synaptic Physiology datasets.

**Main website:** http://portal.brain-map.org/explore/connectivity/synaptic-physiology

**Quick start tutorial:** https://mybinder.org/v2/gh/AllenInstitute/aisynphys/binder?filepath=doc/tutorial.ipynb

**Documentation:** https://aisynphys.readthedocs.io/en/latest/

**Community Forum:** https://community.brain-map.org/tag/synaptic-physiology (use the `synaptic-physiology` tag when posting about aisynphys or the dataset)


License
-------

This code is released under the Allen Institute Software License (2-clause BSD license
plus a third clause that prohibits redistribution for commercial purposes without further permission). See the LICENSE file for more information.

Level of Support
----------------

This package is released to enable public users to access and analyze the Synaptic Physiology dataset. We welcome requests for support at the
[Community Forum](https://community.brain-map.org/tag/synaptic-physiology), as well as issues or pull requests on this repository.
However, please be patient with us as we may not be able to respond promptly.
5 changes: 4 additions & 1 deletion aisynphys/config.py
@@ -33,7 +33,10 @@
n_headstages = 8
rig_data_paths = {}
known_addrs = {}
-pipeline = {}
+pipeline = {
+    'disable_modules': []
+}
downloads_url = 'https://raw.githubusercontent.com/AllenInstitute/aisynphys/download_urls/download_urls'

configfile = os.path.join(os.path.dirname(__file__), '..', 'config.yml')

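For context, this change gives the pipeline config an explicit default so that a missing `disable_modules` entry no longer has to be special-cased. A minimal sketch of how such a setting might be consumed (the helper and module names below are hypothetical, not actual aisynphys pipeline code):

    from aisynphys import config

    def active_modules(all_modules):
        # Hypothetical helper: skip any pipeline module listed in the user's
        # config.yml under pipeline -> disable_modules. The new default means
        # this lookup is safe even when the user config omits the key.
        disabled = set(config.pipeline.get('disable_modules', []))
        return [m for m in all_modules if m not in disabled]

    # With config.yml containing
    #   pipeline:
    #       disable_modules: ['morphology']
    # active_modules(['dataset', 'pulse_response', 'morphology'])
    # would return ['dataset', 'pulse_response'].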
72 changes: 36 additions & 36 deletions aisynphys/dynamics.py
@@ -14,8 +14,8 @@ def sorted_pulse_responses(pr_recs):
sorted_recs : dict
Nested dictionary structure with keys [(clamp_mode, ind_freq, recovery_delay)][recording][pulse_number]::
{('ic', 50.0, 250e-3): {
-rec1: {1:pr, 2:pr, 3:pr, ...},
-rec2: {1:pr, 2:pr, 3:pr, ...},
+rec1: {0:pr, 1:pr, 2:pr, ...},
+rec2: {0:pr, 1:pr, 2:pr, ...},
...},
...}
@@ -26,7 +26,7 @@
pr_recs = pulse_response_query(pair).all()
sorted = sorted_pulse_responses(pr_recs)
-prs = [d[1] for d in sorted['ic', 50, 250e-3].values()]
+prs = [d[0] for d in sorted['ic', 50, 250e-3].values()]
"""

# group records by (clamp mode, ind_freq, rec_delay), recording, and then by pulse number
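Note the docstring and example now use 0-based pulse numbers. A short usage sketch under the new convention (the pair object, the query helper, and the dec_fit_reconv_amp field are taken from the surrounding code; treat this as illustrative rather than canonical):

    # Group pulse responses for one connection, then pull first-pulse amplitudes
    # from the 50 Hz / 250 ms-recovery stimulus class.
    pr_recs = pulse_response_query(pair).all()
    sorted_prs = sorted_pulse_responses(pr_recs)

    first_amps = []
    for recording, pulses in sorted_prs['ic', 50, 250e-3].items():
        if 0 in pulses:  # pulse 0 is now the first pulse in the train
            first_amps.append(pulses[0].PulseResponseFit.dec_fit_reconv_amp)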
@@ -139,7 +139,7 @@ def generate_pair_dynamics(pair, db, session, pr_recs=None):
}

paired_pulse_ratio = []
-# caclulate dynamics for all frequencie and recovery delays
+# caclulate dynamics for all frequencies and recovery delays
# [(clamp_mode, ind_freq, recovery_delay), {'stp_induction':(mean, std, n),
# 'stp_initial': (mean, std, n), 'stp_recovery': (mean, std, n)}), ...]
all_metrics = []
@@ -169,44 +169,44 @@ def generate_pair_dynamics(pair, db, session, pr_recs=None):
amps = {k:getattr(r.PulseResponseFit, amp_field) for k,r in pulses.items()}

# calculate metrics if the proper conditions are met
-if 1 in pulses:
+if 0 in pulses:
if ind_freq == 50:
-col_metrics['pulse_amp_first_50hz'].append(amps[1])
-if 1 in pulses and 2 in pulses:
-initial = (amps[2] - amps[1]) / amp_90p
+col_metrics['pulse_amp_first_50hz'].append(amps[0])
+if 0 in pulses and 1 in pulses:
+initial = (amps[1] - amps[0]) / amp_90p
collect_initial.append(initial)
# we separate out 50Hz into its own column because the induction frequency spans
# multiple recovery delays
if ind_freq == 50:
col_metrics['stp_initial_50hz'].append(initial)
-col_metrics['pulse_amp_stp_initial_50hz'].append(amps[2])
-if amps[1] != 0:
-paired_pulse_ratio.append(amps[2] / amps[1])
-if all([k in pulses for k in [1,6,7,8]]):
-induction_amp = np.median([amps[6], amps[7], amps[8]])
-induction = (induction_amp - amps[1]) / amp_90p
+col_metrics['pulse_amp_stp_initial_50hz'].append(amps[1])
+if amps[0] != 0:
+paired_pulse_ratio.append(amps[1] / amps[0])
+if all([k in pulses for k in [0,5,6,7]]):
+induction_amp = np.median([amps[5], amps[6], amps[7]])
+induction = (induction_amp - amps[0]) / amp_90p
collect_induction.append(induction)
if ind_freq == 50:
col_metrics['stp_induction_50hz'].append(induction)
col_metrics['pulse_amp_stp_induction_50hz'].append(induction_amp)
-if delay is not None and all([k in pulses for k in range(1,13)]):
-recovery_amp = np.median([amps[i] for i in range(9,13)])
-r = [amps[i+8] - amps[i] for i in range(1,5)]
+if delay is not None and all([k in pulses for k in range(0,12)]):
+recovery_amp = np.median([amps[i] for i in range(8,12)])
+r = [amps[i+8] - amps[i] for i in range(0,4)]
recovery = np.median(r) / amp_90p
collect_recovery.append(recovery)
if delay == 250e-3:
col_metrics['stp_recovery_250ms'].append(recovery)
col_metrics['pulse_amp_stp_recovery_250ms'].append(recovery_amp)
-if delay is not None and all([k in pulses for k in range(1,10)]):
-r = amps[9] - amps[1]
+if delay is not None and all([k in pulses for k in range(0,9)]):
+r = amps[8] - amps[0]
recovery = np.median(r) / amp_90p
collect_recovery_single.append(recovery)
if delay == 250e-3:
col_metrics['stp_recovery_single_250ms'].append(recovery)
-col_metrics['pulse_amp_stp_recovery_single_250ms'].append(amps[9])
+col_metrics['pulse_amp_stp_recovery_single_250ms'].append(amps[8])

# collect individual pulse amplitudes
-for i in range(1, 13):
+for i in range(0, 12):
if i not in pulses:
break
collect_pulse_amps[i-1].append(amps[i])
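To make the re-indexed metrics concrete, here is a toy calculation matching the new 0-based formulas, using invented amplitudes for a 12-pulse train (8 induction pulses plus 4 recovery pulses) and a stand-in normalizer amp_90p:

    import numpy as np

    # Hypothetical 0-based amplitudes; pulses 0-7 are the induction train,
    # pulses 8-11 follow the recovery delay.
    amps = dict(enumerate(
        [1.0, 0.8, 0.7, 0.65, 0.6, 0.58, 0.57, 0.56, 0.9, 0.85, 0.82, 0.8]))
    amp_90p = 1.0  # stand-in for the 90th-percentile response amplitude

    initial = (amps[1] - amps[0]) / amp_90p                  # STP initial: -0.2
    paired_pulse_ratio = amps[1] / amps[0]                   # PPR: 0.8
    induction_amp = np.median([amps[5], amps[6], amps[7]])   # 0.57
    induction = (induction_amp - amps[0]) / amp_90p          # STP induction: -0.43
    recovery = np.median([amps[i + 8] - amps[i] for i in range(0, 4)]) / amp_90p
    # recovery: median(-0.1, 0.05, 0.12, 0.15) = 0.085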
@@ -259,8 +259,8 @@ def variability(x):

# Variability in STP-induced state (5th-8th pulses)
pulse_amps = {
-(2,3): [],
-(5,9): [],
+(1,2): [],
+(4,8): [],
}
# collect pulse amplitudes in each category
for key,recs in sorted_prs.items():
@@ -278,35 +278,35 @@ def variability(x):
pulse_var = {n:(variability(a) if len(a) > 0 else np.nan) for n,a in pulse_amps.items()}

# record changes in vairabilty
-dynamics.variability_second_pulse_50hz = pulse_var[2,3]
-dynamics.variability_stp_induced_state_50hz = pulse_var[5,9]
-dynamics.variability_change_initial_50hz = pulse_var[2,3] - dynamics.variability_resting_state
-dynamics.variability_change_induction_50hz = pulse_var[5,9] - dynamics.variability_resting_state
+dynamics.variability_second_pulse_50hz = pulse_var[1,2]
+dynamics.variability_stp_induced_state_50hz = pulse_var[4,8]
+dynamics.variability_change_initial_50hz = pulse_var[1,2] - dynamics.variability_resting_state
+dynamics.variability_change_induction_50hz = pulse_var[4,8] - dynamics.variability_resting_state

# Look for evidence of vesicle depletion -- correlations between adjacent events in 50Hz pulses 5-8.
amps_1_2 = ([], [])
amps_2_4 = ([], [])
amps_4_8 = ([], [])
-pulse_amps = [[] for i in range(9)]
+pulse_amps = [[] for i in range(8)]
for key,recs in sorted_prs.items():
clamp_mode, ind_freq, rec_delay = key
if ind_freq != 50:
continue
for recording, pulses in recs.items():
-for i in range(1,9):
+for i in range(0,8):
if i not in pulses:
break
-if i == 2:
+if i == 1:
amps_1_2[0].append(getattr(pulses[i-1].PulseResponseFit, amp_field))
amps_1_2[1].append(getattr(pulses[i].PulseResponseFit, amp_field))
-if i == 4:
-first = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(1, 3)])
-second = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(3, 5)])
+if i == 3:
+first = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(0, 2)])
+second = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(2, 4)])
amps_2_4[0].append(np.median(first))
amps_2_4[1].append(np.median(second))
-if i == 8:
-first = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(1, 5)])
-second = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(5, 9)])
+if i == 7:
+first = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(0, 4)])
+second = np.median([getattr(pulses[i].PulseResponseFit, amp_field) for i in range(4, 8)])
amps_4_8[0].append(np.median(first))
amps_4_8[1].append(np.median(second))

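The correlation step that consumes amps_1_2, amps_2_4, and amps_4_8 is below the fold of this hunk. A minimal sketch of what an adjacent-event correlation check could look like on those collected pairs (illustrative only; scipy's pearsonr is an assumption here, not necessarily what the truncated code uses):

    import numpy as np
    from scipy import stats

    def adjacent_event_correlation(pair_amps):
        # pair_amps is a 2-tuple of lists, e.g. amps_1_2: first-event and
        # second-event amplitudes collected across 50 Hz recordings.
        x, y = np.asarray(pair_amps[0]), np.asarray(pair_amps[1])
        if len(x) < 3:
            return np.nan, np.nan  # too few samples to estimate a correlation
        r, p = stats.pearsonr(x, y)
        return r, p

    # e.g. r_12, p_12 = adjacent_event_correlation(amps_1_2)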
2 changes: 1 addition & 1 deletion aisynphys/pipeline/multipatch/dataset.py
@@ -137,7 +137,7 @@ def create_db_entries(cls, job, session):
all_pulse_entries[rec.device_id] = pulse_entries

for i,pulse in enumerate(pulses):
-# Record information about all pulses, including test pulse.
+# Record information about all stim pulses, excluding test pulse.
t0, t1 = pulse.meta['pulse_edges']
resampled = pulse['primary'].resample(sample_rate=db.default_sample_rate)
clock_time = t0 + datetime_to_timestamp(rec_entry.start_time)
4 changes: 3 additions & 1 deletion aisynphys/pipeline/multipatch/pulse_response.py
@@ -30,6 +30,8 @@ def create_db_entries(cls, job, session):

prs = [rec.PulseResponse for rec in rq.all()]
print("%s: got %d pulse responses" % (expt_id, len(prs)))
+pairs = list(set([pr.pair for pr in prs]))
+# print(f'Pulses responses selected from pairs: {pairs}')

# best estimate of response amplitude using known latency for this synapse
fits = 0
@@ -135,7 +137,7 @@ def pulse_response_query(expt_id, db, session):
.join(db.SyncRec, db.Recording.sync_rec)
.join(db.Experiment, db.SyncRec.experiment)
.join(db.Pair, db.PulseResponse.pair)
-.join(db.Synapse, db.Pair.synapse)
+.outerjoin(db.Synapse, db.Pair.synapse)
)

rq = rq.filter(db.Experiment.ext_id==expt_id)
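Switching the Synapse join to an outer join means pulse responses from pairs that have no Synapse row are no longer silently dropped from the query. The toy SQLAlchemy example below illustrates the difference; the two-table schema here is a stand-in, not the actual aisynphys database models:

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()

    class Pair(Base):
        __tablename__ = 'pair'
        id = Column(Integer, primary_key=True)
        synapse = relationship('Synapse', uselist=False, back_populates='pair')

    class Synapse(Base):
        __tablename__ = 'synapse'
        id = Column(Integer, primary_key=True)
        pair_id = Column(Integer, ForeignKey('pair.id'))
        pair = relationship('Pair', back_populates='synapse')

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        p1, p2 = Pair(), Pair()                  # p1 has a synapse, p2 does not
        session.add_all([p1, p2, Synapse(pair=p1)])
        session.commit()
        inner = session.query(Pair).join(Synapse, Pair.synapse).all()       # [p1]
        outer = session.query(Pair).outerjoin(Synapse, Pair.synapse).all()  # [p1, p2]
        print(len(inner), len(outer))  # 1 2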
6 changes: 3 additions & 3 deletions aisynphys/ui/notebook.py
@@ -771,7 +771,7 @@ def plot_stim_sorted_pulse_amp(pair, ax, db, ind_f=50, avg_line=False, avg_trace
except KeyError:
print('No fit amps for pair: %s' % pair)
ax.set_ylim(0, filtered['dec_fit_reconv_amp'].max())
-ax.set_xlim(0, 13)
+ax.set_xlim(0, 12)

scatter_opts = {'color': (0.7, 0.7, 0.7, ), 'size': 3}
scatter_opts.update(scatter_args)
@@ -790,14 +790,14 @@ def plot_stim_sorted_pulse_amp(pair, ax, db, ind_f=50, avg_line=False, avg_trace
ax.plot(range(8,12), pulse_means[8:12], **line_opts)
# plot avg trace for each pulse number
if avg_trace:
-for pulse_number in np.arange(1,13):
+for pulse_number in np.arange(0,12):
pulse_ids = filtered[filtered['pulse_number']==pulse_number]['id'].to_list()
prs = db.query(db.PulseResponse).filter(db.PulseResponse.id.in_(pulse_ids))
pr_list = PulseResponseList(prs)
post_trace = pr_list.post_tseries(align='spike', bsub=True, bsub_win=1e-3)
trace_mean = post_trace.mean()*1e3
trace_slice = trace_mean.time_slice(-1e-3, 8e-3)
-ax.plot(trace_slice.time_values*1e2 + (pulse_number-1.4), abs(trace_slice.data), **line_opts)
+ax.plot(trace_slice.time_values*1e2 + (pulse_number-0.4), abs(trace_slice.data), **line_opts)
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)

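With 0-based pulse numbers, the scatter x axis now runs 0-12 and each averaged trace is drawn at pulse_number - 0.4. A hedged usage sketch (the database and pair lookup follow the pattern from the aisynphys tutorial and may differ for your local setup):

    import matplotlib.pyplot as plt
    from aisynphys.database import SynphysDatabase
    from aisynphys.ui.notebook import plot_stim_sorted_pulse_amp

    # Load the small published database and take any synaptically connected pair.
    db = SynphysDatabase.load_current('small')
    pair = db.pair_query(synapse=True).first()

    fig, ax = plt.subplots()
    plot_stim_sorted_pulse_amp(pair, ax, db, ind_f=50, avg_line=True, avg_trace=True)
    ax.set_xlabel('pulse number (0-based)')
    plt.show()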
2 changes: 1 addition & 1 deletion desktop-environment.yml
@@ -22,4 +22,4 @@ dependencies:
- pip:
- lmfit
- git+https://github.com/pyqtgraph/pyqtgraph
-# - git+https://github.com/alleninstitute/neuroanalysis
+- git+https://github.com/alleninstitute/neuroanalysis
