From 5112f05c9906bd6c98841f0f7a8e90e7efb8341d Mon Sep 17 00:00:00 2001 From: CBroz1 Date: Mon, 20 Feb 2023 16:49:45 -0600 Subject: [PATCH 01/58] Migrate from previous repo --- .github/ISSUE_TEMPLATE/bug_report.md | 39 + .github/ISSUE_TEMPLATE/config.yml | 5 + .github/ISSUE_TEMPLATE/feature_request.md | 57 + .../u24_workflow_before_release.yaml | 18 + .../workflows/u24_workflow_release_call.yaml | 20 + .../u24_workflow_tag_to_release.yaml | 15 + .gitignore | 122 ++ .markdownlint.yaml | 16 + .pre-commit-config.yaml | 58 + CHANGELOG.md | 10 + CODE_OF_CONDUCT.md | 132 ++ CONTRIBUTING.md | 5 + README.md | 12 + cspell.json | 32 + docker/Dockerfile.test | 30 + docker/apt_requirements.txt | 2 + docker/docker-compose.yaml | 63 + docker/setup.sh | 37 + notebooks/01-configure.ipynb | 151 ++ .../02-workflow-structure-optional.ipynb | 646 ++++++ notebooks/03-process.ipynb | 1792 +++++++++++++++++ notebooks/06-drop-optional.ipynb | 96 + notebooks/py_scripts/01-configure.py | 82 + .../02-workflow-structure-optional.py | 161 ++ notebooks/py_scripts/03-process.py | 287 +++ notebooks/py_scripts/06-drop-optional.py | 51 + requirements.txt | 9 + requirements_dev.txt | 2 + setup.py | 39 + tests/__init__.py | 5 + tests/conftest.py | 268 +++ tests/test_ingest.py | 26 + tests/test_pipeline_generation.py | 28 + user_data/behavior_recordings.csv | 6 + user_data/blocks.csv | 9 + user_data/events.csv | 38 + user_data/opto_events.csv | 3 + user_data/opto_sessions.csv | 2 + user_data/opto_surgeries.csv | 2 + user_data/opto_waveforms.csv | 2 + user_data/sessions.csv | 2 + user_data/subjects.csv | 2 + user_data/trials.csv | 41 + workflow_optogenetics/__init__.py | 6 + workflow_optogenetics/ingest.py | 187 ++ workflow_optogenetics/paths.py | 8 + workflow_optogenetics/pipeline.py | 48 + workflow_optogenetics/reference.py | 26 + workflow_optogenetics/version.py | 5 + 49 files changed, 4703 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 
.github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/workflows/u24_workflow_before_release.yaml create mode 100644 .github/workflows/u24_workflow_release_call.yaml create mode 100644 .github/workflows/u24_workflow_tag_to_release.yaml create mode 100644 .gitignore create mode 100644 .markdownlint.yaml create mode 100644 .pre-commit-config.yaml create mode 100644 CHANGELOG.md create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 README.md create mode 100644 cspell.json create mode 100755 docker/Dockerfile.test create mode 100755 docker/apt_requirements.txt create mode 100644 docker/docker-compose.yaml create mode 100644 docker/setup.sh create mode 100644 notebooks/01-configure.ipynb create mode 100644 notebooks/02-workflow-structure-optional.ipynb create mode 100644 notebooks/03-process.ipynb create mode 100644 notebooks/06-drop-optional.ipynb create mode 100644 notebooks/py_scripts/01-configure.py create mode 100644 notebooks/py_scripts/02-workflow-structure-optional.py create mode 100644 notebooks/py_scripts/03-process.py create mode 100644 notebooks/py_scripts/06-drop-optional.py create mode 100644 requirements.txt create mode 100644 requirements_dev.txt create mode 100644 setup.py create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_ingest.py create mode 100644 tests/test_pipeline_generation.py create mode 100644 user_data/behavior_recordings.csv create mode 100644 user_data/blocks.csv create mode 100644 user_data/events.csv create mode 100644 user_data/opto_events.csv create mode 100644 user_data/opto_sessions.csv create mode 100644 user_data/opto_surgeries.csv create mode 100644 user_data/opto_waveforms.csv create mode 100644 user_data/sessions.csv create mode 100644 user_data/subjects.csv create mode 100644 user_data/trials.csv create mode 100644 workflow_optogenetics/__init__.py create mode 100644 
workflow_optogenetics/ingest.py create mode 100644 workflow_optogenetics/paths.py create mode 100644 workflow_optogenetics/pipeline.py create mode 100644 workflow_optogenetics/reference.py create mode 100644 workflow_optogenetics/version.py diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..31fe9fc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,39 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: 'bug' +assignees: '' + +--- + +## Bug Report + +### Description + +A clear and concise description of what is the overall operation that is intended to be +performed that resulted in an error. + +### Reproducibility +Include: +- OS (WIN | MACOS | Linux) +- DataJoint Element Version +- MySQL Version +- MySQL Deployment Strategy (local-native | local-docker | remote) +- Minimum number of steps to reliably reproduce the issue +- Complete error stack as a result of evaluating the above steps + +### Expected Behavior +A clear and concise description of what you expected to happen. + +### Screenshots +If applicable, add screenshots to help explain your problem. + +### Additional Research and Context +Add any additional research or context that was conducted in creating this report. + +For example: +- Related GitHub issues and PR's either within this repository or in other relevant + repositories. +- Specific links to specific lines or a focus within source code. +- Relevant summary of Maintainers development meetings, milestones, projects, etc. 
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..d31fbac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: DataJoint Contribution Guideline + url: https://docs.datajoint.org/python/community/02-Contribute.html + about: Please make sure to review the DataJoint Contribution Guidelines \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..1f2b784 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,57 @@ +--- +name: Feature request +about: Suggest an idea for a new feature +title: '' +labels: 'enhancement' +assignees: '' + +--- + +## Feature Request + +### Problem + +A clear and concise description how this idea has manifested and the context. Elaborate +on the need for this feature and/or what could be improved. Ex. I'm always frustrated +when [...] + +### Requirements + +A clear and concise description of the requirements to satisfy the new feature. Detail +what you expect from a successful implementation of the feature. Ex. When using this +feature, it should [...] + +### Justification + +Provide the key benefits in making this a supported feature. Ex. Adding support for this +feature would ensure [...] + +### Alternative Considerations + +Do you currently have a work-around for this? Provide any alternative solutions or +features you've considered. + +### Related Errors +Add any errors as a direct result of not exposing this feature. 
+ +Please include steps to reproduce provided errors as follows: +- OS (WIN | MACOS | Linux) +- DataJoint Element Version +- MySQL Version +- MySQL Deployment Strategy (local-native | local-docker | remote) +- Minimum number of steps to reliably reproduce the issue +- Complete error stack as a result of evaluating the above steps + +### Screenshots +If applicable, add screenshots to help explain your feature. + +### Additional Research and Context +Add any additional research or context that was conducted in creating this feature request. + +For example: +- Related GitHub issues and PR's either within this repository or in other relevant + repositories. +- Specific links to specific lines or a focus within source code. +- Relevant summary of Maintainers development meetings, milestones, projects, etc. +- Any additional supplemental web references or links that would further justify this + feature request. diff --git a/.github/workflows/u24_workflow_before_release.yaml b/.github/workflows/u24_workflow_before_release.yaml new file mode 100644 index 0000000..28a5ff5 --- /dev/null +++ b/.github/workflows/u24_workflow_before_release.yaml @@ -0,0 +1,18 @@ +name: u24_workflow_before_release_0.0.1 +on: + pull_request: + push: + branches: + - '**' + tags-ignore: + - '**' + workflow_dispatch: +jobs: + call_context_check: + uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main + call_u24_workflow_build_debian: + uses: dj-sciops/djsciops-cicd/.github/workflows/u24_workflow_build.yaml@main + with: + jhub_ver: 1.4.2 + py_ver: 3.9 + dist: debian diff --git a/.github/workflows/u24_workflow_release_call.yaml b/.github/workflows/u24_workflow_release_call.yaml new file mode 100644 index 0000000..8196673 --- /dev/null +++ b/.github/workflows/u24_workflow_release_call.yaml @@ -0,0 +1,20 @@ +name: u24_workflow_release_call_0.0.1 +on: + workflow_run: + workflows: ["u24_workflow_tag_to_release_0.0.1"] + types: + - completed +jobs: + call_context_check: + uses: 
dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main + call_u24_workflow_release_debian: + if: >- + github.event.workflow_run.conclusion == 'success' && github.repository_owner == 'datajoint' + uses: dj-sciops/djsciops-cicd/.github/workflows/u24_workflow_release.yaml@main + with: + jhub_ver: 1.4.2 + py_ver: 3.9 + dist: debian + secrets: + REGISTRY_USERNAME: ${{secrets.DOCKER_USERNAME}} + REGISTRY_PASSWORD: ${{secrets.DOCKER_PASSWORD}} diff --git a/.github/workflows/u24_workflow_tag_to_release.yaml b/.github/workflows/u24_workflow_tag_to_release.yaml new file mode 100644 index 0000000..3a6ce58 --- /dev/null +++ b/.github/workflows/u24_workflow_tag_to_release.yaml @@ -0,0 +1,15 @@ +name: u24_workflow_tag_to_release_0.0.1 +on: + push: + tags: + - '*.*.*' + - 'test*.*.*' +jobs: + call_context_check: + uses: dj-sciops/djsciops-cicd/.github/workflows/context_check.yaml@main + call_u24_workflow_build_debian: + uses: dj-sciops/djsciops-cicd/.github/workflows/u24_workflow_build.yaml@main + with: + jhub_ver: 1.4.2 + py_ver: 3.9 + dist: debian diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c99cdf8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,122 @@ +# User data +.DS_Store + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +.idea/ + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy +scratchpaper.* + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# dotenv +./.env + +# virtualenv +.venv +venv/ +ENV/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# datajoint +dj_local_conf*.json +temp* + +# emacs +**/*~ +**/#*# +**/.#* +docker-compose.yml +Diagram.ipynb +*swp + +# docker +.env +tests/user_dat* diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000..7420359 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,16 @@ +# Markdown Linter configuration for docs +# https://github.com/DavidAnson/markdownlint +# https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md +MD009: false # permit trailing spaces +MD007: false # List indenting - permit 4 spaces +MD013: + line_length: "88" # Line length limits + tables: false # disable for tables + headings: false # disable for headings +MD030: false # Number of spaces after a list +MD033: # HTML elements allowed + allowed_elements: + - "br" +MD034: false # Permit bare URLs +MD031: false # Spacing w/code blocks. Conflicts with `??? Note` and code tab styling +MD046: false # Spacing w/code blocks. Conflicts with `??? 
Note` and code tab styling diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..03788bc --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,58 @@ +default_stages: [commit, push] +exclude: (^.github/|^docs/|^images/|^notebooks/py_scripts/) + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files # prevent giant files from being committed + - id: requirements-txt-fixer + - id: mixed-line-ending + args: ["--fix=lf"] + description: Forces to replace line ending by the UNIX 'lf' character. + + # black + - repo: https://github.com/psf/black + rev: 22.12.0 + hooks: + - id: black + - id: black-jupyter + args: + - --line-length=88 + + # isort + - repo: https://github.com/pycqa/isort + rev: 5.11.2 + hooks: + - id: isort + args: ["--profile", "black"] + description: Sorts imports in an alphabetical order + + # flake8 + - repo: https://github.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + args: # arguments to configure flake8 + # making isort line length compatible with black + - "--max-line-length=88" + - "--max-complexity=18" + - "--select=B,C,E,F,W,T4,B9" + + # these are errors that will be ignored by flake8 + # https://www.flake8rules.com/rules/{code}.html + - "--ignore=E203,E501,W503,W605,E402" + # E203 - Colons should not have any space before them. + # Needed for list indexing + # E501 - Line lengths are recommended to be no greater than 79 characters. + # Needed as we conform to 88 + # W503 - Line breaks should occur after the binary operator. + # Needed because not compatible with black + # W605 - a backslash-character pair that is not a valid escape sequence now + # generates a DeprecationWarning. This will eventually become a SyntaxError. + # Needed because we use \d as an escape sequence + # E402 - Place module level import at the top. 
+ # Needed to prevent circular import error diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..824f14c --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and +[Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. + +## [0.1.0] - 2023-02-20 + ++ Add - Version + +[0.1.0]: https://github.com/datajoint/workflow-optogenetics/releases/tag/0.1.0 diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..0502528 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,132 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or advances of + any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email address, + without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +[Support@DataJoint.com](mailto:support@datajoint.com). +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. 
Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html +[Mozilla CoC]: https://github.com/mozilla/diversity +[FAQ]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..e04d170 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,5 @@ +# Contribution Guidelines + +This project follows the +[DataJoint Contribution Guidelines](https://datajoint.com/docs/community/contribute/). +Please reference the link for more full details. diff --git a/README.md b/README.md new file mode 100644 index 0000000..5eb9d33 --- /dev/null +++ b/README.md @@ -0,0 +1,12 @@ +# DataJoint Workflow - Optogenetics + +The DataJoint Workflow for Optogenetics combines multiple DataJoint Elements for +optogenetics research. DataJoint Elements collectively standardize and automate data +collection and analysis for neuroscience experiments. 
Each Element is a modular +pipeline for data storage and processing with corresponding database tables that can be +combined with other Elements to assemble a fully functional pipeline. + +Installation and usage instructions can be found at the +[Element documentation](https://datajoint.com/docs/elements/element-optogenetics). + +![element-Optogenetics diagram](https://raw.githubusercontent.com/datajoint/element-Optogenetics/main/images/diagram_opto.svg) diff --git a/cspell.json b/cspell.json new file mode 100644 index 0000000..a01173c --- /dev/null +++ b/cspell.json @@ -0,0 +1,32 @@ +// cSpell Settings +//https://github.com/streetsidesoftware/vscode-spell-checker +{ + "version": "0.2", // Version of the setting file. Always 0.2 + "language": "en", // language - current active spelling language + "enabledLanguageIds": [ + "markdown", "yaml", "python" + ], + // flagWords - list of words to be always considered incorrect + // This is useful for offensive words and common spelling errors. + // For example "hte" should be "the" + "flagWords": [], + "allowCompoundWords": true, + "ignorePaths": [ + "./*.egg-info/*", + "./images/*" + ], + "words": [ + "FUCNTION", + "ipykernel", + "jupytext", + "opto", + "Opto", + "optogenetic", + "Optogenetic", + "optogenetics", + "Optogenetics", + "pytest", + "stim", + "Stim" + ] +} diff --git a/docker/Dockerfile.test b/docker/Dockerfile.test new file mode 100755 index 0000000..33951a0 --- /dev/null +++ b/docker/Dockerfile.test @@ -0,0 +1,30 @@ +FROM datajoint/djbase:py3.9-debian-8eb1715 + +# ARG GITHUB_USERNAME=datajoint # tried moving to ENV +USER anaconda:anaconda + +COPY ./workflow-optogenetics/docker/apt_requirements.txt /tmp/ +RUN /entrypoint.sh echo "Installed dependencies." 
+ +WORKDIR /main/workflow-optogenetics + +# Always get interface/djarchive +RUN pip install --no-deps "element-interface@git+https://github.com/datajoint/element-interface" +RUN pip install --no-deps "djarchive-client@git+https://github.com/datajoint/djarchive-client" + +# Always move local - conditional install in setup.sh +COPY --chown=anaconda:anaconda ./element-lab/ /main/element-lab/ +COPY --chown=anaconda:anaconda ./element-animal/ /main/element-animal/ +COPY --chown=anaconda:anaconda ./element-session/ /main/element-session/ +COPY --chown=anaconda:anaconda ./element-event/ /main/element-event/ +COPY --chown=anaconda:anaconda ./element-interface/ /main/element-interface/ +COPY --chown=anaconda:anaconda ./element-optogenetics/ /main/element-optogenetics/ +COPY --chown=anaconda:anaconda ./workflow-optogenetics/ /main/workflow-optogenetics/ + +# Conditional install - local-all, local-dlc, or git +COPY --chown=anaconda:anaconda ./workflow-optogenetics/docker/setup.sh /main/ +COPY --chown=anaconda:anaconda ./workflow-optogenetics/docker/.env /main/ +RUN chmod 755 /main/setup.sh +RUN chmod 755 /main/.env +RUN /main/setup.sh +RUN rm -f ./dj_local_conf.json diff --git a/docker/apt_requirements.txt b/docker/apt_requirements.txt new file mode 100755 index 0000000..3505bb3 --- /dev/null +++ b/docker/apt_requirements.txt @@ -0,0 +1,2 @@ +git +locales-all diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml new file mode 100644 index 0000000..960b88a --- /dev/null +++ b/docker/docker-compose.yaml @@ -0,0 +1,63 @@ +# .env file. 
Careful that vscode black does not add spaces around '=' +# COMPOSE_PROJECT_NAME='opto' +# TEST_DATA_DIR= +# GITHUB_USERNAME=datajoint +# INSTALL_OPTION=local-all, local-only, or git +# TEST_CMD="pytest" # pytest --dj-{verbose,teardown} False # options +# # to do nothing, set as "True" +# export COMPOSE_DOCKER_CLI_BUILD=0 # some machines need for smooth --build +# docker-compose --env-file ./docker/.env -f ./docker/docker-compose-test.yaml up --build --force-recreate --detached +# docker exec -it workflow-optogenetics /bin/bash +# docker-compose -f ./docker/docker-compose-test.yaml down --volumes + +version: "2.4" + +services: + db: + networks: + optogenetics: + image: datajoint/mysql:5.7 + environment: + MYSQL_ROOT_PASSWORD: simple + container_name: workflow-optogenetics-db + + workflow: + networks: + optogenetics: + build: + context: ../../ + dockerfile: ./workflow-optogenetics/docker/Dockerfile.test + args: + - GITHUB_USERNAME=${GITHUB_USERNAME} + image: workflow-optogenetics:0.0.0 + container_name: workflow-optogenetics + environment: + - DJ_HOST=db + - DJ_USER=root + - DJ_PASS=simple + - opto_ROOT_DATA_DIR=/main/test_data/ + - DATABASE_PREFIX=test_ + - COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME} + - GITHUB_USERNAME=${GITHUB_USERNAME} + - INSTALL_OPTION=${INSTALL_OPTION} + - TEST_CMD=${TEST_CMD} + command: + - bash + - -c + - | + eval ${TEST_CMD} + tail -f /dev/null + volumes: + - ${TEST_DATA_DIR}:/main/test_data/ + - ../../workflow-optogenetics/docker/apt_requirements.txt:/tmp/apt_requirements.txt + - ../../element-lab:/main/element-lab + - ../../element-animal:/main/element-animal + - ../../element-session:/main/element-session + - ../../element-optogenetics:/main/element-optogenetics + - ../../workflow-optogenetics:/main/workflow-optogenetics + depends_on: + db: + condition: service_healthy + +networks: + optogenetics: diff --git a/docker/setup.sh b/docker/setup.sh new file mode 100644 index 0000000..68ef81c --- /dev/null +++ b/docker/setup.sh @@ -0,0 +1,37 
@@ +#! /bin/bash +export $(grep -v '^#' /main/.env | xargs) + +echo "INSTALL OPTION:" $INSTALL_OPTION +cd /main/ +# all local installs, mapped from host +if [ "$INSTALL_OPTION" == "local-all" ]; then + for f in lab animal session event optogenetics; do + pip install -e ./element-${f} + done + pip install -e ./workflow-optogenetics +# all except workflow pip installed +else + pip install git+https://github.com/${GITHUB_USERNAME}/element-lab.git + pip install git+https://github.com/${GITHUB_USERNAME}/element-animal.git + pip install git+https://github.com/${GITHUB_USERNAME}/element-session.git + pip install git+https://github.com/${GITHUB_USERNAME}/element-event.git + # only optogenetics items from local install + if [ "$INSTALL_OPTION" == "local-only" ]; then + pip install -e ./element-optogenetics + pip install -e ./workflow-optogenetics + # all from github + elif [ "$INSTALL_OPTION" == "git" ]; then + pip install git+https://github.com/${GITHUB_USERNAME}/element-optogenetics.git + pip install git+https://github.com/${GITHUB_USERNAME}/workflow-optogenetics.git + fi +fi + +# If test cmd contains pytest, install +if [[ "$TEST_CMD" == *pytest* ]]; then + pip install pytest + pip install pytest-cov +fi + +# additional installs for running DLC +pip install torch +pip install ffmpeg diff --git a/notebooks/01-configure.ipynb b/notebooks/01-configure.ipynb new file mode 100644 index 0000000..ebc3f56 --- /dev/null +++ b/notebooks/01-configure.ipynb @@ -0,0 +1,151 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# DataJoint configuration\n", + "\n", + "## Setup - Working Directory\n", + "\n", + "To run the workflow, we need to properly set up the DataJoint configuration. The configuration can be saved in a local directory as `dj_local_conf.json` or at your root directory as a hidden file. 
This notebook walks you through the setup process.\n", + "\n", + "**The configuration only needs to be set up once**, if you have gone through the configuration before, directly go to [02-workflow-structure](02-workflow-structure-optional.ipynb).\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import datajoint as dj\n", + "\n", + "if os.path.basename(os.getcwd()) == \"notebooks\":\n", + " os.chdir(\"..\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup - Credentials\n", + "\n", + "Now let's set up the host, user and password in the `dj.config` global variable\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "\n", + "dj.config[\"database.host\"] = \"{YOUR_HOST}\" # CodeBook users should omit this\n", + "dj.config[\"database.user\"] = \"{YOUR_USERNAME}\"\n", + "dj.config[\"database.password\"] = getpass.getpass() # enter the password securely" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You should be able to connect to the database at this stage.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dj.conn()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup - `dj.config['custom']`\n", + "\n", + "The major component of the current workflow is Element Optogenetics (see [GitHub repository](https://github.com/datajoint/element-optogenetics) and [documentation](https://datajoint.com/docs/elements/element-optogenetics)). Many Elements require configurations in the field `custom` in `dj.config`:\n", + "\n", + "### Database prefix\n", + "\n", + "Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, e.g. 
`neuro_lab`, `neuro_subject`, etc.\n", + "\n", + "The prefix could be configured to your username in `dj.config` as follows.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "username_as_prefix = dj.config[\"database.user\"] + \"_\"\n", + "dj.config[\"custom\"] = {\"database.prefix\": username_as_prefix}" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Save configuration\n", + "\n", + "We could save this as a file, either as a local json file, or a global file. Local configuration file is saved as `dj_local_conf.json` in current directory, which is great for project-specific settings.\n", + "\n", + "For first-time users, we recommend saving globally. This will create a hidden configuration file in your root directory, which will be loaded whenever there is no local version to override it.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# dj.config.save_local()\n", + "dj.config.save_global()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Next Step\n", + "\n", + "After the configuration, we will be able to run through the workflow with the [02-workflow-structure](02-workflow-structure-optional.ipynb) notebook.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.9.13 ('ele')", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "vscode": { + "interpreter": { + "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/notebooks/02-workflow-structure-optional.ipynb 
b/notebooks/02-workflow-structure-optional.ipynb new file mode 100644 index 0000000..b53af38 --- /dev/null +++ b/notebooks/02-workflow-structure-optional.ipynb @@ -0,0 +1,646 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Introduction to the workflow structure\n", + "\n", + "This notebook gives a brief overview of the workflow structure and introduces some useful DataJoint tools to facilitate the exploration.\n", + "\n", + "- DataJoint needs to be pre-configured before running this notebook, if you haven't set up the configuration, refer to notebook [01-configure](01-configure.ipynb).\n", + "\n", + "- If you are familiar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow.\n", + "\n", + "- For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/)\n", + "\n", + "To load the local configuration, we will change the directory to the package root.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "if os.path.basename(os.getcwd()) == \"notebooks\":\n", + " os.chdir(\"..\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Schemas and tables\n", + "\n", + "The current workflow is composed of multiple database schemas, each of them corresponds to a module within `workflow_optogenetics.pipeline`\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2022-12-06 14:56:19,370][INFO]: Connecting cbroz@dss-db.datajoint.io:3306\n", + "[2022-12-06 14:56:19,698][INFO]: Connected cbroz@dss-db.datajoint.io:3306\n" + ] + } + ], + "source": [ + "import datajoint as dj\n", + "from 
workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Each module contains a schema object that enables interaction with the schema in the database.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "title": "Each module imported above corresponds to one schema inside the database. For example, `ephys` corresponds to `neuro_ephys` schema in the database." + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Schema `cbroz_wfopto_opto`" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "opto.schema" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The table classes in the module correspond to a table in the schema in the database.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "title": "Each datajoint table class inside the module corresponds to a table inside the schema. For example, the class `ephys.EphysRecording` corresponds to the table `_ephys_recording` in the schema `neuro_ephys` in the database." + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " OptoWaveform defines the shape of one cycle of the optogenetic stimulus\n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "
\n", + "

waveform_name

\n", + " \n", + "
\n", + "

waveform_type

\n", + " \n", + "
\n", + "

normalized_waveform

\n", + " For one cycle, normalized to peak\n", + "
\n", + "

waveform_description

\n", + " description of the waveform\n", + "
square_10square=BLOB=Square waveform: 10%/90% on/off cycle
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*waveform_name waveform_type normalized waveform_descr\n", + "+------------+ +------------+ +--------+ +------------+\n", + "square_10 square =BLOB= Square wavefor\n", + " (Total: 1)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# preview columns and contents in a table\n", + "opto.OptoWaveform()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "lines_to_next_cell": 0, + "title": "The first time importing the modules, empty schemas and tables will be created in the database." + }, + "source": [ + "By importing the modules for the first time, the schemas and tables will be created inside the database.\n", + "\n", + "Once created, importing modules will not create schemas and tables again, but the existing schemas/tables can be accessed and manipulated by the modules.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "lines_to_next_cell": 0, + "title": "The schemas and tables will not be re-created when importing modules if they have existed." + }, + "source": [ + "## DataJoint tools to explore schemas and tables\n", + "\n", + "`dj.list_schemas()`: list all schemas a user has access to in the current database\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "title": "`dj.list_schemas()`: list all schemas a user could access." + }, + "outputs": [], + "source": [ + "dj.list_schemas()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`dj.Diagram()`: plot tables and dependencies in a schema. 
See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/).\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "title": "`dj.Diagram()`: plot tables and dependencies" + }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Plot diagram for all tables in a schema\n", + "dj.Diagram(opto)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "title": "`dj.Diagram()`: plot the diagram of the tables and dependencies. It could be used to plot tables in a schema or selected tables." 
+ }, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n\n7\n\n7\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\n7->surgery.Implantation\n\n\n\n\n10\n\n10\n\n\n\n10->surgery.Implantation\n\n\n\n\n8\n\n8\n\n\n\n8->surgery.Implantation\n\n\n\n\n9\n\n9\n\n\n\n9->surgery.Implantation\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsubject.Subject->session.Session\n\n\n\n\nsubject.Subject->surgery.Implantation\n\n\n\n\nsurgery.Hemisphere\n\n\nsurgery.Hemisphere\n\n\n\n\n\nsurgery.Hemisphere->surgery.Implantation\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nDevice\n\n\nDevice\n\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nDevice->opto.OptoProtocol\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n\nsurgery.CoordinateReference\n\n\nsurgery.CoordinateReference\n\n\n\n\n\nsurgery.CoordinateReference->10\n\n\n\n\nsurgery.CoordinateReference->8\n\n\n\n\nsurgery.CoordinateReference->9\n\n\n\n\nsession.Session->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nsurgery.Implantation->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nlab.User\n\n\nlab.User\n\n\n\n\n\nlab.User->7\n\n\n\n\nsurgery.BrainRegion\n\n\nsurgery.BrainRegion\n\n\n\n\n\nsurgery.BrainRegion->surgery.Implantation\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nsurgery.ImplantationType\n\n\nsurgery.ImplantationType\n\n\n\n\n\nsurgery.Implantat
ionType->surgery.Implantation\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Plot diagram of tables in multiple schemas.\n", + "# Adding and subtracting looks downstream and upstream respectively\n", + "dj.Diagram(surgery) + dj.Diagram(opto) - 1" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsession.Session->opto.OptoProtocol\n\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\nsurgery.Implantation->opto.OptoProtocol\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsubject.Subject->session.Session\n\n\n\n\nsubject.Subject->surgery.Implantation\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Plot diagram of selected tables and schemas\n", + "(\n", + " dj.Diagram(subject.Subject)\n", + " + dj.Diagram(session.Session)\n", + " + dj.Diagram(surgery.Implantation)\n", + " + dj.Diagram(opto.OptoProtocol)\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "lines_to_next_cell": 0, + "title": "`heading`:" + }, + "source": [ + "`describe()`: show table definition with foreign key references.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-> session.Session\n", + "protocol_id : int \n", + "---\n", + "-> opto.OptoStimParams\n", + "-> surgery.Implantation\n", + "-> [nullable] Device\n", + "\n" + ] + }, + { + "data": { + "text/plain": [ + "'-> session.Session\\nprotocol_id : int \\n---\\n-> opto.OptoStimParams\\n-> surgery.Implantation\\n-> [nullable] Device\\n'" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": 
"execute_result" + } + ], + "source": [ + "opto.OptoProtocol.describe()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`heading`: show attribute definitions regardless of foreign key references\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "title": "`heading`: show table attributes regardless of foreign key references." + }, + "outputs": [ + { + "data": { + "text/plain": [ + "# \n", + "subject : varchar(8) # \n", + "session_id : int # \n", + "protocol_id : int # \n", + "---\n", + "opto_params_id : smallint # \n", + "implant_date : datetime # surgery date\n", + "implant_type : varchar(16) # Short name for type of implanted device\n", + "region_acronym : varchar(32) # Brain region shorthand\n", + "hemisphere : varchar(8) # Brain region hemisphere\n", + "device=null : varchar(32) # " + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "opto.OptoProtocol.heading" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "title": "ephys" + }, + "source": [ + "## Elements in `workflow-optogenetics`\n", + "\n", + "[`lab`](https://datajoint.com/docs/elements/element-animal/): lab management related information, such as Lab, User, Project, Protocol, Source.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": 
"\n\n\n\n\nlab.Lab.Organization\n\n\nlab.Lab.Organization\n\n\n\n\n\nlab.Location\n\n\nlab.Location\n\n\n\n\n\nlab.Project\n\n\nlab.Project\n\n\n\n\n\nlab.ProjectKeywords\n\n\nlab.ProjectKeywords\n\n\n\n\n\nlab.Project->lab.ProjectKeywords\n\n\n\n\nlab.ProjectPublication\n\n\nlab.ProjectPublication\n\n\n\n\n\nlab.Project->lab.ProjectPublication\n\n\n\n\nlab.ProjectUser\n\n\nlab.ProjectUser\n\n\n\n\n\nlab.Project->lab.ProjectUser\n\n\n\n\nlab.ProjectSourceCode\n\n\nlab.ProjectSourceCode\n\n\n\n\n\nlab.Project->lab.ProjectSourceCode\n\n\n\n\nlab.UserRole\n\n\nlab.UserRole\n\n\n\n\n\nlab.LabMembership\n\n\nlab.LabMembership\n\n\n\n\n\nlab.UserRole->lab.LabMembership\n\n\n\n\nlab.ProtocolType\n\n\nlab.ProtocolType\n\n\n\n\n\nlab.Protocol\n\n\nlab.Protocol\n\n\n\n\n\nlab.ProtocolType->lab.Protocol\n\n\n\n\nlab.User\n\n\nlab.User\n\n\n\n\n\nlab.User->lab.ProjectUser\n\n\n\n\nlab.User->lab.LabMembership\n\n\n\n\nlab.Organization\n\n\nlab.Organization\n\n\n\n\n\nlab.Organization->lab.Lab.Organization\n\n\n\n\nlab.Lab\n\n\nlab.Lab\n\n\n\n\n\nlab.Lab->lab.Lab.Organization\n\n\n\n\nlab.Lab->lab.Location\n\n\n\n\nlab.Lab->lab.LabMembership\n\n\n\n\nDevice\n\n\nDevice\n\n\n\n\n\nlab.Source\n\n\nlab.Source\n\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dj.Diagram(lab)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[`subject`](https://datajoint.com/docs/elements/element-animal/): general animal metadata and surgery information" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": 
"\n\n\n\n\nsubject.Line\n\n\nsubject.Line\n\n\n\n\n\nsubject.Subject.Line\n\n\nsubject.Subject.Line\n\n\n\n\n\nsubject.Line->subject.Subject.Line\n\n\n\n\nsubject.Line.Allele\n\n\nsubject.Line.Allele\n\n\n\n\n\nsubject.Line->subject.Line.Allele\n\n\n\n\nsubject.Zygosity\n\n\nsubject.Zygosity\n\n\n\n\n\nsubject.Subject.Lab\n\n\nsubject.Subject.Lab\n\n\n\n\n\nsubject.SubjectCull\n\n\nsubject.SubjectCull\n\n\n\n\n\nsubject.SubjectDeath\n\n\nsubject.SubjectDeath\n\n\n\n\n\nsubject.SubjectDeath->subject.SubjectCull\n\n\n\n\nsubject.Subject.Strain\n\n\nsubject.Subject.Strain\n\n\n\n\n\nsubject.Subject.User\n\n\nsubject.Subject.User\n\n\n\n\n\nsubject.Subject.Protocol\n\n\nsubject.Subject.Protocol\n\n\n\n\n\nsubject.Strain\n\n\nsubject.Strain\n\n\n\n\n\nsubject.Strain->subject.Subject.Strain\n\n\n\n\nsubject.Subject.Source\n\n\nsubject.Subject.Source\n\n\n\n\n\nsubject.Allele.Source\n\n\nsubject.Allele.Source\n\n\n\n\n\nsubject.Allele\n\n\nsubject.Allele\n\n\n\n\n\nsubject.Allele->subject.Zygosity\n\n\n\n\nsubject.Allele->subject.Line.Allele\n\n\n\n\nsubject.Allele->subject.Allele.Source\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsubject.Subject->subject.Zygosity\n\n\n\n\nsubject.Subject->subject.Subject.Lab\n\n\n\n\nsubject.Subject->subject.SubjectDeath\n\n\n\n\nsubject.Subject->subject.Subject.Strain\n\n\n\n\nsubject.Subject->subject.Subject.Line\n\n\n\n\nsubject.Subject->subject.Subject.User\n\n\n\n\nsubject.Subject->subject.Subject.Protocol\n\n\n\n\nsubject.Subject->subject.Subject.Source\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dj.Diagram(subject)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "title": "[subject](https://github.com/datajoint/element-animal): contains the basic information of subject, including Strain, Line, Subject, Zygosity, and SubjectDeath information." 
+ }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "subject : varchar(8) \n", + "---\n", + "subject_nickname=\"\" : varchar(64) \n", + "sex : enum('M','F','U') \n", + "subject_birth_date : date \n", + "subject_description=\"\" : varchar(1024) \n", + "\n" + ] + }, + { + "data": { + "text/plain": [ + "'subject : varchar(8) \\n---\\nsubject_nickname=\"\" : varchar(64) \\nsex : enum(\\'M\\',\\'F\\',\\'U\\') \\nsubject_birth_date : date \\nsubject_description=\"\" : varchar(1024) \\n'" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "subject.Subject.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n\n148\n\n148\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\n148->surgery.Implantation\n\n\n\n\n150\n\n150\n\n\n\n150->surgery.Implantation\n\n\n\n\n149\n\n149\n\n\n\n149->surgery.Implantation\n\n\n\n\nsurgery.Hemisphere\n\n\nsurgery.Hemisphere\n\n\n\n\n\nsurgery.Hemisphere->surgery.Implantation\n\n\n\n\nsurgery.CoordinateReference\n\n\nsurgery.CoordinateReference\n\n\n\n\n\nsurgery.CoordinateReference->148\n\n\n\n\nsurgery.CoordinateReference->150\n\n\n\n\nsurgery.CoordinateReference->149\n\n\n\n\nsurgery.BrainRegion\n\n\nsurgery.BrainRegion\n\n\n\n\n\nsurgery.BrainRegion->surgery.Implantation\n\n\n\n\nsurgery.ImplantationType\n\n\nsurgery.ImplantationType\n\n\n\n\n\nsurgery.ImplantationType->surgery.Implantation\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dj.Diagram(surgery)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[`session`](https://datajoint.com/docs/elements/element-session/): General information of experimental sessions.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + 
"data": { + "image/svg+xml": "\n\n\n\n\nsession.Session.Attribute\n\n\nsession.Session.Attribute\n\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsession.Session->session.Session.Attribute\n\n\n\n\nsession.SessionDirectory\n\n\nsession.SessionDirectory\n\n\n\n\n\nsession.Session->session.SessionDirectory\n\n\n\n\nsession.SessionExperimenter\n\n\nsession.SessionExperimenter\n\n\n\n\n\nsession.Session->session.SessionExperimenter\n\n\n\n\nsession.SessionNote\n\n\nsession.SessionNote\n\n\n\n\n\nsession.Session->session.SessionNote\n\n\n\n\nsession.ProjectSession\n\n\nsession.ProjectSession\n\n\n\n\n\nsession.Session->session.ProjectSession\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dj.Diagram(session)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "title": "[session](https://github.com/datajoint/element-session): experimental session information" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-> subject.Subject\n", + "session_id : int \n", + "---\n", + "session_datetime : datetime \n", + "\n" + ] + }, + { + "data": { + "text/plain": [ + "'-> subject.Subject\\nsession_id : int \\n---\\nsession_datetime : datetime \\n'" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "session.Session.describe()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[`opto`](https://github.com/datajoint/element-optogenetics): Optogenetics stimulus and timing data\n" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "title": "[probe and ephys](https://github.com/datajoint/element-array-ephys): Neuropixel based probe and ephys tables" + }, + "outputs": [ + { + "data": { + "image/svg+xml": 
"\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dj.Diagram(opto)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summary and next step\n", + "\n", + "- This notebook introduced the overall structures of the schemas and tables in the workflow and relevant tools to explore the schema structure and table definitions.\n", + "\n", + "- In the next notebook [03-process](03-process.ipynb), we will introduce the detailed steps to run through `workflow-optogenetics`.\n" + ] + } + ], + "metadata": { + "jupytext": { + "encoding": "# -*- coding: utf-8 -*-" + }, + "kernelspec": { + "display_name": "Python 3.9.13 ('ele')", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "vscode": { + "interpreter": { + "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" + } + } + }, + "nbformat": 4, + 
"nbformat_minor": 4 +} diff --git a/notebooks/03-process.ipynb b/notebooks/03-process.ipynb new file mode 100644 index 0000000..bcfa5a4 --- /dev/null +++ b/notebooks/03-process.ipynb @@ -0,0 +1,1792 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Interactively run workflow optogenetics\n", + "\n", + "- This notebook walks you through the steps in detail to run the `workflow-optogenetics`.\n", + "\n", + "- If you haven't configured the paths, refer to [01-configure](01-configure.ipynb).\n", + "\n", + "- To overview the schema structures, refer to [02-workflow-structure](02-workflow-structure.ipynb).\n", + "\n", + "- For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's change the directory to the package root directory to load the local configuration (`dj_local_conf.json`).\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "if os.path.basename(os.getcwd()) == \"notebooks\":\n", + " os.chdir(\"..\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## `pipeline.py`\n", + "\n", + "This script `activates` the DataJoint Elements and declares other required tables.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2022-12-08 15:27:54,191][INFO]: Connecting cbroz@dss-db.datajoint.io:3306\n", + "[2022-12-08 15:27:54,471][INFO]: Connected cbroz@dss-db.datajoint.io:3306\n" + ] + } + ], + "source": [ + "import datajoint as dj\n", + "from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device" + ] + }, + { + "cell_type": "markdown", + 
"metadata": {}, + "source": [ + "## Schema diagrams\n", + "\n", + "See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/).\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": "\n\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nsurgery.Implantation->opto.OptoProtocol\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsubject.Subject->surgery.Implantation\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsubject.Subject->session.Session\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n\nsession.Session->opto.OptoProtocol\n\n\n\n", + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(\n", + " dj.Diagram(subject.Subject)\n", + " + dj.Diagram(session.Session)\n", + " + dj.Diagram(surgery.Implantation)\n", + " + dj.Diagram(opto)\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Inserting data\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### `lab` schema\n", + "\n", + "`pipeline.py` adds a Device table to the `lab` schema. 
This table, like other `Lookup` tables, has default contents, but we can always add more.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "Device.insert1(\n", + " dict(\n", + " device=\"OPTG_8\",\n", + " modality=\"Optogenetics\",\n", + " description=\"8 channel pulse sequence device\",\n", + " )\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "lab.User.insert1(\n", + " dict(user=\"User1\")\n", + ") # For the surgeon attribute in surgery.Implantation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### `subject` schema\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "# \n", + "subject : varchar(8) # \n", + "---\n", + "subject_nickname=\"\" : varchar(64) # \n", + "sex : enum('M','F','U') # \n", + "subject_birth_date : date # \n", + "subject_description=\"\" : varchar(1024) # " + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "subject.Subject.heading" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "subject.Subject.insert1(\n", + " dict(\n", + " subject=\"subject3\",\n", + " sex=\"F\",\n", + " subject_birth_date=\"2020-03-03\",\n", + " subject_description=\"Optogenetic pilot subject.\",\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In order to conduct optogenetic stimulation, our subject must have an implant in the target brain region. Again, some `Lookup` tables have useful default content.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "
\n", + "

reference

\n", + " \n", + "
bregma
dura
lambda
sagittal_suture
sinus
skull_surface
\n", + " \n", + "

Total: 6

\n", + " " + ], + "text/plain": [ + "*reference \n", + "+------------+\n", + "bregma \n", + "dura \n", + "lambda \n", + "sagittal_sutur\n", + "sinus \n", + "skull_surface \n", + " (Total: 6)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "surgery.CoordinateReference()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "
\n", + "

hemisphere

\n", + " Brain region hemisphere\n", + "
left
middle
right
\n", + " \n", + "

Total: 3

\n", + " " + ], + "text/plain": [ + "*hemisphere \n", + "+------------+\n", + "left \n", + "middle \n", + "right \n", + " (Total: 3)" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "surgery.Hemisphere()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "surgery.BrainRegion.insert1(\n", + " dict(region_acronym=\"dHP\", region_name=\"Dorsal Hippocampus\")\n", + ")\n", + "surgery.Implantation.insert1(\n", + " dict(\n", + " subject=\"subject3\",\n", + " implant_date=\"2022-04-01 12:13:14\",\n", + " implant_type=\"opto\",\n", + " target_region=\"dHP\",\n", + " target_hemisphere=\"left\",\n", + " surgeon=\"user1\",\n", + " )\n", + ")\n", + "\n", + "surgery.Implantation.Coordinate.insert1(\n", + " dict(\n", + " subject=\"subject3\",\n", + " implant_date=\"2022-04-01 12:13:14\",\n", + " implant_type=\"opto\",\n", + " target_region=\"dHP\",\n", + " target_hemisphere=\"left\",\n", + " ap=\"-7.9\", # anterior-posterior distance in mm\n", + " ap_ref=\"bregma\",\n", + " ml=\"-1.8\", # medial axis distance in mm\n", + " ml_ref=\"bregma\",\n", + " dv=\"5\", # dorso-ventral axis distance in mm\n", + " dv_ref=\"skull_surface\",\n", + " theta=\"11.5\", # degree rotation about ml-axis [0, 180] wrt z\n", + " phi=\"0\", # degree rotation about dv-axis [0, 360] wrt x\n", + " beta=None, # degree rotation about shank [-180, 180] wrt anterior\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Insert into `session` schema\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "-> subject.Subject\n", + "session_id : int \n", + "---\n", + "session_datetime : datetime \n", + "\n" + ] + }, + { + "data": { + "text/plain": [ + "'-> subject.Subject\\nsession_id : int \\n---\\nsession_datetime : datetime \\n'" + ] + }, + 
"execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "session.Session.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "# \n", + "subject : varchar(8) # \n", + "session_id : int # \n", + "---\n", + "session_datetime : datetime # " + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "session.Session.heading" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

session_datetime

\n", + " \n", + "
subject312022-04-04 12:13:14
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *session_id session_dateti\n", + "+----------+ +------------+ +------------+\n", + "subject3 1 2022-04-04 12:\n", + " (Total: 1)" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "session_key = dict(\n", + " subject=\"subject3\", session_id=\"1\", session_datetime=\"2022-04-04 12:13:14\"\n", + ")\n", + "session.Session.insert1(session_key)\n", + "session.Session()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Insert into `opto` schema\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, we'll add information to describe the stimulus, including waveform shape and and application parameters.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "opto.OptoWaveform.insert1(\n", + " dict(\n", + " waveform_name=\"square_10\",\n", + " waveform_type=\"square\",\n", + " waveform_description=\"Square waveform: 10%/90% on/off cycle\",\n", + " )\n", + ")\n", + "# Square is one part table of OptoWaveform.\n", + "# For sine and ramp waveforms, see the corresponding tables\n", + "opto.OptoWaveform.Square.insert1(\n", + " dict(waveform_name=\"square_10\", on_proportion=0.10, off_proportion=0.90)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "opto.OptoStimParams.insert1(\n", + " dict(\n", + " opto_params_id=1,\n", + " waveform_name=\"square_10\",\n", + " wavelength=470,\n", + " light_intensity=10.2,\n", + " frequency=1,\n", + " duration=241,\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we'll describe the session in which these parameters were used with `OptoProtocol`\n" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "opto.OptoProtocol.insert1(\n", + " 
dict(\n", + " subject=\"subject3\",\n", + " session_id=\"1\",\n", + " protocol_id=\"1\",\n", + " opto_params_id=\"1\",\n", + " implant_date=\"2022-04-01 12:13:14\",\n", + " implant_type=\"opto\",\n", + " target_region=\"dHP\",\n", + " target_hemisphere=\"left\",\n", + " device=\"OPTG_4\",\n", + " )\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can describe the timing of these stimulations in `OptoEvent`.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "opto.OptoEvent.insert(\n", + " [\n", + " dict(\n", + " subject=\"subject3\",\n", + " session_id=1,\n", + " protocol_id=1,\n", + " stim_start_time=241,\n", + " stim_end_time=482,\n", + " ),\n", + " dict(\n", + " subject=\"subject3\",\n", + " session_id=1,\n", + " protocol_id=1,\n", + " stim_start_time=482,\n", + " stim_end_time=723,\n", + " ),\n", + " ]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To store more experimental timing information, see documentation for [Element Event](https://datajoint.com/docs/elements/element-event/).\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Automating inserts\n", + "\n", + "This workflow provides functions for ingesting this information from csv files in `ingest.py`.\n", + "\n", + "- `ingest_subjects`: subject.Subject\n", + "- `ingest_sessions`: session.Session\n", + "- `ingest_events`: Element Event schemas\n", + "- `ingest_opto`: surgery and opto schemas\n", + "\n", + "For more information on each, see the docstring.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Help on function ingest_subjects in module workflow_optogenetics.ingest:\n", + "\n", + "ingest_subjects(subject_csv_path: str = './user_data/subjects.csv', skip_duplicates: bool = True, verbose: bool = True)\n", + " Ingest 
subjects listed in the subject column of ./user_data/subjects.csv\n", + " \n", + " Args:\n", + " subject_csv_path (str, optional): Relative path to subject csv.\n", + " Defaults to \"./user_data/subjects.csv\".\n", + " skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert.\n", + " Defaults to True.\n", + " verbose (bool, optional): Provides insertion info to StdOut. Defaults to True.\n", + "\n" + ] + } + ], + "source": [ + "from workflow_optogenetics.ingest import ingest_subjects\n", + "\n", + "help(ingest_subjects)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "By default, these functions pull from files in the `user_files` directory. We can run each of these in succession with the default parameters with `ingest_all`.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "---- Inserting 0 entry(s) into subject ----\n", + "\n", + "---- Inserting 0 entry(s) into session ----\n", + "\n", + "---- Inserting 1 entry(s) into behavior_recording ----\n", + "\n", + "---- Inserting 5 entry(s) into behavior_recording__file ----\n", + "\n", + "---- Inserting 4 entry(s) into _block ----\n", + "\n", + "---- Inserting 8 entry(s) into _block__attribute ----\n", + "\n", + "---- Inserting 2 entry(s) into #trial_type ----\n", + "\n", + "---- Inserting 40 entry(s) into _trial ----\n", + "\n", + "---- Inserting 40 entry(s) into _trial__attribute ----\n", + "\n", + "---- Inserting 40 entry(s) into _block_trial ----\n", + "\n", + "---- Inserting 3 entry(s) into #event_type ----\n", + "\n", + "---- Inserting 37 entry(s) into _event ----\n", + "\n", + "---- Inserting 37 entry(s) into _trial_event ----\n", + "\n", + "---- Inserting 0 entry(s) into #opto_waveform ----\n", + "\n", + "---- Inserting 0 entry(s) into #opto_waveform__square ----\n", + "\n", + "---- Inserting 0 entry(s) into opto_stim_params ----\n", + "\n", + "---- 
Inserting 0 entry(s) into #coordinate_reference ----\n", + "\n", + "---- Inserting 0 entry(s) into brain_region ----\n", + "\n", + "---- Inserting 0 entry(s) into #user ----\n", + "\n", + "---- Inserting 0 entry(s) into implantation ----\n", + "\n", + "---- Inserting 0 entry(s) into implantation__coordinate ----\n", + "\n", + "---- Inserting 0 entry(s) into opto_protocol ----\n", + "\n", + "---- Inserting 0 entry(s) into opto_event ----\n" + ] + } + ], + "source": [ + "from workflow_optogenetics.ingest import ingest_all\n", + "\n", + "ingest_all()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Events\n", + "\n", + "The above `ingest_all()` also added behavioral events we can example in conjunction with optogenetic events. For convenience, these stimulation events are also reflected in the Block design of Element Event.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

block_id

\n", + " block number (1-based indexing)\n", + "
\n", + "

trial_id

\n", + " trial number (1-based indexing)\n", + "
\n", + "

event_type

\n", + " \n", + "
\n", + "

event_start_time

\n", + " (second) relative to recording start\n", + "
\n", + "

attribute_name

\n", + " \n", + "
\n", + "

attribute_value

\n", + " \n", + "
\n", + "

attribute_blob

\n", + " \n", + "
subject3111center4.864stimulationoff=BLOB=
subject3111center10.192stimulationoff=BLOB=
subject3111center17.865stimulationoff=BLOB=
subject3113center59.128stimulationoff=BLOB=
subject3113left49.536stimulationoff=BLOB=
subject3113left64.453stimulationoff=BLOB=
subject3114center90.447stimulationoff=BLOB=
subject3114right73.844stimulationoff=BLOB=
subject3114right79.506stimulationoff=BLOB=
subject3115left101.14stimulationoff=BLOB=
subject3115right108.499stimulationoff=BLOB=
subject3119right192.237stimulationoff=BLOB=
subject31110center216.542stimulationoff=BLOB=
subject31322center509.98stimulationon=BLOB=
subject31323center534.82stimulationon=BLOB=
subject31324center555.033stimulationon=BLOB=
subject31326center605.523stimulationon=BLOB=
subject31329center673.068stimulationon=BLOB=
subject31431left725.153stimulationoff=BLOB=
subject31433center771.223stimulationoff=BLOB=
subject31433left778.669stimulationoff=BLOB=
subject31433left789.797stimulationoff=BLOB=
subject31434center798.334stimulationoff=BLOB=
subject31434left804.33stimulationoff=BLOB=
subject31435center836.522stimulationoff=BLOB=
subject31435left818.922stimulationoff=BLOB=
subject31435left831.301stimulationoff=BLOB=
subject31436center847.933stimulationoff=BLOB=
subject31436left852.572stimulationoff=BLOB=
subject31437center870.121stimulationoff=BLOB=
\n", + "

...

\n", + "

Total: 37

\n", + " " + ], + "text/plain": [ + "*subject *session_id *block_id *trial_id *event_type *event_start_t *attribute_nam attribute_valu attribute_\n", + "+----------+ +------------+ +----------+ +----------+ +------------+ +------------+ +------------+ +------------+ +--------+\n", + "subject3 1 1 1 center 4.864 stimulation off =BLOB= \n", + "subject3 1 1 1 center 10.192 stimulation off =BLOB= \n", + "subject3 1 1 1 center 17.865 stimulation off =BLOB= \n", + "subject3 1 1 3 center 59.128 stimulation off =BLOB= \n", + "subject3 1 1 3 left 49.536 stimulation off =BLOB= \n", + "subject3 1 1 3 left 64.453 stimulation off =BLOB= \n", + "subject3 1 1 4 center 90.447 stimulation off =BLOB= \n", + "subject3 1 1 4 right 73.844 stimulation off =BLOB= \n", + "subject3 1 1 4 right 79.506 stimulation off =BLOB= \n", + "subject3 1 1 5 left 101.14 stimulation off =BLOB= \n", + "subject3 1 1 5 right 108.499 stimulation off =BLOB= \n", + "subject3 1 1 9 right 192.237 stimulation off =BLOB= \n", + "subject3 1 1 10 center 216.542 stimulation off =BLOB= \n", + "subject3 1 3 22 center 509.98 stimulation on =BLOB= \n", + "subject3 1 3 23 center 534.82 stimulation on =BLOB= \n", + "subject3 1 3 24 center 555.033 stimulation on =BLOB= \n", + "subject3 1 3 26 center 605.523 stimulation on =BLOB= \n", + "subject3 1 3 29 center 673.068 stimulation on =BLOB= \n", + "subject3 1 4 31 left 725.153 stimulation off =BLOB= \n", + "subject3 1 4 33 center 771.223 stimulation off =BLOB= \n", + "subject3 1 4 33 left 778.669 stimulation off =BLOB= \n", + "subject3 1 4 33 left 789.797 stimulation off =BLOB= \n", + "subject3 1 4 34 center 798.334 stimulation off =BLOB= \n", + "subject3 1 4 34 left 804.33 stimulation off =BLOB= \n", + "subject3 1 4 35 center 836.522 stimulation off =BLOB= \n", + "subject3 1 4 35 left 818.922 stimulation off =BLOB= \n", + "subject3 1 4 35 left 831.301 stimulation off =BLOB= \n", + "subject3 1 4 36 center 847.933 stimulation off =BLOB= \n", + "subject3 1 4 36 left 852.572 
stimulation off =BLOB= \n", + "subject3 1 4 37 center 870.121 stimulation off =BLOB= \n", + " ...\n", + " (Total: 37)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from workflow_optogenetics.pipeline import event, trial\n", + "\n", + "events_by_block = (\n", + " trial.BlockTrial * trial.TrialEvent * trial.Block.Attribute\n", + " & \"attribute_name='stimulation'\"\n", + ")\n", + "events_by_block" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can readily compare the count of events or event types across 'on' and 'off' stimulation conditions." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

block_id

\n", + " block number (1-based indexing)\n", + "
\n", + "

trial_id

\n", + " trial number (1-based indexing)\n", + "
\n", + "

event_type

\n", + " \n", + "
\n", + "

event_start_time

\n", + " (second) relative to recording start\n", + "
\n", + "

attribute_name

\n", + " \n", + "
\n", + "

attribute_value

\n", + " \n", + "
\n", + "

attribute_blob

\n", + " \n", + "
subject31322center509.98stimulationon=BLOB=
subject31323center534.82stimulationon=BLOB=
subject31324center555.033stimulationon=BLOB=
subject31326center605.523stimulationon=BLOB=
subject31329center673.068stimulationon=BLOB=
\n", + " \n", + "

Total: 5

\n", + " " + ], + "text/plain": [ + "*subject *session_id *block_id *trial_id *event_type *event_start_t *attribute_nam attribute_valu attribute_\n", + "+----------+ +------------+ +----------+ +----------+ +------------+ +------------+ +------------+ +------------+ +--------+\n", + "subject3 1 3 22 center 509.98 stimulation on =BLOB= \n", + "subject3 1 3 23 center 534.82 stimulation on =BLOB= \n", + "subject3 1 3 24 center 555.033 stimulation on =BLOB= \n", + "subject3 1 3 26 center 605.523 stimulation on =BLOB= \n", + "subject3 1 3 29 center 673.068 stimulation on =BLOB= \n", + " (Total: 5)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "events_by_block & \"attribute_value='on'\"" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

block_id

\n", + " block number (1-based indexing)\n", + "
\n", + "

trial_id

\n", + " trial number (1-based indexing)\n", + "
\n", + "

event_type

\n", + " \n", + "
\n", + "

event_start_time

\n", + " (second) relative to recording start\n", + "
\n", + "

attribute_name

\n", + " \n", + "
\n", + "

attribute_value

\n", + " \n", + "
\n", + "

attribute_blob

\n", + " \n", + "
subject3111center4.864stimulationoff=BLOB=
subject3111center10.192stimulationoff=BLOB=
subject3111center17.865stimulationoff=BLOB=
subject3113center59.128stimulationoff=BLOB=
subject3113left49.536stimulationoff=BLOB=
subject3113left64.453stimulationoff=BLOB=
subject3114center90.447stimulationoff=BLOB=
subject3114right73.844stimulationoff=BLOB=
subject3114right79.506stimulationoff=BLOB=
subject3115left101.14stimulationoff=BLOB=
subject3115right108.499stimulationoff=BLOB=
subject3119right192.237stimulationoff=BLOB=
subject31110center216.542stimulationoff=BLOB=
subject31431left725.153stimulationoff=BLOB=
subject31433center771.223stimulationoff=BLOB=
subject31433left778.669stimulationoff=BLOB=
subject31433left789.797stimulationoff=BLOB=
subject31434center798.334stimulationoff=BLOB=
subject31434left804.33stimulationoff=BLOB=
subject31435center836.522stimulationoff=BLOB=
subject31435left818.922stimulationoff=BLOB=
subject31435left831.301stimulationoff=BLOB=
subject31436center847.933stimulationoff=BLOB=
subject31436left852.572stimulationoff=BLOB=
subject31437center870.121stimulationoff=BLOB=
subject31437right877.152stimulationoff=BLOB=
subject31437right881.91stimulationoff=BLOB=
subject31439center917.759stimulationoff=BLOB=
subject31439center923.011stimulationoff=BLOB=
subject31440center940.588stimulationoff=BLOB=
\n", + "

...

\n", + "

Total: 32

\n", + " " + ], + "text/plain": [ + "*subject *session_id *block_id *trial_id *event_type *event_start_t *attribute_nam attribute_valu attribute_\n", + "+----------+ +------------+ +----------+ +----------+ +------------+ +------------+ +------------+ +------------+ +--------+\n", + "subject3 1 1 1 center 4.864 stimulation off =BLOB= \n", + "subject3 1 1 1 center 10.192 stimulation off =BLOB= \n", + "subject3 1 1 1 center 17.865 stimulation off =BLOB= \n", + "subject3 1 1 3 center 59.128 stimulation off =BLOB= \n", + "subject3 1 1 3 left 49.536 stimulation off =BLOB= \n", + "subject3 1 1 3 left 64.453 stimulation off =BLOB= \n", + "subject3 1 1 4 center 90.447 stimulation off =BLOB= \n", + "subject3 1 1 4 right 73.844 stimulation off =BLOB= \n", + "subject3 1 1 4 right 79.506 stimulation off =BLOB= \n", + "subject3 1 1 5 left 101.14 stimulation off =BLOB= \n", + "subject3 1 1 5 right 108.499 stimulation off =BLOB= \n", + "subject3 1 1 9 right 192.237 stimulation off =BLOB= \n", + "subject3 1 1 10 center 216.542 stimulation off =BLOB= \n", + "subject3 1 4 31 left 725.153 stimulation off =BLOB= \n", + "subject3 1 4 33 center 771.223 stimulation off =BLOB= \n", + "subject3 1 4 33 left 778.669 stimulation off =BLOB= \n", + "subject3 1 4 33 left 789.797 stimulation off =BLOB= \n", + "subject3 1 4 34 center 798.334 stimulation off =BLOB= \n", + "subject3 1 4 34 left 804.33 stimulation off =BLOB= \n", + "subject3 1 4 35 center 836.522 stimulation off =BLOB= \n", + "subject3 1 4 35 left 818.922 stimulation off =BLOB= \n", + "subject3 1 4 35 left 831.301 stimulation off =BLOB= \n", + "subject3 1 4 36 center 847.933 stimulation off =BLOB= \n", + "subject3 1 4 36 left 852.572 stimulation off =BLOB= \n", + "subject3 1 4 37 center 870.121 stimulation off =BLOB= \n", + "subject3 1 4 37 right 877.152 stimulation off =BLOB= \n", + "subject3 1 4 37 right 881.91 stimulation off =BLOB= \n", + "subject3 1 4 39 center 917.759 stimulation off =BLOB= \n", + "subject3 1 4 39 center 
923.011 stimulation off =BLOB= \n", + "subject3 1 4 40 center 940.588 stimulation off =BLOB= \n", + " ...\n", + " (Total: 32)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "events_by_block & \"attribute_value='off'\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Next Steps\n", + "\n", + "Interested in using Element Optogenetics for your own project? Reach out to the DataJoint team via [email](mailto:support@datajoint.com) or [Slack](https://datajoint.slack.com).\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.9.13 ('ele')", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "vscode": { + "interpreter": { + "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/06-drop-optional.ipynb b/notebooks/06-drop-optional.ipynb new file mode 100644 index 0000000..d3dd74b --- /dev/null +++ b/notebooks/06-drop-optional.ipynb @@ -0,0 +1,96 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Drop schemas\n", + "\n", + "- Schemas are not typically dropped in a production workflow with real data in it.\n", + "- At the developmental phase, it might be required for the table redesign.\n", + "- When dropping all schemas is needed, the following is the dependency order.\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Change into the parent directory to find the `dj_local_conf.json` file.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + 
"import os\n", + "\n", + "if os.path.basename(os.getcwd()) == \"notebooks\":\n", + " os.chdir(\"..\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from workflow_optogenetics.pipeline import (\n", + " trial,\n", + " opto,\n", + " Device,\n", + " surgery,\n", + " event,\n", + " session,\n", + " subject,\n", + " lab,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# trial.schema.drop()\n", + "# opto.schema.drop()\n", + "# Device.drop_quick()\n", + "# surgery.schema.drop()\n", + "# event.schema.drop()\n", + "# session.schema.drop()\n", + "# subject.schema.drop()\n", + "# lab.schema.drop()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "ele", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "vscode": { + "interpreter": { + "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/notebooks/py_scripts/01-configure.py b/notebooks/py_scripts/01-configure.py new file mode 100644 index 0000000..ebb0006 --- /dev/null +++ b/notebooks/py_scripts/01-configure.py @@ -0,0 +1,82 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: light +# format_version: '1.5' +# jupytext_version: 1.14.1 +# kernelspec: +# display_name: Python 3.9.13 ('ele') +# language: python +# name: python3 +# --- + +# # DataJoint configuration +# +# ## Setup - Working Directory +# +# To run the workflow, we need to properly set up the DataJoint configuration. 
The configuration can be saved in a local directory as `dj_local_conf.json` or at your root directory as a hidden file. This notebook walks you through the setup process. +# +# **The configuration only needs to be set up once**, if you have gone through the configuration before, directly go to [02-workflow-structure](02-workflow-structure-optional.ipynb). +# + +# + +import os + +import datajoint as dj + +if os.path.basename(os.getcwd()) == "notebooks": + os.chdir("..") + +# - + +# ## Setup - Credentials +# +# Now let's set up the host, user and password in the `dj.config` global variable +# + +# + +import getpass + +dj.config["database.host"] = "{YOUR_HOST}" # CodeBook users should omit this +dj.config["database.user"] = "{YOUR_USERNAME}" +dj.config["database.password"] = getpass.getpass() # enter the password securily + +# - + +# You should be able to connect to the database at this stage. +# + +dj.conn() + + +# ## Setup - `dj.config['custom']` +# +# The major component of the current workflow is Element Optogenetics (see [GitHub repository](https://github.com/datajoint/element-optogenetics) and [documentation](https://datajoint.com/docs/elements/element-optogenetics)). Many Elements require configurations in the field `custom` in `dj.config`: +# +# ### Database prefix +# +# Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, every schema created with the current workflow will start with `neuro_`, e.g. `neuro_lab`, `neuro_subject`, etc. +# +# The prefix could be configurated to your username in `dj.config` as follows. +# + +username_as_prefix = dj.config["database.user"] + "_" +dj.config["custom"] = {"database.prefix": username_as_prefix} + +# ## Save configuration +# +# We could save this as a file, either as a local json file, or a global file. Local configuration file is saved as `dj_local_conf.json` in current directory, which is great for project-specific settings. 
+# +# For first-time users, we recommend saving globally. This will create a hidden configuration file in your root directory, which will be loaded whenever there is no local version to override it. +# + +# dj.config.save_local() +dj.config.save_global() + + +# ## Next Step +# +# After the configuration, we will be able to run through the workflow with the [02-workflow-structure](02-workflow-structure-optional.ipynb) notebook. +# diff --git a/notebooks/py_scripts/02-workflow-structure-optional.py b/notebooks/py_scripts/02-workflow-structure-optional.py new file mode 100644 index 0000000..9fb2887 --- /dev/null +++ b/notebooks/py_scripts/02-workflow-structure-optional.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: light +# format_version: '1.5' +# jupytext_version: 1.14.1 +# kernelspec: +# display_name: Python 3.9.13 ('ele') +# language: python +# name: python3 +# --- + +# # Introduction to the workflow structure +# +# This notebook gives a brief overview of the workflow structure and introduces some useful DataJoint tools to facilitate the exploration. +# +# - DataJoint needs to be pre-configured before running this notebook, if you haven't set up the configuration, refer to notebook [01-configure](01-configure.ipynb). +# +# - If you are familar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow. +# +# - For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/) +# +# To load the local configuration, we will change the directory to the package root. 
+# + +# + +import os + +if os.path.basename(os.getcwd()) == "notebooks": + os.chdir("..") + +# - + +# ## Schemas and tables +# +# The current workflow is composed of multiple database schemas, each of them corresponds to a module within `workflow_optogenetics.pipeline` +# + +import datajoint as dj + +from workflow_optogenetics.pipeline import Device, lab, opto, session, subject, surgery + +# Each module contains a schema object that enables interaction with the schema in the database. +# + +# + Each module imported above corresponds to one schema inside the database. For example, `opto` corresponds to `neuro_opto` schema in the database. +opto.schema + +# - + +# Each table class in the module corresponds to a table in the schema in the database. +# + +# + Each datajoint table class inside the module corresponds to a table inside the schema. For example, the class `opto.OptoWaveform` corresponds to a table in the schema `neuro_opto` in the database. +# preview columns and contents in a table +opto.OptoWaveform() + + +# + The first time importing the modules, empty schemas and tables will be created in the database. [markdown] +# By importing the modules for the first time, the schemas and tables will be created inside the database. +# +# Once created, importing modules will not create schemas and tables again, but the existing schemas/tables can be accessed and manipulated by the modules. +# +# + The schemas and tables will not be re-created when importing modules if they have existed. [markdown] +# ## DataJoint tools to explore schemas and tables +# +# `dj.list_schemas()`: list all schemas a user has access to in the current database +# +# + `dj.list_schemas()`: list all schemas a user could access. +dj.list_schemas() + +# - + +# `dj.Diagram()`: plot tables and dependencies in a schema. See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/). 
+# + +# + `dj.Diagram()`: plot tables and dependencies +# Plot diagram for all tables in a schema +dj.Diagram(opto) + + +# + `dj.Diagram()`: plot the diagram of the tables and dependencies. It could be used to plot tables in a schema or selected tables. +# Plot diagram of tables in multiple schemas. +# Adding and subtracting looks downstream and upstream respectively +dj.Diagram(surgery) + dj.Diagram(opto) - 1 + +# - + +# Plot diagram of selected tables and schemas +( + dj.Diagram(subject.Subject) + + dj.Diagram(session.Session) + + dj.Diagram(surgery.Implantation) + + dj.Diagram(opto.OptoProtocol) +) + + +# + `heading`: [markdown] +# `describe()`: show table definition with foreign key references. +# +# - +opto.OptoProtocol.describe() + + +# `heading`: show attribute definitions regardless of foreign key references +# + +# + `heading`: show table attributes regardless of foreign key references. +opto.OptoProtocol.heading + + +# + ephys [markdown] +# ## Elements in `workflow-optogenetics` +# +# [`lab`](https://datajoint.com/docs/elements/element-animal/): lab management related information, such as Lab, User, Project, Protocol, Source. +# +# - + +dj.Diagram(lab) + + +# [`subject`](https://datajoint.com/docs/elements/element-animal/): general animal metadata and surgery information + +dj.Diagram(subject) + + +# + [subject](https://github.com/datajoint/element-animal): contains the basic information of subject, including Strain, Line, Subject, Zygosity, and SubjectDeath information. +subject.Subject.describe() + +# - + +dj.Diagram(surgery) + +# [`session`](https://datajoint.com/docs/elements/element-session/): General information of experimental sessions. 
+# + +dj.Diagram(session) + + +# + [session](https://github.com/datajoint/element-session): experimental session information +session.Session.describe() + +# - + +# [`opto`](https://github.com/datajoint/element-optogenetics): Optogenetics stimulus and timing data +# + +# + [probe and ephys](https://github.com/datajoint/element-array-ephys): Neuropixel based probe and ephys tables +dj.Diagram(opto) + +# - + +# ## Summary and next step +# +# - This notebook introduced the overall structures of the schemas and tables in the workflow and relevant tools to explore the schema structure and table definitions. +# +# - In the next notebook [03-process](03-process.ipynb), we will introduce the detailed steps to run through `workflow-optogenetics`. +# diff --git a/notebooks/py_scripts/03-process.py b/notebooks/py_scripts/03-process.py new file mode 100644 index 0000000..23e7055 --- /dev/null +++ b/notebooks/py_scripts/03-process.py @@ -0,0 +1,287 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: light +# format_version: '1.5' +# jupytext_version: 1.14.1 +# kernelspec: +# display_name: Python 3.9.13 ('ele') +# language: python +# name: python3 +# --- + +# # Interactively run workflow optogenetics +# +# - This notebook walks you through the steps in detail to run the `workflow-optogenetics`. +# +# - If you haven't configured the paths, refer to [01-configure](01-configure.ipynb). +# +# - To overview the schema structures, refer to [02-workflow-structure](02-workflow-structure.ipynb). +# +# - For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/). +# + +# Let's change the directory to the package root directory to load the local configuration (`dj_local_conf.json`). 
+# + +# + +import os + +if os.path.basename(os.getcwd()) == "notebooks": + os.chdir("..") + +# - + +# ## `pipeline.py` +# +# This script `activates` the DataJoint Elements and declares other required tables. +# + +import datajoint as dj + +from workflow_optogenetics.pipeline import Device, lab, opto, session, subject, surgery + +# ## Schema diagrams +# +# See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/). +# + +( + dj.Diagram(subject.Subject) + + dj.Diagram(session.Session) + + dj.Diagram(surgery.Implantation) + + dj.Diagram(opto) +) + + +# ## Inserting data +# + +# ### `lab` schema +# +# `pipeline.py` adds a Device table to the `lab` schema. This table, like other `Lookup` tables, has default contents, but we can always add more. +# + +Device.insert1( + dict( + device="OPTG_8", + modality="Optogenetics", + description="8 channel pulse sequence device", + ) +) + + +lab.User.insert1( + dict(user="User1") +) # For the surgeon attribute in surgery.Implantation + + +# ### `subject` schema +# + +subject.Subject.heading + + +subject.Subject.insert1( + dict( + subject="subject3", + sex="F", + subject_birth_date="2020-03-03", + subject_description="Optogenetic pilot subject.", + ) +) + + +# In order to conduct optogenetic stimulation, our subject must have an implant in the target brain region. Again, some `Lookup` tables have useful default content. 
+# + +surgery.CoordinateReference() + + +surgery.Hemisphere() + + +# + +surgery.BrainRegion.insert1( + dict(region_acronym="dHP", region_name="Dorsal Hippocampus") +) +surgery.Implantation.insert1( + dict( + subject="subject3", + implant_date="2022-04-01 12:13:14", + implant_type="opto", + target_region="dHP", + target_hemisphere="left", + surgeon="user1", + ) +) + +surgery.Implantation.Coordinate.insert1( + dict( + subject="subject3", + implant_date="2022-04-01 12:13:14", + implant_type="opto", + target_region="dHP", + target_hemisphere="left", + ap="-7.9", # anterior-posterior distance in mm + ap_ref="bregma", + ml="-1.8", # medial axis distance in mm + ml_ref="bregma", + dv="5", # dorso-ventral axis distance in mm + dv_ref="skull_surface", + theta="11.5", # degree rotation about ml-axis [0, 180] wrt z + phi="0", # degree rotation about dv-axis [0, 360] wrt x + beta=None, # degree rotation about shank [-180, 180] wrt anterior + ) +) + +# - + +# ### Insert into `session` schema +# + +session.Session.describe() + + +session.Session.heading + + +session_key = dict( + subject="subject3", session_id="1", session_datetime="2022-04-04 12:13:14" +) +session.Session.insert1(session_key) +session.Session() + + +# ### Insert into `opto` schema +# + +# First, we'll add information to describe the stimulus, including waveform shape and and application parameters. +# + +opto.OptoWaveform.insert1( + dict( + waveform_name="square_10", + waveform_type="square", + waveform_description="Square waveform: 10%/90% on/off cycle", + ) +) +# Square is one part table of OptoWaveform. 
+# For sine and ramp waveforms, see the corresponding tables +opto.OptoWaveform.Square.insert1( + dict(waveform_name="square_10", on_proportion=0.10, off_proportion=0.90) +) + + +opto.OptoStimParams.insert1( + dict( + opto_params_id=1, + waveform_name="square_10", + wavelength=470, + light_intensity=10.2, + frequency=1, + duration=241, + ) +) + + +# Next, we'll describe the session in which these parameters were used with `OptoProtocol` +# + +opto.OptoProtocol.insert1( + dict( + subject="subject3", + session_id="1", + protocol_id="1", + opto_params_id="1", + implant_date="2022-04-01 12:13:14", + implant_type="opto", + target_region="dHP", + target_hemisphere="left", + device="OPTG_8", + ) +) + + +# We can describe the timing of these stimulations in `OptoEvent`. +# + +opto.OptoEvent.insert( + [ + dict( + subject="subject3", + session_id=1, + protocol_id=1, + stim_start_time=241, + stim_end_time=482, + ), + dict( + subject="subject3", + session_id=1, + protocol_id=1, + stim_start_time=482, + stim_end_time=723, + ), + ] +) + + +# To store more experimental timing information, see documentation for [Element Event](https://datajoint.com/docs/elements/element-event/). +# + +# ## Automating inserts +# +# This workflow provides functions for ingesting this information from csv files in `ingest.py`. +# +# - `ingest_subjects`: subject.Subject +# - `ingest_sessions`: session.Session +# - `ingest_events`: Element Event schemas +# - `ingest_opto`: surgery and opto schemas +# +# For more information on each, see the docstring. +# + +# + +from workflow_optogenetics.ingest import ingest_subjects + +help(ingest_subjects) + +# - + +# By default, these functions pull from files in the `user_data` directory. We can run each of these in succession with the default parameters with `ingest_all`. 
+# + +# + +from workflow_optogenetics.ingest import ingest_all + +ingest_all() + +# - + +# ## Events +# +# The above `ingest_all()` also added behavioral events we can example in conjunction with optogenetic events. For convenience, these stimulation events are also reflected in the Block design of Element Event. +# + +# + +from workflow_optogenetics.pipeline import event, trial + +events_by_block = ( + trial.BlockTrial * trial.TrialEvent * trial.Block.Attribute + & "attribute_name='stimulation'" +) +events_by_block +# - + +# We can readily compare the count of events or event types across 'on' and 'off' stimulation conditions. + +events_by_block & "attribute_value='on'" + +events_by_block & "attribute_value='off'" + +# ## Next Steps +# +# Interested in using Element Optogenetics for your own project? Reach out to the DataJoint team via [email](mailto:support@datajoint.com) or [Slack](https://datajoint.slack.com). +# diff --git a/notebooks/py_scripts/06-drop-optional.py b/notebooks/py_scripts/06-drop-optional.py new file mode 100644 index 0000000..6894599 --- /dev/null +++ b/notebooks/py_scripts/06-drop-optional.py @@ -0,0 +1,51 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: light +# format_version: '1.5' +# jupytext_version: 1.14.4 +# kernelspec: +# display_name: ele +# language: python +# name: python3 +# --- + +# # Drop schemas +# +# - Schemas are not typically dropped in a production workflow with real data in it. +# - At the developmental phase, it might be required for the table redesign. +# - When dropping all schemas is needed, the following is the dependency order. +# + +# Change into the parent directory to find the `dj_local_conf.json` file. 
+# + +# + +import os + +if os.path.basename(os.getcwd()) == "notebooks": + os.chdir("..") +# - + +from workflow_optogenetics.pipeline import ( + Device, + event, + lab, + opto, + session, + subject, + surgery, + trial, +) + +# + +# trial.schema.drop() +# opto.schema.drop() +# Device.drop_quick() +# surgery.schema.drop() +# event.schema.drop() +# session.schema.drop() +# subject.schema.drop() +# lab.schema.drop() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..99b43e4 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +datajoint>=0.13.0 +element-animal>=0.1.2 +element-event>=0.1.2 +element-interface>=0.3.0 +element-lab>=0.1.1 +element-optogenetics>=0.1.0 +element-session>=0.1.2 +ipykernel>=6.0.1 +jupytext>=1.13.7 diff --git a/requirements_dev.txt b/requirements_dev.txt new file mode 100644 index 0000000..9955dec --- /dev/null +++ b/requirements_dev.txt @@ -0,0 +1,2 @@ +pytest +pytest-cov diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..707c52d --- /dev/null +++ b/setup.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +from os import path + +from setuptools import find_packages, setup + +pkg_name = "workflow_optogenetics" +here = path.abspath(path.dirname(__file__)) + +long_description = """" +# Workflow for optogenetics research. 
+ +Build a complete imaging workflow using the DataJoint Elements ++ [element-lab](https://github.com/datajoint/element-lab) ++ [element-animal](https://github.com/datajoint/element-animal) ++ [element-session](https://github.com/datajoint/element-session) ++ [element-event](https://github.com/datajoint/element-event) ++ [element-optogenetics](https://github.com/datajoint/element-optogenetics) +""" + +with open(path.join(here, "requirements.txt")) as f: + requirements = f.read().splitlines() + +with open(path.join(here, pkg_name, "version.py")) as f: + exec(f.read()) + +setup( + name="workflow-optogenetics", + version=__version__, # noqa: F821 + description="Optogenetics workflow using the DataJoint elements", + long_description=long_description, + long_description_content_type="text/markdown", + author="DataJoint", + author_email="info@datajoint.com", + license="MIT", + url="https://github.com/datajoint/workflow-optogenetics", + keywords="neuroscience datajoint optogenetics", + packages=find_packages(exclude=["contrib", "docs", "tests*"]), + install_requires=requirements, +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..bc2505c --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,5 @@ +""" +run all: pytest tests/ +run one: pytest --pdb tests/tests_name.py -k function_name +options: pytest --dj-verbose True --dj-teardown True --dj-datadir './tests/user_data' +""" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..4a61fc5 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,268 @@ +import logging +import os +import sys +from contextlib import nullcontext +from pathlib import Path + +import datajoint as dj +import pytest +from element_interface.utils import ingest_csv_to_table + +from workflow_optogenetics.ingest import ( + ingest_all, + ingest_events, + ingest_opto, + ingest_sessions, + ingest_subjects, +) +from workflow_optogenetics.paths import get_opto_root_data_dir + +__all__ = [ + 
 "ingest_all", + "ingest_events", + "ingest_opto", + "ingest_subjects", + "ingest_sessions", + "get_opto_root_data_dir", +] + +# ---------------------- CONSTANTS --------------------- + +logger = logging.getLogger("datajoint") + + +def pytest_addoption(parser): + """ + Permit constants when calling pytest at commandline e.g., pytest --dj-verbose False + + Parameters + ---------- + --dj-verbose (bool): Default True. Pass print statements from Elements. + --dj-teardown (bool): Default True. Delete pipeline on close. + --dj-datadir (str): Default ./tests/user_data. Relative path of test CSV data. + """ + parser.addoption( + "--dj-verbose", + action="store", + default="True", + help="Verbose for dj items: True or False", + choices=("True", "False"), + ) + parser.addoption( + "--dj-teardown", + action="store", + default="True", + help="Teardown for dj items: True or False", + choices=("True", "False"), + ) + parser.addoption( + "--dj-datadir", + action="store", + default="./tests/user_data", + help="Relative path for saving tests data", + ) + + +@pytest.fixture(scope="session") +def setup(request): + """Take passed commandline variables, set as global""" + global verbose, _tear_down, test_user_data_dir, verbose_context + + verbose = str_to_bool(request.config.getoption("--dj-verbose")) + _tear_down = str_to_bool(request.config.getoption("--dj-teardown")) + test_user_data_dir = Path(request.config.getoption("--dj-datadir")) + test_user_data_dir.mkdir(exist_ok=True) + + verbose_context = nullcontext() if verbose else QuietStdOut() + + yield verbose_context, verbose + + +# ------------------ GENERAL FUNCTION ------------------ + + +def str_to_bool(value) -> bool: + """Return whether the provided string represents true. Otherwise false. 
+ + Args: + value (any): Any input + + Returns: + bool (bool): True if value in ("y", "yes", "t", "true", "on", "1") + """ + # Due to distutils equivalent depreciation in 3.10 + # Adopted from github.com/PostHog/posthog/blob/master/posthog/utils.py + if not value: + return False + return str(value).lower() in ("y", "yes", "t", "true", "on", "1") + + +def write_csv(path, content): + """General function for writing strings to lines in CSV + + Args: + path: pathlib PosixPath + content: list of strings, each as row of CSV + """ + with open(path, "w") as f: + for line in content: + f.write(line + "\n") + + +class QuietStdOut: + """If verbose set to false, used to quiet tear_down table.delete prints""" + + def __enter__(self): + logger.setLevel("WARNING") + self._original_stdout = sys.stdout + sys.stdout = open(os.devnull, "w") + + def __exit__(self, exc_type, exc_val, exc_tb): + logger.setLevel("INFO") + sys.stdout.close() + sys.stdout = self._original_stdout + + +# ------------------- FIXTURES ------------------- + + +@pytest.fixture(autouse=True, scope="session") +def dj_config(): + """If dj_local_config exists, load""" + if Path("./dj_local_conf.json").exists(): + dj.config.load("./dj_local_conf.json") + + dj.config.update( + { + "safemode": False, + "database.host": os.environ.get("DJ_HOST") or dj.config["database.host"], + "database.password": os.environ.get("DJ_PASS") + or dj.config["database.password"], + "database.user": os.environ.get("DJ_USER") or dj.config["database.user"], + "custom": { + "database.prefix": os.environ.get("DATABASE_PREFIX") + or dj.config["custom"]["database.prefix"], + "dlc_root_data_dir": os.environ.get("DLC_ROOT_DATA_DIR") + or dj.config["custom"]["dlc_root_data_dir"], + }, + } + ) + + return + + +@pytest.fixture(scope="session") +def pipeline(setup): + """Loads workflow_optogenetics.pipeline lab, session, subject, dlc""" + with verbose_context: + from workflow_optogenetics import pipeline + + yield { + "lab": pipeline.lab, + "subject": 
pipeline.subject, + "surgery": pipeline.surgery, + "session": pipeline.session, + "opto": pipeline.opto, + "Device": pipeline.Device, + } + if _tear_down: + with verbose_context: + pipeline.opto.OptoWaveform.delete() + pipeline.surgery.BrainRegion.delete() + pipeline.subject.Subject.delete() + pipeline.session.Session.delete() + pipeline.lab.User.delete() + + +@pytest.fixture(scope="session") +def ingest_csvs(setup, pipeline): + """For each input, generates csv in test_user_data_dir and ingests in schema""" + # CSV as list of 3: filename, relevant tables, content + all_csvs = { + "subjects.csv": { + "tables": [pipeline["subject"].Subject()], + "content": [ + "subject,sex,subject_birth_date,subject_description", + "subject3,F,2022-03-03,Optogenetic pilot subject", + ], + }, + "sessions.csv": { + "tables": [pipeline["session"].Session()], + "content": [ + "subject,session_dir,session_id,session_datetime", + "subject3,subject3/opto_session1/,1,2022-04-04 12:13:14", + ], + }, + "opto_waveforms.csv": { + "tables": [ + pipeline["opto"].OptoWaveform(), + pipeline["opto"].OptoWaveform.Square(), + pipeline["opto"].OptoStimParams(), + ], + "content": [ + "waveform_type,waveform_name,waveform_description,on_proportion," + + "off_proportion,opto_params_id,wavelength,light_intensity,frequency," + + "duration", + "square,square_10,Square waveform with 10-90 on-off cycle,.10," + + ".90,1,470,10.2,1,241", + ], + }, + "opto_surgeries.csv": { + "tables": [ + pipeline["surgery"].CoordinateReference(), + pipeline["surgery"].BrainRegion(), + pipeline["lab"].User(), + pipeline["surgery"].Implantation(), + pipeline["surgery"].Implantation.Coordinate(), + ], + "content": [ + "subject,implant_date,reference,region_acronym,region_name,hemisphere," + + "implant_type,ap,ap_ref,ml,ml_ref,dv,dv_ref,theta,phi,user,surgeon," + + "target_region,target_hemisphere", + "subject3,2022-04-01 12:13:14,bregma,dHP,Dorsal Hippocampus,left," + + 
 "opto,-7.9,bregma,-1.8,bregma,5,skull_surface,11.5,0,user1,user1," + + "dHP,left", + ], + }, + "opto_sessions.csv": { + "tables": [ + pipeline["opto"].OptoProtocol(), + ], + "content": [ + "subject,session_id,protocol_id,opto_params_id,implant_date," + + "implant_type,target_region,target_hemisphere", + "subject3,1,1,1,2022-04-01 12:13:14,opto,dHP,left", + ], + }, + "opto_events.csv": { + "tables": [ + pipeline["opto"].OptoEvent(), + ], + "content": [ + "subject,session_id,protocol_id,stim_start_time,stim_end_time", + "subject3,1,1,241,482", + "subject3,1,1,482,723", + ], + }, + } + # If data in last table, presume didn't tear down last time, skip insert + if len(pipeline["opto"].OptoEvent()) == 0: + for csv_filename, csv_dict in all_csvs.items(): + csv_path = test_user_data_dir / csv_filename # add prefix for rel path + write_csv(csv_path, csv_dict["content"]) # write content at rel path + # repeat csv path n times as list to match n tables + csv_path_as_list = [str(csv_path)] * len(csv_dict["tables"]) + ingest_csv_to_table( # insert csv content into each of n tables + csv_path_as_list, + csv_dict["tables"], + skip_duplicates=True, + verbose=verbose, + ) + + yield + + if _tear_down: + with verbose_context: + for csv_info in all_csvs: + csv_path = test_user_data_dir / csv_info + csv_path.unlink() diff --git a/tests/test_ingest.py b/tests/test_ingest.py new file mode 100644 index 0000000..e52eb95 --- /dev/null +++ b/tests/test_ingest.py @@ -0,0 +1,26 @@ +"""Tests ingestion into schema tables: Lab, Subject, Session + 1. Assert length of populating data conftest + 2. 
 Assert exact matches of inserted data for key tables +""" + + +def test_ingest(pipeline, ingest_csvs): + """Check successful ingestion of csv data""" + import datetime + + subject = pipeline["subject"] + session = pipeline["session"] + surgery = pipeline["surgery"] + opto = pipeline["opto"] + + table_lengths = [ + (subject.Subject(), 1, "subject3"), + (session.Session(), 1, datetime.datetime(2022, 4, 4, 12, 13, 14)), + (surgery.Implantation.Coordinate(), 1, 11.5), + (opto.OptoStimParams(), 1, "square_10"), + (opto.OptoEvent(), 2, 482), + ] + + for t in table_lengths: + assert len(t[0]) == t[1], f"Check length of {t[0].full_table_name}" + assert t[2] in t[0].fetch()[0], f"Check contents of {t[0].full_table_name}" diff --git a/tests/test_pipeline_generation.py b/tests/test_pipeline_generation.py new file mode 100644 index 0000000..7860276 --- /dev/null +++ b/tests/test_pipeline_generation.py @@ -0,0 +1,28 @@ +def test_upstream_pipeline(pipeline): + session = pipeline["session"] + surgery = pipeline["surgery"] + subject = pipeline["subject"] + + # test connection Subject->Session + assert subject.Subject.full_table_name == session.Session.parents()[0] + assert subject.Subject.full_table_name in surgery.Implantation.parents() + + +def test_opto_pipeline(pipeline): + session = pipeline["session"] + surgery = pipeline["surgery"] + opto = pipeline["opto"] + Device = pipeline["Device"] + + # test connection opto.OptoProtocol -> its parent tables + opto_parent_links = opto.OptoProtocol.parents() + opto_parent_list = [ + session.Session, + opto.OptoStimParams, + surgery.Implantation, + Device, + ] + for parent in opto_parent_list: + assert ( + parent.full_table_name in opto_parent_links + ), f"opto.OptoProtocol.parents() did not include {parent.full_table_name}" diff --git a/user_data/behavior_recordings.csv b/user_data/behavior_recordings.csv new file mode 100644 index 0000000..9fc66db --- /dev/null +++ b/user_data/behavior_recordings.csv @@ -0,0 +1,6 @@ 
+subject,session_id,filepath +subject3,1,./user_data/blocks.csv +subject3,1,./user_data/trials.csv +subject3,1,./user_data/events.csv +subject3,1,./user_data/opto_session.csv +subject3,1,./user_data/opto_surgery.csv diff --git a/user_data/blocks.csv b/user_data/blocks.csv new file mode 100644 index 0000000..755e6c5 --- /dev/null +++ b/user_data/blocks.csv @@ -0,0 +1,9 @@ +subject,session_id,block_id,block_start_time,block_stop_time,attribute_name,attribute_value +subject3,1,1,0,241,stimulation,off +subject3,1,2,241,482,stimulation,on +subject3,1,3,482,723,stimulation,on +subject3,1,4,723,964,stimulation,off +subject3,1,1,0,241,environment,light +subject3,1,2,241,482,environment,dark +subject3,1,3,482,723,environment,light +subject3,1,4,723,964,environment,dark diff --git a/user_data/events.csv b/user_data/events.csv new file mode 100644 index 0000000..457abfc --- /dev/null +++ b/user_data/events.csv @@ -0,0 +1,38 @@ +subject,session_id,trial_id,event_id,event_start_time,event_type +subject3,1,1,1,4.864,center +subject3,1,1,2,10.192,center +subject3,1,1,3,17.865,center +subject3,1,3,1,49.536,left +subject3,1,3,2,59.128,center +subject3,1,3,3,64.453,left +subject3,1,4,1,73.844,right +subject3,1,4,2,79.506,right +subject3,1,4,3,90.447,center +subject3,1,5,1,101.14,left +subject3,1,5,2,108.499,right +subject3,1,9,1,192.237,right +subject3,1,10,1,216.542,center +subject3,1,22,1,509.98,center +subject3,1,23,1,534.82,center +subject3,1,24,1,555.033,center +subject3,1,26,1,605.523,center +subject3,1,29,1,673.068,center +subject3,1,31,1,725.153,left +subject3,1,33,1,771.223,center +subject3,1,33,2,778.669,left +subject3,1,33,3,789.797,left +subject3,1,34,1,798.334,center +subject3,1,34,2,804.33,left +subject3,1,35,1,818.922,left +subject3,1,35,2,831.301,left +subject3,1,35,3,836.522,center +subject3,1,36,1,847.933,center +subject3,1,36,2,852.572,left +subject3,1,37,1,870.121,center +subject3,1,37,2,877.152,right +subject3,1,37,3,881.91,right +subject3,1,39,1,917.759,center 
+subject3,1,39,2,923.011,center +subject3,1,40,1,940.588,center +subject3,1,40,2,944.6,right +subject3,1,40,3,954.637,left diff --git a/user_data/opto_events.csv b/user_data/opto_events.csv new file mode 100644 index 0000000..be34d4e --- /dev/null +++ b/user_data/opto_events.csv @@ -0,0 +1,3 @@ +subject,session_id,protocol_id,stim_start_time,stim_end_time +subject3,1,1,241,482 +subject3,1,1,482,723 diff --git a/user_data/opto_sessions.csv b/user_data/opto_sessions.csv new file mode 100644 index 0000000..b44ceb6 --- /dev/null +++ b/user_data/opto_sessions.csv @@ -0,0 +1,2 @@ +subject,session_id,protocol_id,opto_params_id,implant_date,implant_type,target_region,target_hemisphere +subject3,1,1,1,2022-04-01 12:13:14,opto,dHP,left diff --git a/user_data/opto_surgeries.csv b/user_data/opto_surgeries.csv new file mode 100644 index 0000000..f61f3b3 --- /dev/null +++ b/user_data/opto_surgeries.csv @@ -0,0 +1,2 @@ +subject,implant_date,reference,region_acronym,region_name,hemisphere,implant_type,ap,ap_ref,ml,ml_ref,dv,dv_ref,theta,phi,user,surgeon,target_region,target_hemisphere +subject3,2022-04-01 12:13:14,bregma,dHP,Dorsal Hippocampus,left,opto,-7.9,bregma,-1.8,bregma,5,skull_surface,11.5,0,user1,user1,dHP,left diff --git a/user_data/opto_waveforms.csv b/user_data/opto_waveforms.csv new file mode 100644 index 0000000..077ece6 --- /dev/null +++ b/user_data/opto_waveforms.csv @@ -0,0 +1,2 @@ +waveform_type,waveform_name,waveform_description,on_proportion,off_proportion,opto_params_id,wavelength,light_intensity,frequency,duration +square,square_10,Square waveform with 10-90 on-off cycle,.10,.90,1,470,10.2,1,241 diff --git a/user_data/sessions.csv b/user_data/sessions.csv new file mode 100644 index 0000000..8db7b37 --- /dev/null +++ b/user_data/sessions.csv @@ -0,0 +1,2 @@ +subject,session_dir,session_id,session_datetime +subject3,subject3/opto_session1/,1,2022-04-04 12:13:14 diff --git a/user_data/subjects.csv b/user_data/subjects.csv new file mode 100644 index 
0000000..47510f9 --- /dev/null +++ b/user_data/subjects.csv @@ -0,0 +1,2 @@ +subject,sex,subject_birth_date,subject_description +subject3,F,2022-03-03,Optogenetic pilot subject diff --git a/user_data/trials.csv b/user_data/trials.csv new file mode 100644 index 0000000..df14974 --- /dev/null +++ b/user_data/trials.csv @@ -0,0 +1,41 @@ +subject,session_id,block_id,trial_id,trial_start_time,trial_stop_time,trial_type,attribute_name,attribute_value +subject3,1,1,1,0.393,20.393,stim,lumen,887 +subject3,1,1,2,24.187,44.187,ctrl,lumen,994 +subject3,1,1,3,47.747,67.747,stim,lumen,887 +subject3,1,1,4,71.615,91.615,stim,lumen,966 +subject3,1,1,5,95.56,115.56,stim,lumen,559 +subject3,1,1,6,119.373,139.373,ctrl,lumen,555 +subject3,1,1,7,143.113,163.113,stim,lumen,849 +subject3,1,1,8,166.702,186.702,ctrl,lumen,593 +subject3,1,1,9,190.539,210.539,ctrl,lumen,548 +subject3,1,1,10,214.382,234.382,ctrl,lumen,974 +subject3,1,2,11,241.255,261.255,stim,lumen,0 +subject3,1,2,12,264.76,284.76,ctrl,lumen,0 +subject3,1,2,13,288.591,308.591,ctrl,lumen,0 +subject3,1,2,14,312.1,332.1,stim,lumen,0 +subject3,1,2,15,336.024,356.024,ctrl,lumen,0 +subject3,1,2,16,359.8,379.8,stim,lumen,0 +subject3,1,2,17,383.357,403.357,ctrl,lumen,0 +subject3,1,2,18,407.129,427.129,ctrl,lumen,0 +subject3,1,2,19,431.011,451.011,ctrl,lumen,0 +subject3,1,2,20,454.87,474.87,stim,lumen,0 +subject3,1,3,21,482.043,502.043,ctrl,lumen,775 +subject3,1,3,22,505.609,525.609,stim,lumen,887 +subject3,1,3,23,529.228,549.228,ctrl,lumen,972 +subject3,1,3,24,552.769,572.769,stim,lumen,963 +subject3,1,3,25,576.278,596.278,ctrl,lumen,682 +subject3,1,3,26,600.029,620.029,stim,lumen,746 +subject3,1,3,27,623.997,643.997,stim,lumen,793 +subject3,1,3,28,647.847,667.847,stim,lumen,995 +subject3,1,3,29,671.532,691.532,ctrl,lumen,866 +subject3,1,3,30,695.186,715.186,ctrl,lumen,501 +subject3,1,4,31,723.47,743.47,ctrl,lumen,0 +subject3,1,4,32,747.186,767.186,stim,lumen,0 +subject3,1,4,33,771.171,791.171,ctrl,lumen,0 
+subject3,1,4,34,794.755,814.755,stim,lumen,0 +subject3,1,4,35,818.322,838.322,stim,lumen,0 +subject3,1,4,36,842.046,862.046,ctrl,lumen,0 +subject3,1,4,37,865.999,885.999,stim,lumen,0 +subject3,1,4,38,889.587,909.587,stim,lumen,0 +subject3,1,4,39,913.564,933.564,stim,lumen,0 +subject3,1,4,40,937.31,957.31,stim,lumen,0 diff --git a/workflow_optogenetics/__init__.py b/workflow_optogenetics/__init__.py new file mode 100644 index 0000000..111bd2f --- /dev/null +++ b/workflow_optogenetics/__init__.py @@ -0,0 +1,6 @@ +import datajoint as dj + +if "custom" not in dj.config: + dj.config["custom"] = {} + +db_prefix = dj.config["custom"].get("database.prefix", "") diff --git a/workflow_optogenetics/ingest.py b/workflow_optogenetics/ingest.py new file mode 100644 index 0000000..a4de7f2 --- /dev/null +++ b/workflow_optogenetics/ingest.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python +from element_interface.utils import ingest_csv_to_table + +from .pipeline import event, lab, opto, session, subject, surgery, trial + +___all__ = [ + "subject", + "surgery", + "session", + "trial", + "event", + "opto", + "ingest_csv_to_table", + "ingest_subjects", + "ingest_sessions", + "ingest_events", + "ingest_opto", + "ingest_all", +] + + +def ingest_subjects( + subject_csv_path: str = "./user_data/subjects.csv", + skip_duplicates: bool = True, + verbose: bool = True, +): + """Ingest subjects listed in the subject column of ./user_data/subjects.csv + + Args: + subject_csv_path (str, optional): Relative path to subject csv. + Defaults to "./user_data/subjects.csv". + skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. + Defaults to True. + verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. 
+ """ + csvs = [subject_csv_path] + tables = [subject.Subject()] + + ingest_csv_to_table(csvs, tables, skip_duplicates=skip_duplicates, verbose=verbose) + + +def ingest_sessions( + session_csv_path: str = "./user_data/sessions.csv", + skip_duplicates: bool = True, + verbose: bool = True, +): + """Ingest sessions from CSV. Defaults to ./user_data/subjects.csv + + Args: + session_csv_path (str, optional): Relative path to sessions CSV. + Defaults to "./user_data/sessions.csv". + skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. + Defaults to True. + verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. + """ + + csvs = [session_csv_path] + tables = [session.Session()] + + ingest_csv_to_table(csvs, tables, skip_duplicates=skip_duplicates, verbose=verbose) + + +def ingest_events( + recording_csv_path: str = "./user_data/behavior_recordings.csv", + block_csv_path: str = "./user_data/blocks.csv", + trial_csv_path: str = "./user_data/trials.csv", + event_csv_path: str = "./user_data/events.csv", + skip_duplicates: bool = True, + verbose: bool = True, +): + """Ingest trial structure: blocks, trials, events + + A recording is one or more blocks (i.e., phases of trials), with trials (repeated + units). Events are optionally-instantaneous occurrences within trial. This ingestion + function is duplicated across multiple DataJoint workflow repositories. + + Args: + recording_csv_path (str, optional): Relative path to recording CSV. + Defaults to "./user_data/behavior_recordings.csv". + block_csv_path (str, optional): Relative path to block CSV. + Defaults to "./user_data/blocks.csv". + trial_csv_path (str, optional): Relative path to trial CSV. + Defaults to "./user_data/trials.csv". + event_csv_path (str, optional): Relative path to event CSV. + Defaults to "./user_data/events.csv". + skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. + Defaults to True. 
+ verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. + """ + csvs = [ + recording_csv_path, + recording_csv_path, + block_csv_path, + block_csv_path, + trial_csv_path, + trial_csv_path, + trial_csv_path, + trial_csv_path, + event_csv_path, + event_csv_path, + event_csv_path, + ] + tables = [ + event.BehaviorRecording(), + event.BehaviorRecording.File(), + trial.Block(), + trial.Block.Attribute(), + trial.TrialType(), + trial.Trial(), + trial.Trial.Attribute(), + trial.BlockTrial(), + event.EventType(), + event.Event(), + trial.TrialEvent(), + ] + + # Allow direct insert required bc element-trial has Imported tables + ingest_csv_to_table( + csvs, + tables, + skip_duplicates=skip_duplicates, + verbose=verbose, + allow_direct_insert=True, + ) + + +def ingest_opto( + opto_surgery_csv_path: str = "./user_data/opto_surgeries.csv", + opto_session_csv_path: str = "./user_data/opto_sessions.csv", + opto_events_csv_path: str = "./user_data/opto_events.csv", + waveform_csv_path: str = "./user_data/opto_waveforms.csv", + skip_duplicates: bool = True, + verbose: bool = True, +): + """Ingest optogenetic stimulation and protocol information. + + Args: + opto_surgery_csv_path (str, optional): Relative path to implantation info CSV. + Defaults to "./user_data/opto_surgeries.csv". + opto_session_csv_path (str, optional): Relative path to CSV with opto session + information. Defaults to "./user_data/opto_sessions.csv". + opto_events_csv_path (str, optional): Relative path to opto events CSV. + Defaults to "./user_data/opto_events.csv". + waveform_csv_path (str, optional): Relative path to waveform info CSV. + Defaults to "./user_data/opto_waveforms.csv". + skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. + Defaults to True. + verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. 
+ """ + csvs = [ + waveform_csv_path, # 1 + waveform_csv_path, # 2 + waveform_csv_path, # 3 + opto_surgery_csv_path, # 4 + opto_surgery_csv_path, # 5 + opto_surgery_csv_path, # 6 + opto_surgery_csv_path, # 7 + opto_surgery_csv_path, # 8 + opto_session_csv_path, # 9 + opto_events_csv_path, # 10 + ] + tables = [ + opto.OptoWaveform(), # 1 + opto.OptoWaveform.Square(), # 2 + opto.OptoStimParams(), # 3 + surgery.CoordinateReference(), # 4 + surgery.BrainRegion(), # 5 + lab.User(), # 6 + surgery.Implantation(), # 7 + surgery.Implantation.Coordinate(), # 8 + opto.OptoProtocol(), # 9 + opto.OptoEvent(), # 10 + ] + + ingest_csv_to_table(csvs, tables, skip_duplicates=skip_duplicates, verbose=verbose) + + +def ingest_all(skip_duplicates: bool = True, verbose: bool = True): + """Run all available available ingestion functions""" + ingest_subjects(skip_duplicates=skip_duplicates, verbose=verbose) + ingest_sessions(skip_duplicates=skip_duplicates, verbose=verbose) + ingest_events(skip_duplicates=skip_duplicates, verbose=verbose) + ingest_opto(skip_duplicates=skip_duplicates, verbose=verbose) + + +if __name__ == "__main__": + ingest_all() diff --git a/workflow_optogenetics/paths.py b/workflow_optogenetics/paths.py new file mode 100644 index 0000000..281fffc --- /dev/null +++ b/workflow_optogenetics/paths.py @@ -0,0 +1,8 @@ +import pathlib + +import datajoint as dj + + +def get_opto_root_data_dir(): + data_dir = dj.config.get("custom", {}).get("opto_root_data_dir", None) + return pathlib.Path(data_dir) if data_dir else None diff --git a/workflow_optogenetics/pipeline.py b/workflow_optogenetics/pipeline.py new file mode 100644 index 0000000..de2ce7d --- /dev/null +++ b/workflow_optogenetics/pipeline.py @@ -0,0 +1,48 @@ +from element_animal import subject, surgery +from element_animal.subject import Subject # Dependency for session schema +from element_animal.surgery import Implantation # Dependency for opto schema +from element_event import event, trial +from element_lab import 
lab +from element_lab.lab import User # Alias for session schema +from element_lab.lab import Lab, Project, Protocol, Source +from element_optogenetics import optogenetics as opto +from element_session import session_with_id as session +from element_session.session_with_id import Session + +from . import db_prefix +from .paths import get_opto_root_data_dir +from .reference import Device + +__all__ = [ + "event", + "lab", + "opto", + "session", + "subject", + "surgery", + "trial", + "Device", + "Implantation", + "Lab", + "Project", + "Protocol", + "Session", + "Source", + "Subject", + "User", + "get_opto_root_data_dir", +] + + +# Activate "lab", "subject", "surgery", "session", "event", "trial" schemas ------- + +lab.activate(db_prefix + "lab") +subject.activate(db_prefix + "subject", linking_module=__name__) +surgery.activate(db_prefix + "surgery", linking_module=__name__) +session.activate(db_prefix + "session", linking_module=__name__) + +trial.activate(db_prefix + "trial", db_prefix + "event", linking_module=__name__) + +# ------------- Activate "opto" schema ------------- + +opto.activate(db_prefix + "opto", linking_module=__name__) diff --git a/workflow_optogenetics/reference.py b/workflow_optogenetics/reference.py new file mode 100644 index 0000000..0a64e8d --- /dev/null +++ b/workflow_optogenetics/reference.py @@ -0,0 +1,26 @@ +import datajoint as dj + +from . import db_prefix + +schema = dj.Schema(db_prefix + "reference") + + +@schema +class Device(dj.Lookup): + """Table for managing lab devices. + + Attributes: + device ( varchar(32) ): Device short name. + modality ( varchar(64) ): Modality for which this device is used. + description ( varchar(256), optional ): Description of device. 
+ """ + + definition = """ + device : varchar(32) + --- + modality : varchar(64) + description='' : varchar(256) + """ + contents = [ + ["OPTG_4", "Optogenetics", "Doric Pulse Sequence Generator"], + ] diff --git a/workflow_optogenetics/version.py b/workflow_optogenetics/version.py new file mode 100644 index 0000000..ee1b0bb --- /dev/null +++ b/workflow_optogenetics/version.py @@ -0,0 +1,5 @@ +""" +Package metadata +Update the Docker image tag in `docker-compose.yaml` to match +""" +__version__ = "0.1.0" From 2f67dcc5ec284f66b2077b9b0c13d32390e7aa40 Mon Sep 17 00:00:00 2001 From: Chris Brozdowski Date: Tue, 21 Feb 2023 09:07:07 -0600 Subject: [PATCH 02/58] Update .github/ISSUE_TEMPLATE/config.yml Co-authored-by: Kabilar Gunalan --- .github/ISSUE_TEMPLATE/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index d31fbac..b3d197d 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,5 @@ blank_issues_enabled: false contact_links: - name: DataJoint Contribution Guideline - url: https://docs.datajoint.org/python/community/02-Contribute.html + url: https://datajoint.com/docs/community/contribute/ about: Please make sure to review the DataJoint Contribution Guidelines \ No newline at end of file From 1c57a6e5b7e756bd96c10367a1f69d1d65cd0be6 Mon Sep 17 00:00:00 2001 From: Chris Brozdowski Date: Wed, 22 Feb 2023 10:08:47 -0600 Subject: [PATCH 03/58] Apply suggestions from code review Co-authored-by: Kabilar Gunalan --- README.md | 2 +- docker/Dockerfile.test | 2 -- docker/setup.sh | 6 +----- 3 files changed, 2 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 5eb9d33..b1eb0ee 100644 --- a/README.md +++ b/README.md @@ -9,4 +9,4 @@ combined with other Elements to assemble a fully functional pipeline. 
Installation and usage instructions can be found at the [Element documentation](datajoint.com/docs/elements/element-optogenetics). -![element-Optogenetics diagram](https://raw.githubusercontent.com/datajoint/element-Optogenetics/main/images/diagram_opto.svg) +![diagram](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/diagram_flowchart.svg) diff --git a/docker/Dockerfile.test b/docker/Dockerfile.test index 33951a0..82a08b3 100755 --- a/docker/Dockerfile.test +++ b/docker/Dockerfile.test @@ -9,8 +9,6 @@ RUN /entrypoint.sh echo "Installed dependencies." WORKDIR /main/workflow-optogenetics # Always get interface/djarchive -RUN pip install --no-deps "element-interface@git+https://github.com/datajoint/element-interface" -RUN pip install --no-deps "djarchive-client@git+https://github.com/datajoint/djarchive-client" # Always move local - conditional install in setup.sh COPY --chown=anaconda:anaconda ./element-lab/ /main/element-lab/ diff --git a/docker/setup.sh b/docker/setup.sh index 68ef81c..cb0d29c 100644 --- a/docker/setup.sh +++ b/docker/setup.sh @@ -1,7 +1,7 @@ #! 
/bin/bash export $(grep -v '^#' /main/.env | xargs) -echo "INSALL OPTION:" $INSTALL_OPTION +echo "INSTALL OPTION:" $INSTALL_OPTION cd /main/ # all local installs, mapped from host if [ "$INSTALL_OPTION" == "local-all" ]; then @@ -31,7 +31,3 @@ if [[ "$TEST_CMD" == *pytest* ]]; then pip install pytest pip install pytest-cov fi - -# additional installs for running DLC -pip install torch -pip install ffmpeg From b8e36603a13e569dba1e8dcc3d50b6252f6cf46b Mon Sep 17 00:00:00 2001 From: Chris Brozdowski Date: Wed, 22 Feb 2023 10:11:58 -0600 Subject: [PATCH 04/58] Apply suggestions from code review Co-authored-by: Kabilar Gunalan --- README.md | 2 +- docker/Dockerfile.test | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index b1eb0ee..fa6dc3d 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,6 @@ pipeline for data storage and processing with corresponding database tables that combined with other Elements to assemble a fully functional pipeline. Installation and usage instructions can be found at the -[Element documentation](datajoint.com/docs/elements/element-optogenetics). +[Element documentation](https://datajoint.com/docs/elements/element-optogenetics). ![diagram](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/diagram_flowchart.svg) diff --git a/docker/Dockerfile.test b/docker/Dockerfile.test index 82a08b3..4f47702 100755 --- a/docker/Dockerfile.test +++ b/docker/Dockerfile.test @@ -8,7 +8,6 @@ RUN /entrypoint.sh echo "Installed dependencies." 
WORKDIR /main/workflow-optogenetics -# Always get interface/djarchive # Always move local - conditional install in setup.sh COPY --chown=anaconda:anaconda ./element-lab/ /main/element-lab/ From a5bda82f882be4d3fc72415ceb15cdd26e3e15d8 Mon Sep 17 00:00:00 2001 From: Chris Brozdowski Date: Wed, 22 Feb 2023 10:45:57 -0600 Subject: [PATCH 05/58] Update tests/test_pipeline_generation.py Co-authored-by: Kabilar Gunalan --- tests/test_pipeline_generation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pipeline_generation.py b/tests/test_pipeline_generation.py index 7860276..166169d 100644 --- a/tests/test_pipeline_generation.py +++ b/tests/test_pipeline_generation.py @@ -14,7 +14,7 @@ def test_opto_pipeline(pipeline): opto = pipeline["opto"] Device = pipeline["Device"] - # test connection opto.VideoRec -> schema children + # test connection opto.OptoProtocol -> schema children opto_parent_links = opto.OptoProtocol.parents() opto_parent_list = [ session.Session, From e996a752e1fa2f6a85169e1d1b571fc1ce2772c4 Mon Sep 17 00:00:00 2001 From: Chris Brozdowski Date: Wed, 22 Feb 2023 12:38:45 -0600 Subject: [PATCH 06/58] Apply suggestions from code review Co-authored-by: Kabilar Gunalan --- docker/docker-compose.yaml | 2 +- docker/setup.sh | 2 +- tests/test_ingest.py | 4 ++-- tests/test_pipeline_generation.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 960b88a..1cdba07 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -29,7 +29,7 @@ services: dockerfile: ./workflow-optogenetics/docker/Dockerfile.test args: - GITHUB_USERNAME=${GITHUB_USERNAME} - image: workflow-optogenetics:0.0.0 + image: workflow-optogenetics:0.1.0 container_name: workflow-optogenetics environment: - DJ_HOST=db diff --git a/docker/setup.sh b/docker/setup.sh index cb0d29c..8d2d977 100644 --- a/docker/setup.sh +++ b/docker/setup.sh @@ -9,7 +9,7 @@ if [ "$INSTALL_OPTION" == 
"local-all" ]; then pip install -e ./element-${f} done pip install -e ./workflow-optogenetics -# all except workflow pip installed +# Install all from GitHub except for (optionally) optogenetics Element and Workflow else pip install git+https://github.com/${GITHUB_USERNAME}/element-lab.git pip install git+https://github.com/${GITHUB_USERNAME}/element-animal.git diff --git a/tests/test_ingest.py b/tests/test_ingest.py index e52eb95..afcbf93 100644 --- a/tests/test_ingest.py +++ b/tests/test_ingest.py @@ -1,6 +1,6 @@ """Tests ingestion into schema tables: Lab, Subject, Session - 1. Assert length of populating data conftest - 2. Assert exact matches of inserted data fore key tables + 1. Assert length of data populated with conftest + 2. Assert exact matches of inserted data for key tables """ diff --git a/tests/test_pipeline_generation.py b/tests/test_pipeline_generation.py index 166169d..3692f69 100644 --- a/tests/test_pipeline_generation.py +++ b/tests/test_pipeline_generation.py @@ -14,7 +14,7 @@ def test_opto_pipeline(pipeline): opto = pipeline["opto"] Device = pipeline["Device"] - # test connection opto.OptoProtocol -> schema children + # test connection opto.OptoProtocol -> parents opto_parent_links = opto.OptoProtocol.parents() opto_parent_list = [ session.Session, From 776e5799397ea41927508ba6c645814b0e5b9eff Mon Sep 17 00:00:00 2001 From: CBroz1 Date: Wed, 22 Feb 2023 15:44:13 -0600 Subject: [PATCH 07/58] Remove cell metadata titles. 
Spelling/Pre-commit fixes --- cspell.json => .cspell.json | 7 ++- .pre-commit-config.yaml | 2 +- notebooks/01-configure.ipynb | 6 +-- .../02-workflow-structure-optional.ipynb | 45 ++++++------------- notebooks/03-process.ipynb | 2 +- notebooks/py_scripts/01-configure.py | 13 ++---- .../02-workflow-structure-optional.py | 40 ++--------------- notebooks/py_scripts/03-process.py | 23 +--------- notebooks/py_scripts/06-drop-optional.py | 8 ++-- 9 files changed, 38 insertions(+), 108 deletions(-) rename cspell.json => .cspell.json (88%) diff --git a/cspell.json b/.cspell.json similarity index 88% rename from cspell.json rename to .cspell.json index a01173c..5b4e814 100644 --- a/cspell.json +++ b/.cspell.json @@ -4,7 +4,9 @@ "version": "0.2", // Version of the setting file. Always 0.2 "language": "en", // language - current active spelling language "enabledLanguageIds": [ - "markdown", "yaml", "python" + "markdown", + "yaml", + "python" ], // flagWords - list of words to be always considered incorrect // This is useful for offensive words and common spelling errors. 
@@ -16,9 +18,12 @@ "./images/*" ], "words": [ + "dorso", "FUCNTION", "ipykernel", "jupytext", + "neuro", + "OPTG", "opto", "Opto", "optogenetic", diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 03788bc..29eab3b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: # isort - repo: https://github.com/pycqa/isort - rev: 5.11.2 + rev: 5.12.0 hooks: - id: isort args: ["--profile", "black"] diff --git a/notebooks/01-configure.ipynb b/notebooks/01-configure.ipynb index ebc3f56..fb509c6 100644 --- a/notebooks/01-configure.ipynb +++ b/notebooks/01-configure.ipynb @@ -45,7 +45,7 @@ "\n", "dj.config[\"database.host\"] = \"{YOUR_HOST}\" # CodeBook users should omit this\n", "dj.config[\"database.user\"] = \"{YOUR_USERNAME}\"\n", - "dj.config[\"database.password\"] = getpass.getpass() # enter the password securily" + "dj.config[\"database.password\"] = getpass.getpass() # enter the password securely" ] }, { @@ -77,7 +77,7 @@ "\n", "Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, e.g. 
`neuro_lab`, `neuro_subject`, etc.\n", "\n", - "The prefix could be configurated to your username in `dj.config` as follows.\n" + "The prefix could be configured to your username in `dj.config` as follows.\n" ] }, { @@ -138,7 +138,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.13" + "version": "3.9.16" }, "vscode": { "interpreter": { diff --git a/notebooks/02-workflow-structure-optional.ipynb b/notebooks/02-workflow-structure-optional.ipynb index b53af38..6dc246d 100644 --- a/notebooks/02-workflow-structure-optional.ipynb +++ b/notebooks/02-workflow-structure-optional.ipynb @@ -1,6 +1,7 @@ { "cells": [ { + "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ @@ -10,7 +11,7 @@ "\n", "- DataJoint needs to be pre-configured before running this notebook, if you haven't set up the configuration, refer to notebook [01-configure](01-configure.ipynb).\n", "\n", - "- If you are familar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow.\n", + "- If you are familiar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow.\n", "\n", "- For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/)\n", "\n", @@ -67,9 +68,7 @@ { "cell_type": "code", "execution_count": 3, - "metadata": { - "title": "Each module imported above corresponds to one schema inside the database. For example, `ephys` corresponds to `neuro_ephys` schema in the database." - }, + "metadata": {}, "outputs": [ { "data": { @@ -96,9 +95,7 @@ { "cell_type": "code", "execution_count": 4, - "metadata": { - "title": "Each datajoint table class inside the module corresponds to a table inside the schema. 
For example, the class `ephys.EphysRecording` correponds to the table `_ephys_recording` in the schema `neuro_ephys` in the database." - }, + "metadata": {}, "outputs": [ { "data": { @@ -222,9 +219,7 @@ { "cell_type": "code", "execution_count": null, - "metadata": { - "title": "`dj.list_schemas()`: list all schemas a user could access." - }, + "metadata": {}, "outputs": [], "source": [ "dj.list_schemas()" @@ -240,9 +235,7 @@ { "cell_type": "code", "execution_count": 5, - "metadata": { - "title": "`dj.Diagram()`: plot tables and dependencies" - }, + "metadata": {}, "outputs": [ { "data": { @@ -264,9 +257,7 @@ { "cell_type": "code", "execution_count": 6, - "metadata": { - "title": "`dj.Diagram()`: plot the diagram of the tables and dependencies. It could be used to plot tables in a schema or selected tables." - }, + "metadata": {}, "outputs": [ { "data": { @@ -366,9 +357,7 @@ { "cell_type": "code", "execution_count": 12, - "metadata": { - "title": "`heading`: show table attributes regardless of foreign key references." - }, + "metadata": {}, "outputs": [ { "data": { @@ -397,9 +386,7 @@ }, { "cell_type": "markdown", - "metadata": { - "title": "ephys" - }, + "metadata": {}, "source": [ "## Elements in `workflow-optogenetics`\n", "\n", @@ -458,9 +445,7 @@ { "cell_type": "code", "execution_count": 16, - "metadata": { - "title": "[subject](https://github.com/datajoint/element-animal): contains the basic information of subject, including Strain, Line, Subject, Zygosity, and SubjectDeath information." 
- }, + "metadata": {}, "outputs": [ { "name": "stdout", @@ -542,9 +527,7 @@ { "cell_type": "code", "execution_count": 18, - "metadata": { - "title": "[session](https://github.com/datajoint/element-session): experimental session information" - }, + "metadata": {}, "outputs": [ { "name": "stdout", @@ -582,9 +565,7 @@ { "cell_type": "code", "execution_count": 19, - "metadata": { - "title": "[probe and ephys](https://github.com/datajoint/element-array-ephys): Neuropixel based probe and ephys tables" - }, + "metadata": {}, "outputs": [ { "data": { @@ -633,7 +614,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.13" + "version": "3.9.16" }, "vscode": { "interpreter": { diff --git a/notebooks/03-process.ipynb b/notebooks/03-process.ipynb index bcfa5a4..734567d 100644 --- a/notebooks/03-process.ipynb +++ b/notebooks/03-process.ipynb @@ -1779,7 +1779,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.13" + "version": "3.9.16" }, "vscode": { "interpreter": { diff --git a/notebooks/py_scripts/01-configure.py b/notebooks/py_scripts/01-configure.py index ebb0006..18281a8 100644 --- a/notebooks/py_scripts/01-configure.py +++ b/notebooks/py_scripts/01-configure.py @@ -5,7 +5,7 @@ # extension: .py # format_name: light # format_version: '1.5' -# jupytext_version: 1.14.1 +# jupytext_version: 1.14.4 # kernelspec: # display_name: Python 3.9.13 ('ele') # language: python @@ -23,12 +23,10 @@ # + import os - import datajoint as dj if os.path.basename(os.getcwd()) == "notebooks": os.chdir("..") - # - # ## Setup - Credentials @@ -41,8 +39,7 @@ dj.config["database.host"] = "{YOUR_HOST}" # CodeBook users should omit this dj.config["database.user"] = "{YOUR_USERNAME}" -dj.config["database.password"] = getpass.getpass() # enter the password securily - +dj.config["database.password"] = getpass.getpass() # enter the password securely # - # You should be able to connect to the database at this 
stage. @@ -50,16 +47,15 @@ dj.conn() - # ## Setup - `dj.config['custom']` # # The major component of the current workflow is Element Optogenetics (see [GitHub repository](https://github.com/datajoint/element-optogenetics) and [documentation](https://datajoint.com/docs/elements/element-optogenetics)). Many Elements require configurations in the field `custom` in `dj.config`: # # ### Database prefix # -# Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, every schema created with the current workflow will start with `neuro_`, e.g. `neuro_lab`, `neuro_subject`, etc. +# Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, e.g. `neuro_lab`, `neuro_subject`, etc. # -# The prefix could be configurated to your username in `dj.config` as follows. +# The prefix could be configured to your username in `dj.config` as follows. # username_as_prefix = dj.config["database.user"] + "_" @@ -75,7 +71,6 @@ # dj.config.save_local() dj.config.save_global() - # ## Next Step # # After the configuration, we will be able to run through the workflow with the [02-workflow-structure](02-workflow-structure-optional.ipynb) notebook. diff --git a/notebooks/py_scripts/02-workflow-structure-optional.py b/notebooks/py_scripts/02-workflow-structure-optional.py index 9fb2887..17fa1ab 100644 --- a/notebooks/py_scripts/02-workflow-structure-optional.py +++ b/notebooks/py_scripts/02-workflow-structure-optional.py @@ -6,7 +6,7 @@ # extension: .py # format_name: light # format_version: '1.5' -# jupytext_version: 1.14.1 +# jupytext_version: 1.14.4 # kernelspec: # display_name: Python 3.9.13 ('ele') # language: python @@ -19,7 +19,7 @@ # # - DataJoint needs to be pre-configured before running this notebook, if you haven't set up the configuration, refer to notebook [01-configure](01-configure.ipynb). 
# -# - If you are familar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow. +# - If you are familiar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow. # # - For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/) # @@ -31,7 +31,6 @@ if os.path.basename(os.getcwd()) == "notebooks": os.chdir("..") - # - # ## Schemas and tables @@ -40,25 +39,19 @@ # import datajoint as dj - -from workflow_optogenetics.pipeline import Device, lab, opto, session, subject, surgery +from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device # Each module contains a schema object that enables interaction with the schema in the database. # -# + Each module imported above corresponds to one schema inside the database. For example, `ephys` corresponds to `neuro_ephys` schema in the database. opto.schema -# - - # The table classes in the module corresponds to a table in the schema in the database. # -# + Each datajoint table class inside the module corresponds to a table inside the schema. For example, the class `ephys.EphysRecording` correponds to the table `_ephys_recording` in the schema `neuro_ephys` in the database. # preview columns and contents in a table opto.OptoWaveform() - # + The first time importing the modules, empty schemas and tables will be created in the database. [markdown] # By importing the modules for the first time, the schemas and tables will be created inside the database. # @@ -69,26 +62,19 @@ # # `dj.list_schemas()`: list all schemas a user has access to in the current database # -# + `dj.list_schemas()`: list all schemas a user could access. 
-dj.list_schemas() - # - +dj.list_schemas() # `dj.Diagram()`: plot tables and dependencies in a schema. See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/). # -# + `dj.Diagram()`: plot tables and dependencies # Plot diagram for all tables in a schema dj.Diagram(opto) - -# + `dj.Diagram()`: plot the diagram of the tables and dependencies. It could be used to plot tables in a schema or selected tables. # Plot diagram of tables in multiple schemas. # Adding and subtracting looks downstream and upstream respectively dj.Diagram(surgery) + dj.Diagram(opto) - 1 -# - - # Plot diagram of selected tables and schemas ( dj.Diagram(subject.Subject) @@ -97,41 +83,30 @@ + dj.Diagram(opto.OptoProtocol) ) - # + `heading`: [markdown] # `describe()`: show table definition with foreign key references. # # - opto.OptoProtocol.describe() - # `heading`: show attribute definitions regardless of foreign key references # -# + `heading`: show table attributes regardless of foreign key references. opto.OptoProtocol.heading - -# + ephys [markdown] # ## Elements in `workflow-optogenetics` # # [`lab`](https://datajoint.com/docs/elements/element-animal/): lab management related information, such as Lab, User, Project, Protocol, Source. # -# - dj.Diagram(lab) - # [`subject`](https://datajoint.com/docs/elements/element-animal/): general animal metadata and surgery information dj.Diagram(subject) - -# + [subject](https://github.com/datajoint/element-animal): contains the basic information of subject, including Strain, Line, Subject, Zygosity, and SubjectDeath information. subject.Subject.describe() -# - - dj.Diagram(surgery) # [`session`](https://datajoint.com/docs/elements/element-session/): General information of experimental sessions. 
@@ -139,20 +114,13 @@ dj.Diagram(session) - -# + [session](https://github.com/datajoint/element-session): experimental session information session.Session.describe() -# - - # [`opto`](https://github.com/datajoint/element-optogenetics): Optogenetics stimulus and timing data # -# + [probe and ephys](https://github.com/datajoint/element-array-ephys): Neuropixel based probe and ephys tables dj.Diagram(opto) -# - - # ## Summary and next step # # - This notebook introduced the overall structures of the schemas and tables in the workflow and relevant tools to explore the schema structure and table definitions. diff --git a/notebooks/py_scripts/03-process.py b/notebooks/py_scripts/03-process.py index 23e7055..a18d100 100644 --- a/notebooks/py_scripts/03-process.py +++ b/notebooks/py_scripts/03-process.py @@ -5,7 +5,7 @@ # extension: .py # format_name: light # format_version: '1.5' -# jupytext_version: 1.14.1 +# jupytext_version: 1.14.4 # kernelspec: # display_name: Python 3.9.13 ('ele') # language: python @@ -31,7 +31,6 @@ if os.path.basename(os.getcwd()) == "notebooks": os.chdir("..") - # - # ## `pipeline.py` @@ -40,8 +39,7 @@ # import datajoint as dj - -from workflow_optogenetics.pipeline import Device, lab, opto, session, subject, surgery +from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device # ## Schema diagrams # @@ -55,7 +53,6 @@ + dj.Diagram(opto) ) - # ## Inserting data # @@ -72,18 +69,15 @@ ) ) - lab.User.insert1( dict(user="User1") ) # For the surgeon attribute in surgery.Implantation - # ### `subject` schema # subject.Subject.heading - subject.Subject.insert1( dict( subject="subject3", @@ -93,16 +87,13 @@ ) ) - # In order to conduct optogenetic stimulation, our subject must have an implant in the target brain region. Again, some `Lookup` tables have useful default content. 
# surgery.CoordinateReference() - surgery.Hemisphere() - # + surgery.BrainRegion.insert1( dict(region_acronym="dHP", region_name="Dorsal Hippocampus") @@ -136,7 +127,6 @@ beta=None, # degree rotation about shank [-180, 180] wrt anterior ) ) - # - # ### Insert into `session` schema @@ -144,17 +134,14 @@ session.Session.describe() - session.Session.heading - session_key = dict( subject="subject3", session_id="1", session_datetime="2022-04-04 12:13:14" ) session.Session.insert1(session_key) session.Session() - # ### Insert into `opto` schema # @@ -174,7 +161,6 @@ dict(waveform_name="square_10", on_proportion=0.10, off_proportion=0.90) ) - opto.OptoStimParams.insert1( dict( opto_params_id=1, @@ -186,7 +172,6 @@ ) ) - # Next, we'll describe the session in which these parameters were used with `OptoProtocol` # @@ -204,7 +189,6 @@ ) ) - # We can describe the timing of these stimulations in `OptoEvent`. # @@ -227,7 +211,6 @@ ] ) - # To store more experimental timing information, see documentation for [Element Event](https://datajoint.com/docs/elements/element-event/). # @@ -247,7 +230,6 @@ from workflow_optogenetics.ingest import ingest_subjects help(ingest_subjects) - # - # By default, these functions pull from files in the `user_files` directory. We can run each of these in succession with the default parameters with `ingest_all`. 
@@ -257,7 +239,6 @@ from workflow_optogenetics.ingest import ingest_all ingest_all() - # - # ## Events diff --git a/notebooks/py_scripts/06-drop-optional.py b/notebooks/py_scripts/06-drop-optional.py index 6894599..795e25a 100644 --- a/notebooks/py_scripts/06-drop-optional.py +++ b/notebooks/py_scripts/06-drop-optional.py @@ -30,14 +30,14 @@ # - from workflow_optogenetics.pipeline import ( + trial, + opto, Device, + surgery, event, - lab, - opto, session, subject, - surgery, - trial, + lab, ) # + From 27745fccdbba29d7b7b9e8dadfc634cfe928d17e Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 3 Apr 2023 09:09:46 -0500 Subject: [PATCH 08/58] Remove jupytext synced scripts --- notebooks/py_scripts/01-configure.py | 77 ----- .../02-workflow-structure-optional.py | 129 --------- notebooks/py_scripts/03-process.py | 268 ------------------ notebooks/py_scripts/06-drop-optional.py | 51 ---- 4 files changed, 525 deletions(-) delete mode 100644 notebooks/py_scripts/01-configure.py delete mode 100644 notebooks/py_scripts/02-workflow-structure-optional.py delete mode 100644 notebooks/py_scripts/03-process.py delete mode 100644 notebooks/py_scripts/06-drop-optional.py diff --git a/notebooks/py_scripts/01-configure.py b/notebooks/py_scripts/01-configure.py deleted file mode 100644 index 18281a8..0000000 --- a/notebooks/py_scripts/01-configure.py +++ /dev/null @@ -1,77 +0,0 @@ -# --- -# jupyter: -# jupytext: -# text_representation: -# extension: .py -# format_name: light -# format_version: '1.5' -# jupytext_version: 1.14.4 -# kernelspec: -# display_name: Python 3.9.13 ('ele') -# language: python -# name: python3 -# --- - -# # DataJoint configuration -# -# ## Setup - Working Directory -# -# To run the workflow, we need to properly set up the DataJoint configuration. The configuration can be saved in a local directory as `dj_local_conf.json` or at your root directory as a hidden file. This notebook walks you through the setup process. 
-# -# **The configuration only needs to be set up once**, if you have gone through the configuration before, directly go to [02-workflow-structure](02-workflow-structure-optional.ipynb). -# - -# + -import os -import datajoint as dj - -if os.path.basename(os.getcwd()) == "notebooks": - os.chdir("..") -# - - -# ## Setup - Credentials -# -# Now let's set up the host, user and password in the `dj.config` global variable -# - -# + -import getpass - -dj.config["database.host"] = "{YOUR_HOST}" # CodeBook users should omit this -dj.config["database.user"] = "{YOUR_USERNAME}" -dj.config["database.password"] = getpass.getpass() # enter the password securely -# - - -# You should be able to connect to the database at this stage. -# - -dj.conn() - -# ## Setup - `dj.config['custom']` -# -# The major component of the current workflow is Element Optogenetics (see [GitHub repository](https://github.com/datajoint/element-optogenetics) and [documentation](https://datajoint.com/docs/elements/element-optogenetics)). Many Elements require configurations in the field `custom` in `dj.config`: -# -# ### Database prefix -# -# Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, e.g. `neuro_lab`, `neuro_subject`, etc. -# -# The prefix could be configured to your username in `dj.config` as follows. -# - -username_as_prefix = dj.config["database.user"] + "_" -dj.config["custom"] = {"database.prefix": username_as_prefix} - -# ## Save configuration -# -# We could save this as a file, either as a local json file, or a global file. Local configuration file is saved as `dj_local_conf.json` in current directory, which is great for project-specific settings. -# -# For first-time users, we recommend saving globally. This will create a hidden configuration file in your root directory, which will be loaded whenever there is no local version to override it. 
-# - -# dj.config.save_local() -dj.config.save_global() - -# ## Next Step -# -# After the configuration, we will be able to run through the workflow with the [02-workflow-structure](02-workflow-structure-optional.ipynb) notebook. -# diff --git a/notebooks/py_scripts/02-workflow-structure-optional.py b/notebooks/py_scripts/02-workflow-structure-optional.py deleted file mode 100644 index 17fa1ab..0000000 --- a/notebooks/py_scripts/02-workflow-structure-optional.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -# --- -# jupyter: -# jupytext: -# text_representation: -# extension: .py -# format_name: light -# format_version: '1.5' -# jupytext_version: 1.14.4 -# kernelspec: -# display_name: Python 3.9.13 ('ele') -# language: python -# name: python3 -# --- - -# # Introduction to the workflow structure -# -# This notebook gives a brief overview of the workflow structure and introduces some useful DataJoint tools to facilitate the exploration. -# -# - DataJoint needs to be pre-configured before running this notebook, if you haven't set up the configuration, refer to notebook [01-configure](01-configure.ipynb). -# -# - If you are familiar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly to run the workflow. -# -# - For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/) -# -# To load the local configuration, we will change the directory to the package root. 
-# - -# + -import os - -if os.path.basename(os.getcwd()) == "notebooks": - os.chdir("..") -# - - -# ## Schemas and tables -# -# The current workflow is composed of multiple database schemas, each of them corresponds to a module within `workflow_optogenetics.pipeline` -# - -import datajoint as dj -from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device - -# Each module contains a schema object that enables interaction with the schema in the database. -# - -opto.schema - -# The table classes in the module corresponds to a table in the schema in the database. -# - -# preview columns and contents in a table -opto.OptoWaveform() - -# + The first time importing the modules, empty schemas and tables will be created in the database. [markdown] -# By importing the modules for the first time, the schemas and tables will be created inside the database. -# -# Once created, importing modules will not create schemas and tables again, but the existing schemas/tables can be accessed and manipulated by the modules. -# -# + The schemas and tables will not be re-created when importing modules if they have existed. [markdown] -# ## DataJoint tools to explore schemas and tables -# -# `dj.list_schemas()`: list all schemas a user has access to in the current database -# -# - -dj.list_schemas() - -# `dj.Diagram()`: plot tables and dependencies in a schema. See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/). -# - -# Plot diagram for all tables in a schema -dj.Diagram(opto) - -# Plot diagram of tables in multiple schemas. 
-# Adding and subtracting looks downstream and upstream respectively -dj.Diagram(surgery) + dj.Diagram(opto) - 1 - -# Plot diagram of selected tables and schemas -( - dj.Diagram(subject.Subject) - + dj.Diagram(session.Session) - + dj.Diagram(surgery.Implantation) - + dj.Diagram(opto.OptoProtocol) -) - -# + `heading`: [markdown] -# `describe()`: show table definition with foreign key references. -# -# - -opto.OptoProtocol.describe() - -# `heading`: show attribute definitions regardless of foreign key references -# - -opto.OptoProtocol.heading - -# ## Elements in `workflow-optogenetics` -# -# [`lab`](https://datajoint.com/docs/elements/element-animal/): lab management related information, such as Lab, User, Project, Protocol, Source. -# - -dj.Diagram(lab) - -# [`subject`](https://datajoint.com/docs/elements/element-animal/): general animal metadata and surgery information - -dj.Diagram(subject) - -subject.Subject.describe() - -dj.Diagram(surgery) - -# [`session`](https://datajoint.com/docs/elements/element-session/): General information of experimental sessions. -# - -dj.Diagram(session) - -session.Session.describe() - -# [`opto`](https://github.com/datajoint/element-optogenetics): Optogenetics stimulus and timing data -# - -dj.Diagram(opto) - -# ## Summary and next step -# -# - This notebook introduced the overall structures of the schemas and tables in the workflow and relevant tools to explore the schema structure and table definitions. -# -# - In the next notebook [03-process](03-process.ipynb), we will introduce the detailed steps to run through `workflow-optogenetics`. 
-# diff --git a/notebooks/py_scripts/03-process.py b/notebooks/py_scripts/03-process.py deleted file mode 100644 index a18d100..0000000 --- a/notebooks/py_scripts/03-process.py +++ /dev/null @@ -1,268 +0,0 @@ -# --- -# jupyter: -# jupytext: -# text_representation: -# extension: .py -# format_name: light -# format_version: '1.5' -# jupytext_version: 1.14.4 -# kernelspec: -# display_name: Python 3.9.13 ('ele') -# language: python -# name: python3 -# --- - -# # Interactively run workflow optogenetics -# -# - This notebook walks you through the steps in detail to run the `workflow-optogenetics`. -# -# - If you haven't configured the paths, refer to [01-configure](01-configure.ipynb). -# -# - To overview the schema structures, refer to [02-workflow-structure](02-workflow-structure.ipynb). -# -# - For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/). -# - -# Let's change the directory to the package root directory to load the local configuration (`dj_local_conf.json`). -# - -# + -import os - -if os.path.basename(os.getcwd()) == "notebooks": - os.chdir("..") -# - - -# ## `pipeline.py` -# -# This script `activates` the DataJoint Elements and declares other required tables. -# - -import datajoint as dj -from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device - -# ## Schema diagrams -# -# See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/). -# - -( - dj.Diagram(subject.Subject) - + dj.Diagram(session.Session) - + dj.Diagram(surgery.Implantation) - + dj.Diagram(opto) -) - -# ## Inserting data -# - -# ### `lab` schema -# -# `pipeline.py` adds a Device table to the `lab` schema. This table, like other `Lookup` tables, has default contents, but we can always add more. 
-# - -Device.insert1( - dict( - device="OPTG_8", - modality="Optogenetics", - description="8 channel pulse sequence device", - ) -) - -lab.User.insert1( - dict(user="User1") -) # For the surgeon attribute in surgery.Implantation - -# ### `subject` schema -# - -subject.Subject.heading - -subject.Subject.insert1( - dict( - subject="subject3", - sex="F", - subject_birth_date="2020-03-03", - subject_description="Optogenetic pilot subject.", - ) -) - -# In order to conduct optogenetic stimulation, our subject must have an implant in the target brain region. Again, some `Lookup` tables have useful default content. -# - -surgery.CoordinateReference() - -surgery.Hemisphere() - -# + -surgery.BrainRegion.insert1( - dict(region_acronym="dHP", region_name="Dorsal Hippocampus") -) -surgery.Implantation.insert1( - dict( - subject="subject3", - implant_date="2022-04-01 12:13:14", - implant_type="opto", - target_region="dHP", - target_hemisphere="left", - surgeon="user1", - ) -) - -surgery.Implantation.Coordinate.insert1( - dict( - subject="subject3", - implant_date="2022-04-01 12:13:14", - implant_type="opto", - target_region="dHP", - target_hemisphere="left", - ap="-7.9", # anterior-posterior distance in mm - ap_ref="bregma", - ml="-1.8", # medial axis distance in mm - ml_ref="bregma", - dv="5", # dorso-ventral axis distance in mm - dv_ref="skull_surface", - theta="11.5", # degree rotation about ml-axis [0, 180] wrt z - phi="0", # degree rotation about dv-axis [0, 360] wrt x - beta=None, # degree rotation about shank [-180, 180] wrt anterior - ) -) -# - - -# ### Insert into `session` schema -# - -session.Session.describe() - -session.Session.heading - -session_key = dict( - subject="subject3", session_id="1", session_datetime="2022-04-04 12:13:14" -) -session.Session.insert1(session_key) -session.Session() - -# ### Insert into `opto` schema -# - -# First, we'll add information to describe the stimulus, including waveform shape and and application parameters. 
-# - -opto.OptoWaveform.insert1( - dict( - waveform_name="square_10", - waveform_type="square", - waveform_description="Square waveform: 10%/90% on/off cycle", - ) -) -# Square is one part table of OptoWaveform. -# For sine and ramp waveforms, see the corresponding tables -opto.OptoWaveform.Square.insert1( - dict(waveform_name="square_10", on_proportion=0.10, off_proportion=0.90) -) - -opto.OptoStimParams.insert1( - dict( - opto_params_id=1, - waveform_name="square_10", - wavelength=470, - light_intensity=10.2, - frequency=1, - duration=241, - ) -) - -# Next, we'll describe the session in which these parameters were used with `OptoProtocol` -# - -opto.OptoProtocol.insert1( - dict( - subject="subject3", - session_id="1", - protocol_id="1", - opto_params_id="1", - implant_date="2022-04-01 12:13:14", - implant_type="opto", - target_region="dHP", - target_hemisphere="left", - device="OPTG_4", - ) -) - -# We can describe the timing of these stimulations in `OptoEvent`. -# - -opto.OptoEvent.insert( - [ - dict( - subject="subject3", - session_id=1, - protocol_id=1, - stim_start_time=241, - stim_end_time=482, - ), - dict( - subject="subject3", - session_id=1, - protocol_id=1, - stim_start_time=482, - stim_end_time=723, - ), - ] -) - -# To store more experimental timing information, see documentation for [Element Event](https://datajoint.com/docs/elements/element-event/). -# - -# ## Automating inserts -# -# This workflow provides functions for ingesting this information from csv files in `ingest.py`. -# -# - `ingest_subjects`: subject.Subject -# - `ingest_sessions`: session.Session -# - `ingest_events`: Element Event schemas -# - `ingest_opto`: surgery and opto schemas -# -# For more information on each, see the docstring. -# - -# + -from workflow_optogenetics.ingest import ingest_subjects - -help(ingest_subjects) -# - - -# By default, these functions pull from files in the `user_files` directory. 
We can run each of these in succession with the default parameters with `ingest_all`. -# - -# + -from workflow_optogenetics.ingest import ingest_all - -ingest_all() -# - - -# ## Events -# -# The above `ingest_all()` also added behavioral events we can example in conjunction with optogenetic events. For convenience, these stimulation events are also reflected in the Block design of Element Event. -# - -# + -from workflow_optogenetics.pipeline import event, trial - -events_by_block = ( - trial.BlockTrial * trial.TrialEvent * trial.Block.Attribute - & "attribute_name='stimulation'" -) -events_by_block -# - - -# We can readily compare the count of events or event types across 'on' and 'off' stimulation conditions. - -events_by_block & "attribute_value='on'" - -events_by_block & "attribute_value='off'" - -# ## Next Steps -# -# Interested in using Element Optogenetics for your own project? Reach out to the DataJoint team via [email](mailto:support@datajoint.com) or [Slack](https://datajoint.slack.com). -# diff --git a/notebooks/py_scripts/06-drop-optional.py b/notebooks/py_scripts/06-drop-optional.py deleted file mode 100644 index 795e25a..0000000 --- a/notebooks/py_scripts/06-drop-optional.py +++ /dev/null @@ -1,51 +0,0 @@ -# --- -# jupyter: -# jupytext: -# text_representation: -# extension: .py -# format_name: light -# format_version: '1.5' -# jupytext_version: 1.14.4 -# kernelspec: -# display_name: ele -# language: python -# name: python3 -# --- - -# # Drop schemas -# -# - Schemas are not typically dropped in a production workflow with real data in it. -# - At the developmental phase, it might be required for the table redesign. -# - When dropping all schemas is needed, the following is the dependency order. -# - -# Change into the parent directory to find the `dj_local_conf.json` file. 
-# - -# + -import os - -if os.path.basename(os.getcwd()) == "notebooks": - os.chdir("..") -# - - -from workflow_optogenetics.pipeline import ( - trial, - opto, - Device, - surgery, - event, - session, - subject, - lab, -) - -# + -# trial.schema.drop() -# opto.schema.drop() -# Device.drop_quick() -# surgery.schema.drop() -# event.schema.drop() -# session.schema.drop() -# subject.schema.drop() -# lab.schema.drop() From 3c08e79fa003fa654231a540d301390dc4ca2be2 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 3 Apr 2023 09:33:05 -0500 Subject: [PATCH 09/58] Remove `ingest.py` --- workflow_optogenetics/ingest.py | 187 -------------------------------- 1 file changed, 187 deletions(-) delete mode 100644 workflow_optogenetics/ingest.py diff --git a/workflow_optogenetics/ingest.py b/workflow_optogenetics/ingest.py deleted file mode 100644 index a4de7f2..0000000 --- a/workflow_optogenetics/ingest.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python -from element_interface.utils import ingest_csv_to_table - -from .pipeline import event, lab, opto, session, subject, surgery, trial - -___all__ = [ - "subject", - "surgery", - "session", - "trial", - "event", - "opto", - "ingest_csv_to_table", - "ingest_subjects", - "ingest_sessions", - "ingest_events", - "ingest_opto", - "ingest_all", -] - - -def ingest_subjects( - subject_csv_path: str = "./user_data/subjects.csv", - skip_duplicates: bool = True, - verbose: bool = True, -): - """Ingest subjects listed in the subject column of ./user_data/subjects.csv - - Args: - subject_csv_path (str, optional): Relative path to subject csv. - Defaults to "./user_data/subjects.csv". - skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. - Defaults to True. - verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. 
- """ - csvs = [subject_csv_path] - tables = [subject.Subject()] - - ingest_csv_to_table(csvs, tables, skip_duplicates=skip_duplicates, verbose=verbose) - - -def ingest_sessions( - session_csv_path: str = "./user_data/sessions.csv", - skip_duplicates: bool = True, - verbose: bool = True, -): - """Ingest sessions from CSV. Defaults to ./user_data/subjects.csv - - Args: - session_csv_path (str, optional): Relative path to sessions CSV. - Defaults to "./user_data/sessions.csv". - skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. - Defaults to True. - verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. - """ - - csvs = [session_csv_path] - tables = [session.Session()] - - ingest_csv_to_table(csvs, tables, skip_duplicates=skip_duplicates, verbose=verbose) - - -def ingest_events( - recording_csv_path: str = "./user_data/behavior_recordings.csv", - block_csv_path: str = "./user_data/blocks.csv", - trial_csv_path: str = "./user_data/trials.csv", - event_csv_path: str = "./user_data/events.csv", - skip_duplicates: bool = True, - verbose: bool = True, -): - """Ingest trial structure: blocks, trials, events - - A recording is one or more blocks (i.e., phases of trials), with trials (repeated - units). Events are optionally-instantaneous occurrences within trial. This ingestion - function is duplicated across multiple DataJoint workflow repositories. - - Args: - recording_csv_path (str, optional): Relative path to recording CSV. - Defaults to "./user_data/behavior_recordings.csv". - block_csv_path (str, optional): Relative path to block CSV. - Defaults to "./user_data/blocks.csv". - trial_csv_path (str, optional): Relative path to trial CSV. - Defaults to "./user_data/trials.csv". - event_csv_path (str, optional): Relative path to event CSV. - Defaults to "./user_data/events.csv". - skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. - Defaults to True. 
- verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. - """ - csvs = [ - recording_csv_path, - recording_csv_path, - block_csv_path, - block_csv_path, - trial_csv_path, - trial_csv_path, - trial_csv_path, - trial_csv_path, - event_csv_path, - event_csv_path, - event_csv_path, - ] - tables = [ - event.BehaviorRecording(), - event.BehaviorRecording.File(), - trial.Block(), - trial.Block.Attribute(), - trial.TrialType(), - trial.Trial(), - trial.Trial.Attribute(), - trial.BlockTrial(), - event.EventType(), - event.Event(), - trial.TrialEvent(), - ] - - # Allow direct insert required bc element-trial has Imported tables - ingest_csv_to_table( - csvs, - tables, - skip_duplicates=skip_duplicates, - verbose=verbose, - allow_direct_insert=True, - ) - - -def ingest_opto( - opto_surgery_csv_path: str = "./user_data/opto_surgeries.csv", - opto_session_csv_path: str = "./user_data/opto_sessions.csv", - opto_events_csv_path: str = "./user_data/opto_events.csv", - waveform_csv_path: str = "./user_data/opto_waveforms.csv", - skip_duplicates: bool = True, - verbose: bool = True, -): - """Ingest optogenetic stimulation and protocol information. - - Args: - opto_surgery_csv_path (str, optional): Relative path to implantation info CSV. - Defaults to "./user_data/opto_surgeries.csv". - opto_session_csv_path (str, optional): Relative path to CSV with opto session - information. Defaults to "./user_data/opto_sessions.csv". - opto_events_csv_path (str, optional): Relative path to opto events CSV. - Defaults to "./user_data/opto_events.csv". - waveform_csv_path (str, optional): Relative path to waveform info CSV. - Defaults to "./user_data/opto_waveforms.csv". - skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert. - Defaults to True. - verbose (bool, optional): Provides insertion info to StdOut. Defaults to True. 
- """ - csvs = [ - waveform_csv_path, # 1 - waveform_csv_path, # 2 - waveform_csv_path, # 3 - opto_surgery_csv_path, # 4 - opto_surgery_csv_path, # 5 - opto_surgery_csv_path, # 6 - opto_surgery_csv_path, # 7 - opto_surgery_csv_path, # 8 - opto_session_csv_path, # 9 - opto_events_csv_path, # 10 - ] - tables = [ - opto.OptoWaveform(), # 1 - opto.OptoWaveform.Square(), # 2 - opto.OptoStimParams(), # 3 - surgery.CoordinateReference(), # 4 - surgery.BrainRegion(), # 5 - lab.User(), # 6 - surgery.Implantation(), # 7 - surgery.Implantation.Coordinate(), # 8 - opto.OptoProtocol(), # 9 - opto.OptoEvent(), # 10 - ] - - ingest_csv_to_table(csvs, tables, skip_duplicates=skip_duplicates, verbose=verbose) - - -def ingest_all(skip_duplicates: bool = True, verbose: bool = True): - """Run all available available ingestion functions""" - ingest_subjects(skip_duplicates=skip_duplicates, verbose=verbose) - ingest_sessions(skip_duplicates=skip_duplicates, verbose=verbose) - ingest_events(skip_duplicates=skip_duplicates, verbose=verbose) - ingest_opto(skip_duplicates=skip_duplicates, verbose=verbose) - - -if __name__ == "__main__": - ingest_all() From 4eb18a2d9312e5f575954a3acbd9076954072e32 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 3 Apr 2023 10:00:00 -0500 Subject: [PATCH 10/58] Remove notebooks --- notebooks/01-configure.ipynb | 151 -- .../02-workflow-structure-optional.ipynb | 627 ------ notebooks/03-process.ipynb | 1792 ----------------- notebooks/06-drop-optional.ipynb | 96 - 4 files changed, 2666 deletions(-) delete mode 100644 notebooks/01-configure.ipynb delete mode 100644 notebooks/02-workflow-structure-optional.ipynb delete mode 100644 notebooks/03-process.ipynb delete mode 100644 notebooks/06-drop-optional.ipynb diff --git a/notebooks/01-configure.ipynb b/notebooks/01-configure.ipynb deleted file mode 100644 index fb509c6..0000000 --- a/notebooks/01-configure.ipynb +++ /dev/null @@ -1,151 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": 
{}, - "source": [ - "# DataJoint configuration\n", - "\n", - "## Setup - Working Directory\n", - "\n", - "To run the workflow, we need to properly set up the DataJoint configuration. The configuration can be saved in a local directory as `dj_local_conf.json` or at your root directory as a hidden file. This notebook walks you through the setup process.\n", - "\n", - "**The configuration only needs to be set up once**, if you have gone through the configuration before, directly go to [02-workflow-structure](02-workflow-structure-optional.ipynb).\n" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import datajoint as dj\n", - "\n", - "if os.path.basename(os.getcwd()) == \"notebooks\":\n", - " os.chdir(\"..\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Setup - Credentials\n", - "\n", - "Now let's set up the host, user and password in the `dj.config` global variable\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "import getpass\n", - "\n", - "dj.config[\"database.host\"] = \"{YOUR_HOST}\" # CodeBook users should omit this\n", - "dj.config[\"database.user\"] = \"{YOUR_USERNAME}\"\n", - "dj.config[\"database.password\"] = getpass.getpass() # enter the password securely" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You should be able to connect to the database at this stage.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "dj.conn()" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Setup - `dj.config['custom']`\n", - "\n", - "The major component of the current workflow is Element Optogenetics (see [GitHub repository](https://github.com/datajoint/element-optogenetics) and [documentation](https://datajoint.com/docs/elements/element-optogenetics)). 
Many Elements require configurations in the field `custom` in `dj.config`:\n", - "\n", - "### Database prefix\n", - "\n", - "Giving a prefix to schemas could help when configuring database privileges. If we set the prefix to `neuro_`, e.g. `neuro_lab`, `neuro_subject`, etc.\n", - "\n", - "The prefix could be configured to your username in `dj.config` as follows.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "username_as_prefix = dj.config[\"database.user\"] + \"_\"\n", - "dj.config[\"custom\"] = {\"database.prefix\": username_as_prefix}" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Save configuration\n", - "\n", - "We could save this as a file, either as a local json file, or a global file. Local configuration file is saved as `dj_local_conf.json` in current directory, which is great for project-specific settings.\n", - "\n", - "For first-time users, we recommend saving globally. 
This will create a hidden configuration file in your root directory, which will be loaded whenever there is no local version to override it.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# dj.config.save_local()\n", - "dj.config.save_global()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Next Step\n", - "\n", - "After the configuration, we will be able to run through the workflow with the [02-workflow-structure](02-workflow-structure-optional.ipynb) notebook.\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3.9.13 ('ele')", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "vscode": { - "interpreter": { - "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" - } - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/notebooks/02-workflow-structure-optional.ipynb b/notebooks/02-workflow-structure-optional.ipynb deleted file mode 100644 index 6dc246d..0000000 --- a/notebooks/02-workflow-structure-optional.ipynb +++ /dev/null @@ -1,627 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction to the workflow structure\n", - "\n", - "This notebook gives a brief overview of the workflow structure and introduces some useful DataJoint tools to facilitate the exploration.\n", - "\n", - "- DataJoint needs to be pre-configured before running this notebook, if you haven't set up the configuration, refer to notebook [01-configure](01-configure.ipynb).\n", - "\n", - "- If you are familiar with DataJoint and the workflow structure, proceed to the next notebook [03-process](03-process.ipynb) directly 
to run the workflow.\n", - "\n", - "- For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/)\n", - "\n", - "To load the local configuration, we will change the directory to the package root.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "if os.path.basename(os.getcwd()) == \"notebooks\":\n", - " os.chdir(\"..\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Schemas and tables\n", - "\n", - "The current workflow is composed of multiple database schemas, each of them corresponds to a module within `workflow_optogenetics.pipeline`\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[2022-12-06 14:56:19,370][INFO]: Connecting cbroz@dss-db.datajoint.io:3306\n", - "[2022-12-06 14:56:19,698][INFO]: Connected cbroz@dss-db.datajoint.io:3306\n" - ] - } - ], - "source": [ - "import datajoint as dj\n", - "from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Each module contains a schema object that enables interaction with the schema in the database.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "Schema `cbroz_wfopto_opto`" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "opto.schema" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The table classes in the module corresponds to a table in the schema in the database.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, 
- "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " OptoWaveform defines the shape of one cycle of the optogenetic stimulus\n", - "
\n", - " \n", - " \n", - " \n", - "\n", - "\n", - "\n", - "
\n", - "

waveform_name

\n", - " \n", - "
\n", - "

waveform_type

\n", - " \n", - "
\n", - "

normalized_waveform

\n", - " For one cycle, normalized to peak\n", - "
\n", - "

waveform_description

\n", - " description of the waveform\n", - "
square_10square=BLOB=Square waveform: 10%/90% on/off cycle
\n", - " \n", - "

Total: 1

\n", - " " - ], - "text/plain": [ - "*waveform_name waveform_type normalized waveform_descr\n", - "+------------+ +------------+ +--------+ +------------+\n", - "square_10 square =BLOB= Square wavefor\n", - " (Total: 1)" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# preview columns and contents in a table\n", - "opto.OptoWaveform()" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "lines_to_next_cell": 0, - "title": "The first time importing the modules, empty schemas and tables will be created in the database." - }, - "source": [ - "By importing the modules for the first time, the schemas and tables will be created inside the database.\n", - "\n", - "Once created, importing modules will not create schemas and tables again, but the existing schemas/tables can be accessed and manipulated by the modules.\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "lines_to_next_cell": 0, - "title": "The schemas and tables will not be re-created when importing modules if they have existed." - }, - "source": [ - "## DataJoint tools to explore schemas and tables\n", - "\n", - "`dj.list_schemas()`: list all schemas a user has access to in the current database\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "dj.list_schemas()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`dj.Diagram()`: plot tables and dependencies in a schema. 
See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/).\n" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": "\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Plot diagram for all tables in a schema\n", - "dj.Diagram(opto)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": 
"\n\n\n\n\n7\n\n7\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\n7->surgery.Implantation\n\n\n\n\n10\n\n10\n\n\n\n10->surgery.Implantation\n\n\n\n\n8\n\n8\n\n\n\n8->surgery.Implantation\n\n\n\n\n9\n\n9\n\n\n\n9->surgery.Implantation\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsubject.Subject->session.Session\n\n\n\n\nsubject.Subject->surgery.Implantation\n\n\n\n\nsurgery.Hemisphere\n\n\nsurgery.Hemisphere\n\n\n\n\n\nsurgery.Hemisphere->surgery.Implantation\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nDevice\n\n\nDevice\n\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nDevice->opto.OptoProtocol\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n\nsurgery.CoordinateReference\n\n\nsurgery.CoordinateReference\n\n\n\n\n\nsurgery.CoordinateReference->10\n\n\n\n\nsurgery.CoordinateReference->8\n\n\n\n\nsurgery.CoordinateReference->9\n\n\n\n\nsession.Session->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nsurgery.Implantation->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nlab.User\n\n\nlab.User\n\n\n\n\n\nlab.User->7\n\n\n\n\nsurgery.BrainRegion\n\n\nsurgery.BrainRegion\n\n\n\n\n\nsurgery.BrainRegion->surgery.Implantation\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nsurgery.ImplantationType\n\n\nsurgery.ImplantationType\n\n\n\n\n\nsurgery.ImplantationType->surgery.Implantation\n\n\n\n", - 
"text/plain": [ - "" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Plot diagram of tables in multiple schemas.\n", - "# Adding and subtracting looks downstream and upstream respectively\n", - "dj.Diagram(surgery) + dj.Diagram(opto) - 1" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": "\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsession.Session->opto.OptoProtocol\n\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\nsurgery.Implantation->opto.OptoProtocol\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsubject.Subject->session.Session\n\n\n\n\nsubject.Subject->surgery.Implantation\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Plot diagram of selected tables and schemas\n", - "(\n", - " dj.Diagram(subject.Subject)\n", - " + dj.Diagram(session.Session)\n", - " + dj.Diagram(surgery.Implantation)\n", - " + dj.Diagram(opto.OptoProtocol)\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "lines_to_next_cell": 0, - "title": "`heading`:" - }, - "source": [ - "`describe()`: show table definition with foreign key references.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-> session.Session\n", - "protocol_id : int \n", - "---\n", - "-> opto.OptoStimParams\n", - "-> surgery.Implantation\n", - "-> [nullable] Device\n", - "\n" - ] - }, - { - "data": { - "text/plain": [ - "'-> session.Session\\nprotocol_id : int \\n---\\n-> opto.OptoStimParams\\n-> surgery.Implantation\\n-> [nullable] Device\\n'" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - 
"opto.OptoProtocol.describe()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "`heading`: show attribute definitions regardless of foreign key references\n" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "# \n", - "subject : varchar(8) # \n", - "session_id : int # \n", - "protocol_id : int # \n", - "---\n", - "opto_params_id : smallint # \n", - "implant_date : datetime # surgery date\n", - "implant_type : varchar(16) # Short name for type of implanted device\n", - "region_acronym : varchar(32) # Brain region shorthand\n", - "hemisphere : varchar(8) # Brain region hemisphere\n", - "device=null : varchar(32) # " - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "opto.OptoProtocol.heading" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Elements in `workflow-optogenetics`\n", - "\n", - "[`lab`](https://datajoint.com/docs/elements/element-animal/): lab management related information, such as Lab, User, Project, Protocol, Source.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": 
"\n\n\n\n\nlab.Lab.Organization\n\n\nlab.Lab.Organization\n\n\n\n\n\nlab.Location\n\n\nlab.Location\n\n\n\n\n\nlab.Project\n\n\nlab.Project\n\n\n\n\n\nlab.ProjectKeywords\n\n\nlab.ProjectKeywords\n\n\n\n\n\nlab.Project->lab.ProjectKeywords\n\n\n\n\nlab.ProjectPublication\n\n\nlab.ProjectPublication\n\n\n\n\n\nlab.Project->lab.ProjectPublication\n\n\n\n\nlab.ProjectUser\n\n\nlab.ProjectUser\n\n\n\n\n\nlab.Project->lab.ProjectUser\n\n\n\n\nlab.ProjectSourceCode\n\n\nlab.ProjectSourceCode\n\n\n\n\n\nlab.Project->lab.ProjectSourceCode\n\n\n\n\nlab.UserRole\n\n\nlab.UserRole\n\n\n\n\n\nlab.LabMembership\n\n\nlab.LabMembership\n\n\n\n\n\nlab.UserRole->lab.LabMembership\n\n\n\n\nlab.ProtocolType\n\n\nlab.ProtocolType\n\n\n\n\n\nlab.Protocol\n\n\nlab.Protocol\n\n\n\n\n\nlab.ProtocolType->lab.Protocol\n\n\n\n\nlab.User\n\n\nlab.User\n\n\n\n\n\nlab.User->lab.ProjectUser\n\n\n\n\nlab.User->lab.LabMembership\n\n\n\n\nlab.Organization\n\n\nlab.Organization\n\n\n\n\n\nlab.Organization->lab.Lab.Organization\n\n\n\n\nlab.Lab\n\n\nlab.Lab\n\n\n\n\n\nlab.Lab->lab.Lab.Organization\n\n\n\n\nlab.Lab->lab.Location\n\n\n\n\nlab.Lab->lab.LabMembership\n\n\n\n\nDevice\n\n\nDevice\n\n\n\n\n\nlab.Source\n\n\nlab.Source\n\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dj.Diagram(lab)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[`subject`](https://datajoint.com/docs/elements/element-animal/): general animal metadata and surgery information" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": 
"\n\n\n\n\nsubject.Line\n\n\nsubject.Line\n\n\n\n\n\nsubject.Subject.Line\n\n\nsubject.Subject.Line\n\n\n\n\n\nsubject.Line->subject.Subject.Line\n\n\n\n\nsubject.Line.Allele\n\n\nsubject.Line.Allele\n\n\n\n\n\nsubject.Line->subject.Line.Allele\n\n\n\n\nsubject.Zygosity\n\n\nsubject.Zygosity\n\n\n\n\n\nsubject.Subject.Lab\n\n\nsubject.Subject.Lab\n\n\n\n\n\nsubject.SubjectCull\n\n\nsubject.SubjectCull\n\n\n\n\n\nsubject.SubjectDeath\n\n\nsubject.SubjectDeath\n\n\n\n\n\nsubject.SubjectDeath->subject.SubjectCull\n\n\n\n\nsubject.Subject.Strain\n\n\nsubject.Subject.Strain\n\n\n\n\n\nsubject.Subject.User\n\n\nsubject.Subject.User\n\n\n\n\n\nsubject.Subject.Protocol\n\n\nsubject.Subject.Protocol\n\n\n\n\n\nsubject.Strain\n\n\nsubject.Strain\n\n\n\n\n\nsubject.Strain->subject.Subject.Strain\n\n\n\n\nsubject.Subject.Source\n\n\nsubject.Subject.Source\n\n\n\n\n\nsubject.Allele.Source\n\n\nsubject.Allele.Source\n\n\n\n\n\nsubject.Allele\n\n\nsubject.Allele\n\n\n\n\n\nsubject.Allele->subject.Zygosity\n\n\n\n\nsubject.Allele->subject.Line.Allele\n\n\n\n\nsubject.Allele->subject.Allele.Source\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsubject.Subject->subject.Zygosity\n\n\n\n\nsubject.Subject->subject.Subject.Lab\n\n\n\n\nsubject.Subject->subject.SubjectDeath\n\n\n\n\nsubject.Subject->subject.Subject.Strain\n\n\n\n\nsubject.Subject->subject.Subject.Line\n\n\n\n\nsubject.Subject->subject.Subject.User\n\n\n\n\nsubject.Subject->subject.Subject.Protocol\n\n\n\n\nsubject.Subject->subject.Subject.Source\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dj.Diagram(subject)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "subject : varchar(8) \n", - "---\n", - "subject_nickname=\"\" : varchar(64) \n", - "sex : enum('M','F','U') \n", - "subject_birth_date : date \n", - 
"subject_description=\"\" : varchar(1024) \n", - "\n" - ] - }, - { - "data": { - "text/plain": [ - "'subject : varchar(8) \\n---\\nsubject_nickname=\"\" : varchar(64) \\nsex : enum(\\'M\\',\\'F\\',\\'U\\') \\nsubject_birth_date : date \\nsubject_description=\"\" : varchar(1024) \\n'" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "subject.Subject.describe()" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": "\n\n\n\n\n148\n\n148\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\n148->surgery.Implantation\n\n\n\n\n150\n\n150\n\n\n\n150->surgery.Implantation\n\n\n\n\n149\n\n149\n\n\n\n149->surgery.Implantation\n\n\n\n\nsurgery.Hemisphere\n\n\nsurgery.Hemisphere\n\n\n\n\n\nsurgery.Hemisphere->surgery.Implantation\n\n\n\n\nsurgery.CoordinateReference\n\n\nsurgery.CoordinateReference\n\n\n\n\n\nsurgery.CoordinateReference->148\n\n\n\n\nsurgery.CoordinateReference->150\n\n\n\n\nsurgery.CoordinateReference->149\n\n\n\n\nsurgery.BrainRegion\n\n\nsurgery.BrainRegion\n\n\n\n\n\nsurgery.BrainRegion->surgery.Implantation\n\n\n\n\nsurgery.ImplantationType\n\n\nsurgery.ImplantationType\n\n\n\n\n\nsurgery.ImplantationType->surgery.Implantation\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dj.Diagram(surgery)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[`session`](https://datajoint.com/docs/elements/element-session/): General information of experimental sessions.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": 
"\n\n\n\n\nsession.Session.Attribute\n\n\nsession.Session.Attribute\n\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsession.Session->session.Session.Attribute\n\n\n\n\nsession.SessionDirectory\n\n\nsession.SessionDirectory\n\n\n\n\n\nsession.Session->session.SessionDirectory\n\n\n\n\nsession.SessionExperimenter\n\n\nsession.SessionExperimenter\n\n\n\n\n\nsession.Session->session.SessionExperimenter\n\n\n\n\nsession.SessionNote\n\n\nsession.SessionNote\n\n\n\n\n\nsession.Session->session.SessionNote\n\n\n\n\nsession.ProjectSession\n\n\nsession.ProjectSession\n\n\n\n\n\nsession.Session->session.ProjectSession\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dj.Diagram(session)" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-> subject.Subject\n", - "session_id : int \n", - "---\n", - "session_datetime : datetime \n", - "\n" - ] - }, - { - "data": { - "text/plain": [ - "'-> subject.Subject\\nsession_id : int \\n---\\nsession_datetime : datetime \\n'" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "session.Session.describe()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "[`opto`](https://github.com/datajoint/element-optogenetics): Optogenetics stimulus and timing data\n" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": 
"\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dj.Diagram(opto)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Summary and next step\n", - "\n", - "- This notebook introduced the overall structures of the schemas and tables in the workflow and relevant tools to explore the schema structure and table definitions.\n", - "\n", - "- In the next notebook [03-process](03-process.ipynb), we will introduce the detailed steps to run through `workflow-optogenetics`.\n" - ] - } - ], - "metadata": { - "jupytext": { - "encoding": "# -*- coding: utf-8 -*-" - }, - "kernelspec": { - "display_name": "Python 3.9.13 ('ele')", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "vscode": { - "interpreter": { - "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" - } - } - }, - "nbformat": 4, - 
"nbformat_minor": 4 -} diff --git a/notebooks/03-process.ipynb b/notebooks/03-process.ipynb deleted file mode 100644 index 734567d..0000000 --- a/notebooks/03-process.ipynb +++ /dev/null @@ -1,1792 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Interactively run workflow optogenetics\n", - "\n", - "- This notebook walks you through the steps in detail to run the `workflow-optogenetics`.\n", - "\n", - "- If you haven't configured the paths, refer to [01-configure](01-configure.ipynb).\n", - "\n", - "- To overview the schema structures, refer to [02-workflow-structure](02-workflow-structure.ipynb).\n", - "\n", - "- For a more thorough introduction of DataJoint functionality, please visit our [Elements user guide](https://datajoint.com/docs/elements/user-guide/) and [general documentation](https://datajoint.com/docs/core/concepts/mantra/).\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Let's change the directory to the package root directory to load the local configuration (`dj_local_conf.json`).\n" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "if os.path.basename(os.getcwd()) == \"notebooks\":\n", - " os.chdir(\"..\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## `pipeline.py`\n", - "\n", - "This script `activates` the DataJoint Elements and declares other required tables.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "[2022-12-08 15:27:54,191][INFO]: Connecting cbroz@dss-db.datajoint.io:3306\n", - "[2022-12-08 15:27:54,471][INFO]: Connected cbroz@dss-db.datajoint.io:3306\n" - ] - } - ], - "source": [ - "import datajoint as dj\n", - "from workflow_optogenetics.pipeline import lab, subject, surgery, session, opto, Device" - ] - }, - { - "cell_type": 
"markdown", - "metadata": {}, - "source": [ - "## Schema diagrams\n", - "\n", - "See also [diagram notation docs](https://datajoint.com/docs/core/concepts/getting-started/diagrams/).\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "image/svg+xml": "\n\n\n\n\nsurgery.Implantation\n\n\nsurgery.Implantation\n\n\n\n\n\nopto.OptoProtocol\n\n\nopto.OptoProtocol\n\n\n\n\n\nsurgery.Implantation->opto.OptoProtocol\n\n\n\n\nopto.OptoEvent\n\n\nopto.OptoEvent\n\n\n\n\n\nopto.OptoProtocol->opto.OptoEvent\n\n\n\n\nsubject.Subject\n\n\nsubject.Subject\n\n\n\n\n\nsubject.Subject->surgery.Implantation\n\n\n\n\nsession.Session\n\n\nsession.Session\n\n\n\n\n\nsubject.Subject->session.Session\n\n\n\n\nopto.OptoStimParams\n\n\nopto.OptoStimParams\n\n\n\n\n\nopto.OptoStimParams->opto.OptoProtocol\n\n\n\n\nopto.OptoWaveform\n\n\nopto.OptoWaveform\n\n\n\n\n\nopto.OptoWaveform->opto.OptoStimParams\n\n\n\n\nopto.OptoWaveform.Sine\n\n\nopto.OptoWaveform.Sine\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Sine\n\n\n\n\nopto.OptoWaveform.Ramp\n\n\nopto.OptoWaveform.Ramp\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Ramp\n\n\n\n\nopto.OptoWaveform.Square\n\n\nopto.OptoWaveform.Square\n\n\n\n\n\nopto.OptoWaveform->opto.OptoWaveform.Square\n\n\n\n\nopto.OptoWaveformType\n\n\nopto.OptoWaveformType\n\n\n\n\n\nopto.OptoWaveformType->opto.OptoWaveform\n\n\n\n\nsession.Session->opto.OptoProtocol\n\n\n\n", - "text/plain": [ - "" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "(\n", - " dj.Diagram(subject.Subject)\n", - " + dj.Diagram(session.Session)\n", - " + dj.Diagram(surgery.Implantation)\n", - " + dj.Diagram(opto)\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Inserting data\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### `lab` schema\n", - "\n", - "`pipeline.py` adds a Device table to the 
`lab` schema. This table, like other `Lookup` tables, has default contents, but we can always add more.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "Device.insert1(\n", - " dict(\n", - " device=\"OPTG_8\",\n", - " modality=\"Optogenetics\",\n", - " description=\"8 channel pulse sequence device\",\n", - " )\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "lab.User.insert1(\n", - " dict(user=\"User1\")\n", - ") # For the surgeon attribute in surgery.Implantation" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### `subject` schema\n" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "# \n", - "subject : varchar(8) # \n", - "---\n", - "subject_nickname=\"\" : varchar(64) # \n", - "sex : enum('M','F','U') # \n", - "subject_birth_date : date # \n", - "subject_description=\"\" : varchar(1024) # " - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "subject.Subject.heading" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "subject.Subject.insert1(\n", - " dict(\n", - " subject=\"subject3\",\n", - " sex=\"F\",\n", - " subject_birth_date=\"2020-03-03\",\n", - " subject_description=\"Optogenetic pilot subject.\",\n", - " )\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In order to conduct optogenetic stimulation, our subject must have an implant in the target brain region. Again, some `Lookup` tables have useful default content.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " \n", - "
\n", - "

reference

\n", - " \n", - "
bregma
dura
lambda
sagittal_suture
sinus
skull_surface
\n", - " \n", - "

Total: 6

\n", - " " - ], - "text/plain": [ - "*reference \n", - "+------------+\n", - "bregma \n", - "dura \n", - "lambda \n", - "sagittal_sutur\n", - "sinus \n", - "skull_surface \n", - " (Total: 6)" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "surgery.CoordinateReference()" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " \n", - "
\n", - "

hemisphere

\n", - " Brain region hemisphere\n", - "
left
middle
right
\n", - " \n", - "

Total: 3

\n", - " " - ], - "text/plain": [ - "*hemisphere \n", - "+------------+\n", - "left \n", - "middle \n", - "right \n", - " (Total: 3)" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "surgery.Hemisphere()" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "surgery.BrainRegion.insert1(\n", - " dict(region_acronym=\"dHP\", region_name=\"Dorsal Hippocampus\")\n", - ")\n", - "surgery.Implantation.insert1(\n", - " dict(\n", - " subject=\"subject3\",\n", - " implant_date=\"2022-04-01 12:13:14\",\n", - " implant_type=\"opto\",\n", - " target_region=\"dHP\",\n", - " target_hemisphere=\"left\",\n", - " surgeon=\"user1\",\n", - " )\n", - ")\n", - "\n", - "surgery.Implantation.Coordinate.insert1(\n", - " dict(\n", - " subject=\"subject3\",\n", - " implant_date=\"2022-04-01 12:13:14\",\n", - " implant_type=\"opto\",\n", - " target_region=\"dHP\",\n", - " target_hemisphere=\"left\",\n", - " ap=\"-7.9\", # anterior-posterior distance in mm\n", - " ap_ref=\"bregma\",\n", - " ml=\"-1.8\", # medial axis distance in mm\n", - " ml_ref=\"bregma\",\n", - " dv=\"5\", # dorso-ventral axis distance in mm\n", - " dv_ref=\"skull_surface\",\n", - " theta=\"11.5\", # degree rotation about ml-axis [0, 180] wrt z\n", - " phi=\"0\", # degree rotation about dv-axis [0, 360] wrt x\n", - " beta=None, # degree rotation about shank [-180, 180] wrt anterior\n", - " )\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Insert into `session` schema\n" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "-> subject.Subject\n", - "session_id : int \n", - "---\n", - "session_datetime : datetime \n", - "\n" - ] - }, - { - "data": { - "text/plain": [ - "'-> subject.Subject\\nsession_id : int \\n---\\nsession_datetime : datetime \\n'" - ] - }, - 
"execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "session.Session.describe()" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "# \n", - "subject : varchar(8) # \n", - "session_id : int # \n", - "---\n", - "session_datetime : datetime # " - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "session.Session.heading" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " \n", - "\n", - "\n", - "
\n", - "

subject

\n", - " \n", - "
\n", - "

session_id

\n", - " \n", - "
\n", - "

session_datetime

\n", - " \n", - "
subject312022-04-04 12:13:14
\n", - " \n", - "

Total: 1

\n", - " " - ], - "text/plain": [ - "*subject *session_id session_dateti\n", - "+----------+ +------------+ +------------+\n", - "subject3 1 2022-04-04 12:\n", - " (Total: 1)" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "session_key = dict(\n", - " subject=\"subject3\", session_id=\"1\", session_datetime=\"2022-04-04 12:13:14\"\n", - ")\n", - "session.Session.insert1(session_key)\n", - "session.Session()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Insert into `opto` schema\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "First, we'll add information to describe the stimulus, including waveform shape and and application parameters.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "opto.OptoWaveform.insert1(\n", - " dict(\n", - " waveform_name=\"square_10\",\n", - " waveform_type=\"square\",\n", - " waveform_description=\"Square waveform: 10%/90% on/off cycle\",\n", - " )\n", - ")\n", - "# Square is one part table of OptoWaveform.\n", - "# For sine and ramp waveforms, see the corresponding tables\n", - "opto.OptoWaveform.Square.insert1(\n", - " dict(waveform_name=\"square_10\", on_proportion=0.10, off_proportion=0.90)\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [], - "source": [ - "opto.OptoStimParams.insert1(\n", - " dict(\n", - " opto_params_id=1,\n", - " waveform_name=\"square_10\",\n", - " wavelength=470,\n", - " light_intensity=10.2,\n", - " frequency=1,\n", - " duration=241,\n", - " )\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Next, we'll describe the session in which these parameters were used with `OptoProtocol`\n" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "opto.OptoProtocol.insert1(\n", - " 
dict(\n", - " subject=\"subject3\",\n", - " session_id=\"1\",\n", - " protocol_id=\"1\",\n", - " opto_params_id=\"1\",\n", - " implant_date=\"2022-04-01 12:13:14\",\n", - " implant_type=\"opto\",\n", - " target_region=\"dHP\",\n", - " target_hemisphere=\"left\",\n", - " device=\"OPTG_4\",\n", - " )\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can describe the timing of these stimulations in `OptoEvent`.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [], - "source": [ - "opto.OptoEvent.insert(\n", - " [\n", - " dict(\n", - " subject=\"subject3\",\n", - " session_id=1,\n", - " protocol_id=1,\n", - " stim_start_time=241,\n", - " stim_end_time=482,\n", - " ),\n", - " dict(\n", - " subject=\"subject3\",\n", - " session_id=1,\n", - " protocol_id=1,\n", - " stim_start_time=482,\n", - " stim_end_time=723,\n", - " ),\n", - " ]\n", - ")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To store more experimental timing information, see documentation for [Element Event](https://datajoint.com/docs/elements/element-event/).\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Automating inserts\n", - "\n", - "This workflow provides functions for ingesting this information from csv files in `ingest.py`.\n", - "\n", - "- `ingest_subjects`: subject.Subject\n", - "- `ingest_sessions`: session.Session\n", - "- `ingest_events`: Element Event schemas\n", - "- `ingest_opto`: surgery and opto schemas\n", - "\n", - "For more information on each, see the docstring.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Help on function ingest_subjects in module workflow_optogenetics.ingest:\n", - "\n", - "ingest_subjects(subject_csv_path: str = './user_data/subjects.csv', skip_duplicates: bool = True, verbose: bool = True)\n", - " Ingest 
subjects listed in the subject column of ./user_data/subjects.csv\n", - " \n", - " Args:\n", - " subject_csv_path (str, optional): Relative path to subject csv.\n", - " Defaults to \"./user_data/subjects.csv\".\n", - " skip_duplicates (bool, optional): Skips duplicates, see DataJoint insert.\n", - " Defaults to True.\n", - " verbose (bool, optional): Provides insertion info to StdOut. Defaults to True.\n", - "\n" - ] - } - ], - "source": [ - "from workflow_optogenetics.ingest import ingest_subjects\n", - "\n", - "help(ingest_subjects)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "By default, these functions pull from files in the `user_files` directory. We can run each of these in succession with the default parameters with `ingest_all`.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "---- Inserting 0 entry(s) into subject ----\n", - "\n", - "---- Inserting 0 entry(s) into session ----\n", - "\n", - "---- Inserting 1 entry(s) into behavior_recording ----\n", - "\n", - "---- Inserting 5 entry(s) into behavior_recording__file ----\n", - "\n", - "---- Inserting 4 entry(s) into _block ----\n", - "\n", - "---- Inserting 8 entry(s) into _block__attribute ----\n", - "\n", - "---- Inserting 2 entry(s) into #trial_type ----\n", - "\n", - "---- Inserting 40 entry(s) into _trial ----\n", - "\n", - "---- Inserting 40 entry(s) into _trial__attribute ----\n", - "\n", - "---- Inserting 40 entry(s) into _block_trial ----\n", - "\n", - "---- Inserting 3 entry(s) into #event_type ----\n", - "\n", - "---- Inserting 37 entry(s) into _event ----\n", - "\n", - "---- Inserting 37 entry(s) into _trial_event ----\n", - "\n", - "---- Inserting 0 entry(s) into #opto_waveform ----\n", - "\n", - "---- Inserting 0 entry(s) into #opto_waveform__square ----\n", - "\n", - "---- Inserting 0 entry(s) into opto_stim_params ----\n", - "\n", - "---- 
Inserting 0 entry(s) into #coordinate_reference ----\n", - "\n", - "---- Inserting 0 entry(s) into brain_region ----\n", - "\n", - "---- Inserting 0 entry(s) into #user ----\n", - "\n", - "---- Inserting 0 entry(s) into implantation ----\n", - "\n", - "---- Inserting 0 entry(s) into implantation__coordinate ----\n", - "\n", - "---- Inserting 0 entry(s) into opto_protocol ----\n", - "\n", - "---- Inserting 0 entry(s) into opto_event ----\n" - ] - } - ], - "source": [ - "from workflow_optogenetics.ingest import ingest_all\n", - "\n", - "ingest_all()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Events\n", - "\n", - "The above `ingest_all()` also added behavioral events we can example in conjunction with optogenetic events. For convenience, these stimulation events are also reflected in the Block design of Element Event.\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " \n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "

subject

\n", - " \n", - "
\n", - "

session_id

\n", - " \n", - "
\n", - "

block_id

\n", - " block number (1-based indexing)\n", - "
\n", - "

trial_id

\n", - " trial number (1-based indexing)\n", - "
\n", - "

event_type

\n", - " \n", - "
\n", - "

event_start_time

\n", - " (second) relative to recording start\n", - "
\n", - "

attribute_name

\n", - " \n", - "
\n", - "

attribute_value

\n", - " \n", - "
\n", - "

attribute_blob

\n", - " \n", - "
subject3111center4.864stimulationoff=BLOB=
subject3111center10.192stimulationoff=BLOB=
subject3111center17.865stimulationoff=BLOB=
subject3113center59.128stimulationoff=BLOB=
subject3113left49.536stimulationoff=BLOB=
subject3113left64.453stimulationoff=BLOB=
subject3114center90.447stimulationoff=BLOB=
subject3114right73.844stimulationoff=BLOB=
subject3114right79.506stimulationoff=BLOB=
subject3115left101.14stimulationoff=BLOB=
subject3115right108.499stimulationoff=BLOB=
subject3119right192.237stimulationoff=BLOB=
subject31110center216.542stimulationoff=BLOB=
subject31322center509.98stimulationon=BLOB=
subject31323center534.82stimulationon=BLOB=
subject31324center555.033stimulationon=BLOB=
subject31326center605.523stimulationon=BLOB=
subject31329center673.068stimulationon=BLOB=
subject31431left725.153stimulationoff=BLOB=
subject31433center771.223stimulationoff=BLOB=
subject31433left778.669stimulationoff=BLOB=
subject31433left789.797stimulationoff=BLOB=
subject31434center798.334stimulationoff=BLOB=
subject31434left804.33stimulationoff=BLOB=
subject31435center836.522stimulationoff=BLOB=
subject31435left818.922stimulationoff=BLOB=
subject31435left831.301stimulationoff=BLOB=
subject31436center847.933stimulationoff=BLOB=
subject31436left852.572stimulationoff=BLOB=
subject31437center870.121stimulationoff=BLOB=
\n", - "

...

\n", - "

Total: 37

\n", - " " - ], - "text/plain": [ - "*subject *session_id *block_id *trial_id *event_type *event_start_t *attribute_nam attribute_valu attribute_\n", - "+----------+ +------------+ +----------+ +----------+ +------------+ +------------+ +------------+ +------------+ +--------+\n", - "subject3 1 1 1 center 4.864 stimulation off =BLOB= \n", - "subject3 1 1 1 center 10.192 stimulation off =BLOB= \n", - "subject3 1 1 1 center 17.865 stimulation off =BLOB= \n", - "subject3 1 1 3 center 59.128 stimulation off =BLOB= \n", - "subject3 1 1 3 left 49.536 stimulation off =BLOB= \n", - "subject3 1 1 3 left 64.453 stimulation off =BLOB= \n", - "subject3 1 1 4 center 90.447 stimulation off =BLOB= \n", - "subject3 1 1 4 right 73.844 stimulation off =BLOB= \n", - "subject3 1 1 4 right 79.506 stimulation off =BLOB= \n", - "subject3 1 1 5 left 101.14 stimulation off =BLOB= \n", - "subject3 1 1 5 right 108.499 stimulation off =BLOB= \n", - "subject3 1 1 9 right 192.237 stimulation off =BLOB= \n", - "subject3 1 1 10 center 216.542 stimulation off =BLOB= \n", - "subject3 1 3 22 center 509.98 stimulation on =BLOB= \n", - "subject3 1 3 23 center 534.82 stimulation on =BLOB= \n", - "subject3 1 3 24 center 555.033 stimulation on =BLOB= \n", - "subject3 1 3 26 center 605.523 stimulation on =BLOB= \n", - "subject3 1 3 29 center 673.068 stimulation on =BLOB= \n", - "subject3 1 4 31 left 725.153 stimulation off =BLOB= \n", - "subject3 1 4 33 center 771.223 stimulation off =BLOB= \n", - "subject3 1 4 33 left 778.669 stimulation off =BLOB= \n", - "subject3 1 4 33 left 789.797 stimulation off =BLOB= \n", - "subject3 1 4 34 center 798.334 stimulation off =BLOB= \n", - "subject3 1 4 34 left 804.33 stimulation off =BLOB= \n", - "subject3 1 4 35 center 836.522 stimulation off =BLOB= \n", - "subject3 1 4 35 left 818.922 stimulation off =BLOB= \n", - "subject3 1 4 35 left 831.301 stimulation off =BLOB= \n", - "subject3 1 4 36 center 847.933 stimulation off =BLOB= \n", - "subject3 1 4 36 left 852.572 
stimulation off =BLOB= \n", - "subject3 1 4 37 center 870.121 stimulation off =BLOB= \n", - " ...\n", - " (Total: 37)" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from workflow_optogenetics.pipeline import event, trial\n", - "\n", - "events_by_block = (\n", - " trial.BlockTrial * trial.TrialEvent * trial.Block.Attribute\n", - " & \"attribute_name='stimulation'\"\n", - ")\n", - "events_by_block" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can readily compare the count of events or event types across 'on' and 'off' stimulation conditions." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " \n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "

subject

\n", - " \n", - "
\n", - "

session_id

\n", - " \n", - "
\n", - "

block_id

\n", - " block number (1-based indexing)\n", - "
\n", - "

trial_id

\n", - " trial number (1-based indexing)\n", - "
\n", - "

event_type

\n", - " \n", - "
\n", - "

event_start_time

\n", - " (second) relative to recording start\n", - "
\n", - "

attribute_name

\n", - " \n", - "
\n", - "

attribute_value

\n", - " \n", - "
\n", - "

attribute_blob

\n", - " \n", - "
subject31322center509.98stimulationon=BLOB=
subject31323center534.82stimulationon=BLOB=
subject31324center555.033stimulationon=BLOB=
subject31326center605.523stimulationon=BLOB=
subject31329center673.068stimulationon=BLOB=
\n", - " \n", - "

Total: 5

\n", - " " - ], - "text/plain": [ - "*subject *session_id *block_id *trial_id *event_type *event_start_t *attribute_nam attribute_valu attribute_\n", - "+----------+ +------------+ +----------+ +----------+ +------------+ +------------+ +------------+ +------------+ +--------+\n", - "subject3 1 3 22 center 509.98 stimulation on =BLOB= \n", - "subject3 1 3 23 center 534.82 stimulation on =BLOB= \n", - "subject3 1 3 24 center 555.033 stimulation on =BLOB= \n", - "subject3 1 3 26 center 605.523 stimulation on =BLOB= \n", - "subject3 1 3 29 center 673.068 stimulation on =BLOB= \n", - " (Total: 5)" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events_by_block & \"attribute_value='on'\"" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "\n", - " \n", - " \n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " \n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "
\n", - "

subject

\n", - " \n", - "
\n", - "

session_id

\n", - " \n", - "
\n", - "

block_id

\n", - " block number (1-based indexing)\n", - "
\n", - "

trial_id

\n", - " trial number (1-based indexing)\n", - "
\n", - "

event_type

\n", - " \n", - "
\n", - "

event_start_time

\n", - " (second) relative to recording start\n", - "
\n", - "

attribute_name

\n", - " \n", - "
\n", - "

attribute_value

\n", - " \n", - "
\n", - "

attribute_blob

\n", - " \n", - "
subject3111center4.864stimulationoff=BLOB=
subject3111center10.192stimulationoff=BLOB=
subject3111center17.865stimulationoff=BLOB=
subject3113center59.128stimulationoff=BLOB=
subject3113left49.536stimulationoff=BLOB=
subject3113left64.453stimulationoff=BLOB=
subject3114center90.447stimulationoff=BLOB=
subject3114right73.844stimulationoff=BLOB=
subject3114right79.506stimulationoff=BLOB=
subject3115left101.14stimulationoff=BLOB=
subject3115right108.499stimulationoff=BLOB=
subject3119right192.237stimulationoff=BLOB=
subject31110center216.542stimulationoff=BLOB=
subject31431left725.153stimulationoff=BLOB=
subject31433center771.223stimulationoff=BLOB=
subject31433left778.669stimulationoff=BLOB=
subject31433left789.797stimulationoff=BLOB=
subject31434center798.334stimulationoff=BLOB=
subject31434left804.33stimulationoff=BLOB=
subject31435center836.522stimulationoff=BLOB=
subject31435left818.922stimulationoff=BLOB=
subject31435left831.301stimulationoff=BLOB=
subject31436center847.933stimulationoff=BLOB=
subject31436left852.572stimulationoff=BLOB=
subject31437center870.121stimulationoff=BLOB=
subject31437right877.152stimulationoff=BLOB=
subject31437right881.91stimulationoff=BLOB=
subject31439center917.759stimulationoff=BLOB=
subject31439center923.011stimulationoff=BLOB=
subject31440center940.588stimulationoff=BLOB=
\n", - "

...

\n", - "

Total: 32

\n", - " " - ], - "text/plain": [ - "*subject *session_id *block_id *trial_id *event_type *event_start_t *attribute_nam attribute_valu attribute_\n", - "+----------+ +------------+ +----------+ +----------+ +------------+ +------------+ +------------+ +------------+ +--------+\n", - "subject3 1 1 1 center 4.864 stimulation off =BLOB= \n", - "subject3 1 1 1 center 10.192 stimulation off =BLOB= \n", - "subject3 1 1 1 center 17.865 stimulation off =BLOB= \n", - "subject3 1 1 3 center 59.128 stimulation off =BLOB= \n", - "subject3 1 1 3 left 49.536 stimulation off =BLOB= \n", - "subject3 1 1 3 left 64.453 stimulation off =BLOB= \n", - "subject3 1 1 4 center 90.447 stimulation off =BLOB= \n", - "subject3 1 1 4 right 73.844 stimulation off =BLOB= \n", - "subject3 1 1 4 right 79.506 stimulation off =BLOB= \n", - "subject3 1 1 5 left 101.14 stimulation off =BLOB= \n", - "subject3 1 1 5 right 108.499 stimulation off =BLOB= \n", - "subject3 1 1 9 right 192.237 stimulation off =BLOB= \n", - "subject3 1 1 10 center 216.542 stimulation off =BLOB= \n", - "subject3 1 4 31 left 725.153 stimulation off =BLOB= \n", - "subject3 1 4 33 center 771.223 stimulation off =BLOB= \n", - "subject3 1 4 33 left 778.669 stimulation off =BLOB= \n", - "subject3 1 4 33 left 789.797 stimulation off =BLOB= \n", - "subject3 1 4 34 center 798.334 stimulation off =BLOB= \n", - "subject3 1 4 34 left 804.33 stimulation off =BLOB= \n", - "subject3 1 4 35 center 836.522 stimulation off =BLOB= \n", - "subject3 1 4 35 left 818.922 stimulation off =BLOB= \n", - "subject3 1 4 35 left 831.301 stimulation off =BLOB= \n", - "subject3 1 4 36 center 847.933 stimulation off =BLOB= \n", - "subject3 1 4 36 left 852.572 stimulation off =BLOB= \n", - "subject3 1 4 37 center 870.121 stimulation off =BLOB= \n", - "subject3 1 4 37 right 877.152 stimulation off =BLOB= \n", - "subject3 1 4 37 right 881.91 stimulation off =BLOB= \n", - "subject3 1 4 39 center 917.759 stimulation off =BLOB= \n", - "subject3 1 4 39 center 
923.011 stimulation off =BLOB= \n", - "subject3 1 4 40 center 940.588 stimulation off =BLOB= \n", - " ...\n", - " (Total: 32)" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "events_by_block & \"attribute_value='off'\"" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Next Steps\n", - "\n", - "Interested in using Element Optogenetics for your own project? Reach out to the DataJoint team via [email](mailto:support@datajoint.com) or [Slack](https://datajoint.slack.com).\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3.9.13 ('ele')", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "vscode": { - "interpreter": { - "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/06-drop-optional.ipynb b/notebooks/06-drop-optional.ipynb deleted file mode 100644 index d3dd74b..0000000 --- a/notebooks/06-drop-optional.ipynb +++ /dev/null @@ -1,96 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Drop schemas\n", - "\n", - "- Schemas are not typically dropped in a production workflow with real data in it.\n", - "- At the developmental phase, it might be required for the table redesign.\n", - "- When dropping all schemas is needed, the following is the dependency order.\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Change into the parent directory to find the `dj_local_conf.json` file.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - 
"import os\n", - "\n", - "if os.path.basename(os.getcwd()) == \"notebooks\":\n", - " os.chdir(\"..\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from workflow_optogenetics.pipeline import (\n", - " trial,\n", - " opto,\n", - " Device,\n", - " surgery,\n", - " event,\n", - " session,\n", - " subject,\n", - " lab,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# trial.schema.drop()\n", - "# opto.schema.drop()\n", - "# Device.drop_quick()\n", - "# surgery.schema.drop()\n", - "# event.schema.drop()\n", - "# session.schema.drop()\n", - "# subject.schema.drop()\n", - "# lab.schema.drop()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "ele", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.16" - }, - "vscode": { - "interpreter": { - "hash": "d00c4ad21a7027bf1726d6ae3a9a6ef39c8838928eca5a3d5f51f3eb68720410" - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} From 3b0a3eb739a7a0cb3a028f0abbf11316f0c968b5 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 3 Apr 2023 10:00:37 -0500 Subject: [PATCH 11/58] Update changelog --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 824f14c..504b734 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,8 +3,8 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
-## [0.1.0] - 2023-02-20 +## [0.1.0] - 2023-04-03 -+ Add - Version ++ Add - First release [0.1.0]: https://github.com/datajoint/workflow-optogenetics/releases/tag/0.1.0 From cfc400368f720f2af21b04d41c2f9da0f37bf01a Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 3 Apr 2023 10:01:00 -0500 Subject: [PATCH 12/58] Update requirements --- requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 99b43e4..7b71b95 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,5 +5,4 @@ element-interface>=0.3.0 element-lab>=0.1.1 element-optogenetics>=0.1.0 element-session>=0.1.2 -ipykernel>=6.0.1 -jupytext>=1.13.7 +ipykernel>=6.0.1 \ No newline at end of file From 0a8e515463515682ba2cb43f7bcf136cc3324514 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Wed, 5 Apr 2023 21:36:14 +0000 Subject: [PATCH 13/58] Add vscode settings --- .vscode/launch.json | 16 ++++++++++++++++ .vscode/settings.json | 15 +++++++++++++++ 2 files changed, 31 insertions(+) create mode 100644 .vscode/launch.json create mode 100644 .vscode/settings.json diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..2b2502c --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Current File", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "justMyCode": false + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..e1160fa --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,15 @@ +{ + "editor.formatOnPaste": false, + "editor.formatOnSave": true, + "editor.rulers": [ + 88 + ], + "python.formatting.provider": "black", + "[python]": { + "editor.defaultFormatter": null + }, + "[markdown]": { + "editor.defaultFormatter": "disable" + }, + "files.autoSave": "off" +} \ No newline at end of file From bd21ad96070dc66bb25242f37bba19919ff2ef7d Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 11 Apr 2023 02:39:44 +0000 Subject: [PATCH 14/58] Rename file --- docker/{Dockerfile.test => Dockerfile} | 1 - 1 file changed, 1 deletion(-) rename docker/{Dockerfile.test => Dockerfile} (99%) diff --git a/docker/Dockerfile.test b/docker/Dockerfile similarity index 99% rename from docker/Dockerfile.test rename to docker/Dockerfile index 4f47702..4c6347a 100755 --- a/docker/Dockerfile.test +++ b/docker/Dockerfile @@ -8,7 +8,6 @@ RUN /entrypoint.sh echo "Installed dependencies." 
WORKDIR /main/workflow-optogenetics - # Always move local - conditional install in setup.sh COPY --chown=anaconda:anaconda ./element-lab/ /main/element-lab/ COPY --chown=anaconda:anaconda ./element-animal/ /main/element-animal/ From b4ec243d4123c7deb6863d0d81db429542710c86 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 11 Apr 2023 02:41:59 +0000 Subject: [PATCH 15/58] Update for rename --- docker/docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 1cdba07..26868a8 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -26,7 +26,7 @@ services: optogenetics: build: context: ../../ - dockerfile: ./workflow-optogenetics/docker/Dockerfile.test + dockerfile: ./workflow-optogenetics/docker/Dockerfile args: - GITHUB_USERNAME=${GITHUB_USERNAME} image: workflow-optogenetics:0.1.0 From baf317aae01d3e3fd568055a65cfd4d145c0f04a Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 11 Apr 2023 02:52:17 +0000 Subject: [PATCH 16/58] Update pipeline --- workflow_optogenetics/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workflow_optogenetics/pipeline.py b/workflow_optogenetics/pipeline.py index de2ce7d..77454fd 100644 --- a/workflow_optogenetics/pipeline.py +++ b/workflow_optogenetics/pipeline.py @@ -3,7 +3,7 @@ from element_animal.surgery import Implantation # Dependency for opto schema from element_event import event, trial from element_lab import lab -from element_lab.lab import User # Alias for session schema +from element_lab.lab import User as Experimenter # Alias for session schema from element_lab.lab import Lab, Project, Protocol, Source from element_optogenetics import optogenetics as opto from element_session import session_with_id as session From 1535fdad1f1215e2f0668e23d72578be60d558cf Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 11 Apr 2023 02:54:23 +0000 Subject: [PATCH 17/58] Add Compose file 
--- docker-compose.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 docker-compose.yaml diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..0a2c1fc --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,15 @@ +# MYSQL_VER=8.0 docker compose up --build +version: "2.4" +services: + db: + restart: always + image: datajoint/mysql:${MYSQL_VER} + environment: + - MYSQL_ROOT_PASSWORD=${DJ_PASS} + ports: + - "3306:3306" + healthcheck: + test: [ "CMD", "mysqladmin", "ping", "-h", "localhost" ] + timeout: 15s + retries: 10 + interval: 15s \ No newline at end of file From afc7ef5ccc607d8d449f7f730e76b2cf1b227df5 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 14 Apr 2023 22:36:40 -0500 Subject: [PATCH 18/58] Update pipeline --- workflow_optogenetics/pipeline.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/workflow_optogenetics/pipeline.py b/workflow_optogenetics/pipeline.py index 77454fd..37f67e3 100644 --- a/workflow_optogenetics/pipeline.py +++ b/workflow_optogenetics/pipeline.py @@ -1,26 +1,22 @@ from element_animal import subject, surgery from element_animal.subject import Subject # Dependency for session schema -from element_animal.surgery import Implantation # Dependency for opto schema -from element_event import event, trial +from element_animal.surgery import Implantation # Dependency for optogenetics schema from element_lab import lab from element_lab.lab import User as Experimenter # Alias for session schema from element_lab.lab import Lab, Project, Protocol, Source -from element_optogenetics import optogenetics as opto +from element_optogenetics import optogenetics from element_session import session_with_id as session from element_session.session_with_id import Session from . 
import db_prefix -from .paths import get_opto_root_data_dir from .reference import Device __all__ = [ - "event", "lab", - "opto", + "optogenetics", "session", "subject", "surgery", - "trial", "Device", "Implantation", "Lab", @@ -30,19 +26,16 @@ "Source", "Subject", "User", - "get_opto_root_data_dir", ] -# Activate "lab", "subject", "surgery", "session", "event", "trial" schemas ------- +# Activate "lab", "subject", "surgery", "session" schemas ------- lab.activate(db_prefix + "lab") subject.activate(db_prefix + "subject", linking_module=__name__) surgery.activate(db_prefix + "surgery", linking_module=__name__) session.activate(db_prefix + "session", linking_module=__name__) -trial.activate(db_prefix + "trial", db_prefix + "event", linking_module=__name__) +# Activate "optogenetics" schema ------------- -# ------------- Activate "opto" schema ------------- - -opto.activate(db_prefix + "opto", linking_module=__name__) +optogenetics.activate(db_prefix + "optogenetics", linking_module=__name__) From 6a938d754d03ada05f5cd0379c95194a074343b1 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 14 Apr 2023 22:43:55 -0500 Subject: [PATCH 19/58] Remove csv files --- user_data/behavior_recordings.csv | 6 ----- user_data/blocks.csv | 9 ------- user_data/events.csv | 38 ---------------------------- user_data/opto_events.csv | 3 --- user_data/opto_sessions.csv | 2 -- user_data/opto_surgeries.csv | 2 -- user_data/opto_waveforms.csv | 2 -- user_data/sessions.csv | 2 -- user_data/subjects.csv | 2 -- user_data/trials.csv | 41 ------------------------------- 10 files changed, 107 deletions(-) delete mode 100644 user_data/behavior_recordings.csv delete mode 100644 user_data/blocks.csv delete mode 100644 user_data/events.csv delete mode 100644 user_data/opto_events.csv delete mode 100644 user_data/opto_sessions.csv delete mode 100644 user_data/opto_surgeries.csv delete mode 100644 user_data/opto_waveforms.csv delete mode 100644 user_data/sessions.csv delete mode 100644 
user_data/subjects.csv delete mode 100644 user_data/trials.csv diff --git a/user_data/behavior_recordings.csv b/user_data/behavior_recordings.csv deleted file mode 100644 index 9fc66db..0000000 --- a/user_data/behavior_recordings.csv +++ /dev/null @@ -1,6 +0,0 @@ -subject,session_id,filepath -subject3,1,./user_data/blocks.csv -subject3,1,./user_data/trials.csv -subject3,1,./user_data/events.csv -subject3,1,./user_data/opto_session.csv -subject3,1,./user_data/opto_surgery.csv diff --git a/user_data/blocks.csv b/user_data/blocks.csv deleted file mode 100644 index 755e6c5..0000000 --- a/user_data/blocks.csv +++ /dev/null @@ -1,9 +0,0 @@ -subject,session_id,block_id,block_start_time,block_stop_time,attribute_name,attribute_value -subject3,1,1,0,241,stimulation,off -subject3,1,2,241,482,stimulation,on -subject3,1,3,482,723,stimulation,on -subject3,1,4,723,964,stimulation,off -subject3,1,1,0,241,environment,light -subject3,1,2,241,482,environment,dark -subject3,1,3,482,723,environment,light -subject3,1,4,723,964,environment,dark diff --git a/user_data/events.csv b/user_data/events.csv deleted file mode 100644 index 457abfc..0000000 --- a/user_data/events.csv +++ /dev/null @@ -1,38 +0,0 @@ -subject,session_id,trial_id,event_id,event_start_time,event_type -subject3,1,1,1,4.864,center -subject3,1,1,2,10.192,center -subject3,1,1,3,17.865,center -subject3,1,3,1,49.536,left -subject3,1,3,2,59.128,center -subject3,1,3,3,64.453,left -subject3,1,4,1,73.844,right -subject3,1,4,2,79.506,right -subject3,1,4,3,90.447,center -subject3,1,5,1,101.14,left -subject3,1,5,2,108.499,right -subject3,1,9,1,192.237,right -subject3,1,10,1,216.542,center -subject3,1,22,1,509.98,center -subject3,1,23,1,534.82,center -subject3,1,24,1,555.033,center -subject3,1,26,1,605.523,center -subject3,1,29,1,673.068,center -subject3,1,31,1,725.153,left -subject3,1,33,1,771.223,center -subject3,1,33,2,778.669,left -subject3,1,33,3,789.797,left -subject3,1,34,1,798.334,center -subject3,1,34,2,804.33,left 
-subject3,1,35,1,818.922,left -subject3,1,35,2,831.301,left -subject3,1,35,3,836.522,center -subject3,1,36,1,847.933,center -subject3,1,36,2,852.572,left -subject3,1,37,1,870.121,center -subject3,1,37,2,877.152,right -subject3,1,37,3,881.91,right -subject3,1,39,1,917.759,center -subject3,1,39,2,923.011,center -subject3,1,40,1,940.588,center -subject3,1,40,2,944.6,right -subject3,1,40,3,954.637,left diff --git a/user_data/opto_events.csv b/user_data/opto_events.csv deleted file mode 100644 index be34d4e..0000000 --- a/user_data/opto_events.csv +++ /dev/null @@ -1,3 +0,0 @@ -subject,session_id,protocol_id,stim_start_time,stim_end_time -subject3,1,1,241,482 -subject3,1,1,482,723 diff --git a/user_data/opto_sessions.csv b/user_data/opto_sessions.csv deleted file mode 100644 index b44ceb6..0000000 --- a/user_data/opto_sessions.csv +++ /dev/null @@ -1,2 +0,0 @@ -subject,session_id,protocol_id,opto_params_id,implant_date,implant_type,target_region,target_hemisphere -subject3,1,1,1,2022-04-01 12:13:14,opto,dHP,left diff --git a/user_data/opto_surgeries.csv b/user_data/opto_surgeries.csv deleted file mode 100644 index f61f3b3..0000000 --- a/user_data/opto_surgeries.csv +++ /dev/null @@ -1,2 +0,0 @@ -subject,implant_date,reference,region_acronym,region_name,hemisphere,implant_type,ap,ap_ref,ml,ml_ref,dv,dv_ref,theta,phi,user,surgeon,target_region,target_hemisphere -subject3,2022-04-01 12:13:14,bregma,dHP,Dorsal Hippocampus,left,opto,-7.9,bregma,-1.8,bregma,5,skull_surface,11.5,0,user1,user1,dHP,left diff --git a/user_data/opto_waveforms.csv b/user_data/opto_waveforms.csv deleted file mode 100644 index 077ece6..0000000 --- a/user_data/opto_waveforms.csv +++ /dev/null @@ -1,2 +0,0 @@ -waveform_type,waveform_name,waveform_description,on_proportion,off_proportion,opto_params_id,wavelength,light_intensity,frequency,duration -square,square_10,Square waveform with 10-90 on-off cycle,.10,.90,1,470,10.2,1,241 diff --git a/user_data/sessions.csv b/user_data/sessions.csv deleted file 
mode 100644 index 8db7b37..0000000 --- a/user_data/sessions.csv +++ /dev/null @@ -1,2 +0,0 @@ -subject,session_dir,session_id,session_datetime -subject3,subject3/opto_session1/,1,2022-04-04 12:13:14 diff --git a/user_data/subjects.csv b/user_data/subjects.csv deleted file mode 100644 index 47510f9..0000000 --- a/user_data/subjects.csv +++ /dev/null @@ -1,2 +0,0 @@ -subject,sex,subject_birth_date,subject_description -subject3,F,2022-03-03,Optogenetic pilot subject diff --git a/user_data/trials.csv b/user_data/trials.csv deleted file mode 100644 index df14974..0000000 --- a/user_data/trials.csv +++ /dev/null @@ -1,41 +0,0 @@ -subject,session_id,block_id,trial_id,trial_start_time,trial_stop_time,trial_type,attribute_name,attribute_value -subject3,1,1,1,0.393,20.393,stim,lumen,887 -subject3,1,1,2,24.187,44.187,ctrl,lumen,994 -subject3,1,1,3,47.747,67.747,stim,lumen,887 -subject3,1,1,4,71.615,91.615,stim,lumen,966 -subject3,1,1,5,95.56,115.56,stim,lumen,559 -subject3,1,1,6,119.373,139.373,ctrl,lumen,555 -subject3,1,1,7,143.113,163.113,stim,lumen,849 -subject3,1,1,8,166.702,186.702,ctrl,lumen,593 -subject3,1,1,9,190.539,210.539,ctrl,lumen,548 -subject3,1,1,10,214.382,234.382,ctrl,lumen,974 -subject3,1,2,11,241.255,261.255,stim,lumen,0 -subject3,1,2,12,264.76,284.76,ctrl,lumen,0 -subject3,1,2,13,288.591,308.591,ctrl,lumen,0 -subject3,1,2,14,312.1,332.1,stim,lumen,0 -subject3,1,2,15,336.024,356.024,ctrl,lumen,0 -subject3,1,2,16,359.8,379.8,stim,lumen,0 -subject3,1,2,17,383.357,403.357,ctrl,lumen,0 -subject3,1,2,18,407.129,427.129,ctrl,lumen,0 -subject3,1,2,19,431.011,451.011,ctrl,lumen,0 -subject3,1,2,20,454.87,474.87,stim,lumen,0 -subject3,1,3,21,482.043,502.043,ctrl,lumen,775 -subject3,1,3,22,505.609,525.609,stim,lumen,887 -subject3,1,3,23,529.228,549.228,ctrl,lumen,972 -subject3,1,3,24,552.769,572.769,stim,lumen,963 -subject3,1,3,25,576.278,596.278,ctrl,lumen,682 -subject3,1,3,26,600.029,620.029,stim,lumen,746 -subject3,1,3,27,623.997,643.997,stim,lumen,793 
-subject3,1,3,28,647.847,667.847,stim,lumen,995 -subject3,1,3,29,671.532,691.532,ctrl,lumen,866 -subject3,1,3,30,695.186,715.186,ctrl,lumen,501 -subject3,1,4,31,723.47,743.47,ctrl,lumen,0 -subject3,1,4,32,747.186,767.186,stim,lumen,0 -subject3,1,4,33,771.171,791.171,ctrl,lumen,0 -subject3,1,4,34,794.755,814.755,stim,lumen,0 -subject3,1,4,35,818.322,838.322,stim,lumen,0 -subject3,1,4,36,842.046,862.046,ctrl,lumen,0 -subject3,1,4,37,865.999,885.999,stim,lumen,0 -subject3,1,4,38,889.587,909.587,stim,lumen,0 -subject3,1,4,39,913.564,933.564,stim,lumen,0 -subject3,1,4,40,937.31,957.31,stim,lumen,0 From e8ea93798e62be48fbbab508d1c79b63159ba9ea Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sat, 15 Apr 2023 09:43:19 -0500 Subject: [PATCH 20/58] Remove path function --- workflow_optogenetics/paths.py | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 workflow_optogenetics/paths.py diff --git a/workflow_optogenetics/paths.py b/workflow_optogenetics/paths.py deleted file mode 100644 index 281fffc..0000000 --- a/workflow_optogenetics/paths.py +++ /dev/null @@ -1,8 +0,0 @@ -import pathlib - -import datajoint as dj - - -def get_opto_root_data_dir(): - data_dir = dj.config.get("custom", {}).get("opto_root_data_dir", None) - return pathlib.Path(data_dir) if data_dir else None From 14c2dd5018c218b2957f3eb46c55600de4a3dc9c Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sat, 15 Apr 2023 09:43:38 -0500 Subject: [PATCH 21/58] Update requirements --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 7b71b95..38ed1cf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ datajoint>=0.13.0 element-animal>=0.1.2 element-event>=0.1.2 -element-interface>=0.3.0 +element-interface>=0.5.1 element-lab>=0.1.1 element-optogenetics>=0.1.0 element-session>=0.1.2 From 49b4ba338da155beb54a80dd52e6091a195c47d4 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sat, 15 Apr 2023 09:43:52 
-0500 Subject: [PATCH 22/58] Remove ingestion pytest --- tests/test_ingest.py | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 tests/test_ingest.py diff --git a/tests/test_ingest.py b/tests/test_ingest.py deleted file mode 100644 index afcbf93..0000000 --- a/tests/test_ingest.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Tests ingestion into schema tables: Lab, Subject, Session - 1. Assert length of data populated with conftest - 2. Assert exact matches of inserted data for key tables -""" - - -def test_ingest(pipeline, ingest_csvs): - """Check successful ingestion of csv data""" - import datetime - - subject = pipeline["subject"] - session = pipeline["session"] - surgery = pipeline["surgery"] - opto = pipeline["opto"] - - table_lengths = [ - (subject.Subject(), 1, "subject3"), - (session.Session(), 1, datetime.datetime(2022, 4, 4, 12, 13, 14)), - (surgery.Implantation.Coordinate(), 1, 11.5), - (opto.OptoStimParams(), 1, "square_10"), - (opto.OptoEvent(), 2, 482), - ] - - for t in table_lengths: - assert len(t[0]) == t[1], f"Check length of {t[0].full_table_name}" - assert t[2] in t[0].fetch()[0], f"Check contents of {t[0].full_table_name}" From 04abda778e6f965c1cf277eaa1774681c69488bc Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sun, 16 Apr 2023 09:46:02 -0500 Subject: [PATCH 23/58] Add dev container --- .devcontainer/Dockerfile | 23 +++++++++++++++++++++++ .devcontainer/devcontainer.json | 26 ++++++++++++++++++++++++++ .devcontainer/docker-compose.yaml | 17 +++++++++++++++++ 3 files changed, 66 insertions(+) create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/devcontainer.json create mode 100644 .devcontainer/docker-compose.yaml diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..36c6238 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,23 @@ +FROM mcr.microsoft.com/devcontainers/python:3.9-bullseye + +RUN \ + # dev setup + apt update && \ + apt-get 
install bash-completion graphviz default-mysql-client -y && \ + pip install --no-cache-dir --upgrade black pip + +COPY ./requirements.txt /tmp/ +RUN \ + # workflow dependencies + pip install --no-cache-dir -r /tmp/requirements.txt && \ + # clean up + rm /tmp/requirements.txt && \ + apt-get clean + +ENV DJ_HOST fakeservices.datajoint.io +ENV DJ_USER root +ENV DJ_PASS simple + +ENV DATABASE_PREFIX neuro_ + +USER vscode \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..204e883 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,26 @@ +{ + "name": "Tutorial", + "dockerComposeFile": "docker-compose.yaml", + "service": "app", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", + "remoteEnv": { + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" + }, + "onCreateCommand": "pip install -e . && MYSQL_VER=8.0 docker compose down && MYSQL_VER=8.0 docker compose up --build --wait", + "postStartCommand": "docker volume prune -f", + "hostRequirements": { + "cpus": 2, + "memory": "4gb", + "storage": "8gb" + }, + "forwardPorts": [ + 3306 + ], + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ] + } + } +} \ No newline at end of file diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml new file mode 100644 index 0000000..83d422d --- /dev/null +++ b/.devcontainer/docker-compose.yaml @@ -0,0 +1,17 @@ +version: "3" +services: + app: + cpus: 2 + mem_limit: 4g + build: + context: .. 
+ dockerfile: ./.devcontainer/Dockerfile + extra_hosts: + - fakeservices.datajoint.io:127.0.0.1 + privileged: true # only because of dind + devices: + - /dev/fuse + cap_add: + - SYS_ADMIN + security_opt: + - apparmor:unconfined \ No newline at end of file From e43f79a7d6c80f7af214d6eb7f132c465325e231 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sun, 16 Apr 2023 09:53:25 -0500 Subject: [PATCH 24/58] Update requirements for testing --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 38ed1cf..3dd2473 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,6 @@ element-animal>=0.1.2 element-event>=0.1.2 element-interface>=0.5.1 element-lab>=0.1.1 -element-optogenetics>=0.1.0 +element-optogenetics @ git+https://github.com/datajoint/element-optogenetics.git element-session>=0.1.2 ipykernel>=6.0.1 \ No newline at end of file From 7f1e18b5fdfb9862ece97004d6d418949a41c07d Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sun, 16 Apr 2023 10:19:39 -0500 Subject: [PATCH 25/58] Update Docker and Compose files --- .devcontainer/Dockerfile | 4 +--- .devcontainer/docker-compose.yaml | 2 -- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 36c6238..dcd5285 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -18,6 +18,4 @@ ENV DJ_HOST fakeservices.datajoint.io ENV DJ_USER root ENV DJ_PASS simple -ENV DATABASE_PREFIX neuro_ - -USER vscode \ No newline at end of file +ENV DATABASE_PREFIX neuro_ \ No newline at end of file diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml index 83d422d..be81d2b 100644 --- a/.devcontainer/docker-compose.yaml +++ b/.devcontainer/docker-compose.yaml @@ -9,8 +9,6 @@ services: extra_hosts: - fakeservices.datajoint.io:127.0.0.1 privileged: true # only because of dind - devices: - - /dev/fuse cap_add: - SYS_ADMIN security_opt: From 
644a5bb5ba9285417b43c2a2138e72b14cc52c73 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sun, 16 Apr 2023 20:22:52 -0500 Subject: [PATCH 26/58] Increase resources --- .devcontainer/devcontainer.json | 6 +++--- .devcontainer/docker-compose.yaml | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 204e883..47d92ee 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -9,9 +9,9 @@ "onCreateCommand": "pip install -e . && MYSQL_VER=8.0 docker compose down && MYSQL_VER=8.0 docker compose up --build --wait", "postStartCommand": "docker volume prune -f", "hostRequirements": { - "cpus": 2, - "memory": "4gb", - "storage": "8gb" + "cpus": 4, + "memory": "8gb", + "storage": "32gb" }, "forwardPorts": [ 3306 diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml index be81d2b..867b95a 100644 --- a/.devcontainer/docker-compose.yaml +++ b/.devcontainer/docker-compose.yaml @@ -1,14 +1,13 @@ version: "3" services: app: - cpus: 2 - mem_limit: 4g + cpus: 4 + mem_limit: 8g build: context: .. 
dockerfile: ./.devcontainer/Dockerfile extra_hosts: - fakeservices.datajoint.io:127.0.0.1 - privileged: true # only because of dind cap_add: - SYS_ADMIN security_opt: From 1948bd4fe29f79777fc2a253b5a7329ddbeaefb7 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sun, 16 Apr 2023 20:38:04 -0500 Subject: [PATCH 27/58] Revert base image --- .devcontainer/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index dcd5285..faba18c 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:3.9-bullseye +FROM mcr.microsoft.com/devcontainers/python:3.7-bullseye RUN \ # dev setup From 174d71dccd14ef8622e55987321bc7735d26732d Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 17 Apr 2023 02:15:04 +0000 Subject: [PATCH 28/58] Update Docker and Compose files --- .devcontainer/Dockerfile | 32 ++++++++++++++++++++++++++----- .devcontainer/docker-compose.yaml | 5 +++++ 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index faba18c..a65d696 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,14 +1,33 @@ -FROM mcr.microsoft.com/devcontainers/python:3.7-bullseye +FROM python:3.9-slim + +RUN \ + adduser --system --disabled-password --shell /bin/bash vscode && \ + # install docker + apt-get update && \ + apt-get install ca-certificates curl gnupg lsb-release -y && \ + mkdir -m 0755 -p /etc/apt/keyrings && \ + curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg && \ + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null && \ + apt-get update && \ + apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin 
docker-compose-plugin -y && \ + usermod -aG docker vscode && \ + apt-get clean RUN \ # dev setup apt update && \ - apt-get install bash-completion graphviz default-mysql-client -y && \ - pip install --no-cache-dir --upgrade black pip - + apt-get install sudo git bash-completion graphviz default-mysql-client s3fs procps -y && \ + usermod -aG sudo vscode && \ + echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers && \ + pip install --no-cache-dir --upgrade black pip && \ + echo '. /etc/bash_completion' >> /home/vscode/.bashrc && \ + echo 'export PS1="\[\e[32;1m\]\u\[\e[m\]@\[\e[34;1m\]\H\[\e[m\]:\[\e[33;1m\]\w\[\e[m\]$ "' >> /home/vscode/.bashrc && \ + # dircolors -b >> /home/vscode/.bashrc && \ # somehow fix colors + apt-get clean COPY ./requirements.txt /tmp/ RUN \ # workflow dependencies + apt-get install gcc ffmpeg libsm6 libxext6 -y && \ pip install --no-cache-dir -r /tmp/requirements.txt && \ # clean up rm /tmp/requirements.txt && \ @@ -18,4 +37,7 @@ ENV DJ_HOST fakeservices.datajoint.io ENV DJ_USER root ENV DJ_PASS simple -ENV DATABASE_PREFIX neuro_ \ No newline at end of file +ENV DATABASE_PREFIX neuro_ + +USER vscode +CMD bash -c "sudo rm /var/run/docker.pid; sudo dockerd" \ No newline at end of file diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml index 867b95a..01c47ca 100644 --- a/.devcontainer/docker-compose.yaml +++ b/.devcontainer/docker-compose.yaml @@ -8,6 +8,11 @@ services: dockerfile: ./.devcontainer/Dockerfile extra_hosts: - fakeservices.datajoint.io:127.0.0.1 + volumes: + - ../..:/workspaces:cached + privileged: true # only because of dind + devices: + - /dev/fuse cap_add: - SYS_ADMIN security_opt: From 914b3d31900c9890a99f279221a2b2691b23f8d4 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 21:49:45 +0000 Subject: [PATCH 29/58] Update tests --- tests/test_pipeline_generation.py | 33 ++++++++++++++++--------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git 
a/tests/test_pipeline_generation.py b/tests/test_pipeline_generation.py index 3692f69..85fb97a 100644 --- a/tests/test_pipeline_generation.py +++ b/tests/test_pipeline_generation.py @@ -1,28 +1,29 @@ +from . import pipeline + + def test_upstream_pipeline(pipeline): session = pipeline["session"] surgery = pipeline["surgery"] subject = pipeline["subject"] - # test connection Subject->Session - assert subject.Subject.full_table_name == session.Session.parents()[0] + # Test connection from Subject to Session + assert subject.Subject.full_table_name in session.Session.parents() assert subject.Subject.full_table_name in surgery.Implantation.parents() -def test_opto_pipeline(pipeline): +def test_optogenetics_pipeline(pipeline): session = pipeline["session"] surgery = pipeline["surgery"] - opto = pipeline["opto"] + optogenetics = pipeline["optogenetics"] Device = pipeline["Device"] - # test connection opto.OptoProtocol -> parents - opto_parent_links = opto.OptoProtocol.parents() - opto_parent_list = [ - session.Session, - opto.OptoStimParams, - surgery.Implantation, - Device, - ] - for parent in opto_parent_list: - assert ( - parent.full_table_name in opto_parent_links - ), f"opto.OptoProtocol.parents() did not include {parent.full_table_name}" + # Test connection from optogenetics.OptoProtocol to parent tables + assert session.Session.full_table_name in optogenetics.OptoProtocol.parents() + assert ( + optogenetics.OptoStimParams.full_table_name + in optogenetics.OptoProtocol.parents() + ) + assert surgery.Implantation.full_table_name in optogenetics.OptoProtocol.parents() + assert Device.full_table_name in optogenetics.OptoProtocol.parents() + + assert "stim_start_time" in optogenetics.OptoEvent.heading.attributes From 870ffbb74d8ee21a5ba4ac34fc3f88c82e3750bf Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 21:54:09 +0000 Subject: [PATCH 30/58] Update pipeline --- workflow_optogenetics/pipeline.py | 5 +++-- 1 file changed, 3 insertions(+), 2 
deletions(-) diff --git a/workflow_optogenetics/pipeline.py b/workflow_optogenetics/pipeline.py index 37f67e3..1ee99cf 100644 --- a/workflow_optogenetics/pipeline.py +++ b/workflow_optogenetics/pipeline.py @@ -2,8 +2,7 @@ from element_animal.subject import Subject # Dependency for session schema from element_animal.surgery import Implantation # Dependency for optogenetics schema from element_lab import lab -from element_lab.lab import User as Experimenter # Alias for session schema -from element_lab.lab import Lab, Project, Protocol, Source +from element_lab.lab import Lab, Project, Protocol, Source, User from element_optogenetics import optogenetics from element_session import session_with_id as session from element_session.session_with_id import Session @@ -34,6 +33,8 @@ lab.activate(db_prefix + "lab") subject.activate(db_prefix + "subject", linking_module=__name__) surgery.activate(db_prefix + "surgery", linking_module=__name__) + +Experimenter = User session.activate(db_prefix + "session", linking_module=__name__) # Activate "optogenetics" schema ------------- From b4d91a3d6bbb2aeb7231cf9cacb09e957e6b384a Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 21:56:51 +0000 Subject: [PATCH 31/58] Add Dev Container for pytest environment --- .devcontainer/docker-compose.yaml | 13 +++------- .devcontainer/local-test/devcontainer.json | 26 ++++++++++++++++++++ .devcontainer/local-test/docker-compose.yaml | 13 ++++++++++ 3 files changed, 42 insertions(+), 10 deletions(-) create mode 100644 .devcontainer/local-test/devcontainer.json create mode 100644 .devcontainer/local-test/docker-compose.yaml diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml index 01c47ca..975c28d 100644 --- a/.devcontainer/docker-compose.yaml +++ b/.devcontainer/docker-compose.yaml @@ -1,16 +1,9 @@ version: "3" services: app: - cpus: 4 - mem_limit: 8g - build: - context: .. 
- dockerfile: ./.devcontainer/Dockerfile - extra_hosts: - - fakeservices.datajoint.io:127.0.0.1 - volumes: - - ../..:/workspaces:cached - privileged: true # only because of dind + extends: + file: ./test/docker-compose.yaml + service: app devices: - /dev/fuse cap_add: diff --git a/.devcontainer/local-test/devcontainer.json b/.devcontainer/local-test/devcontainer.json new file mode 100644 index 0000000..3e57a7b --- /dev/null +++ b/.devcontainer/local-test/devcontainer.json @@ -0,0 +1,26 @@ +{ + "name": "Local Test", + "dockerComposeFile": "docker-compose.yaml", + "service": "app", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", + "remoteEnv": { + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" + }, + "onCreateCommand": "pip install -r ./requirements_dev.txt && pip install -e . && pip install -e ../element-optogenetics && MYSQL_VER=8.0 docker compose down && MYSQL_VER=8.0 docker compose up --build --wait", + "postStartCommand": "docker volume prune -f", + "hostRequirements": { + "cpus": 4, + "memory": "8gb", + "storage": "32gb" + }, + "forwardPorts": [ + 3306 + ], + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ] + } + } +} \ No newline at end of file diff --git a/.devcontainer/local-test/docker-compose.yaml b/.devcontainer/local-test/docker-compose.yaml new file mode 100644 index 0000000..0dd6db4 --- /dev/null +++ b/.devcontainer/local-test/docker-compose.yaml @@ -0,0 +1,13 @@ +version: "3" +services: + app: + cpus: 4 + mem_limit: 8g + build: + context: ../.. 
+ dockerfile: ./.devcontainer/Dockerfile + extra_hosts: + - fakeservices.datajoint.io:127.0.0.1 + volumes: + - ../../..:/workspaces + privileged: true # only because of dind \ No newline at end of file From 4e74820a5301ed7ed7dc35ac8b0e1cc8d91c3df3 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 21:57:04 +0000 Subject: [PATCH 32/58] Update setup --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 707c52d..cba2956 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,6 @@ + [element-lab](https://github.com/datajoint/element-lab) + [element-animal](https://github.com/datajoint/element-animal) + [element-session](https://github.com/datajoint/element-session) -+ [element-event](https://github.com/datajoint/element-event) + [element-optogenetics](https://github.com/datajoint/element-optogenetics) """ @@ -26,7 +25,7 @@ setup( name="workflow-optogenetics", version=__version__, # noqa: F821 - description="Optogenetics workflow using the DataJoint elements", + description="Optogenetics workflow using DataJoint Elements", long_description=long_description, long_description_content_type="text/markdown", author="DataJoint", From 8490173e60135501d7d39138d4467a3a616ca99d Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 22:05:44 +0000 Subject: [PATCH 33/58] Remove Docker files in place of Dev Container --- docker/Dockerfile | 26 ------------- docker/apt_requirements.txt | 2 - docker/docker-compose.yaml | 63 -------------------------------- docker/setup.sh | 33 ----------------- workflow_optogenetics/version.py | 1 - 5 files changed, 125 deletions(-) delete mode 100755 docker/Dockerfile delete mode 100755 docker/apt_requirements.txt delete mode 100644 docker/docker-compose.yaml delete mode 100644 docker/setup.sh diff --git a/docker/Dockerfile b/docker/Dockerfile deleted file mode 100755 index 4c6347a..0000000 --- a/docker/Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -FROM 
datajoint/djbase:py3.9-debian-8eb1715 - -# ARG GITHUB_USERNAME=datajoint # tried moving to ENV -USER anaconda:anaconda - -COPY ./workflow-optogenetics/docker/apt_requirements.txt /tmp/ -RUN /entrypoint.sh echo "Installed dependencies." - -WORKDIR /main/workflow-optogenetics - -# Always move local - conditional install in setup.sh -COPY --chown=anaconda:anaconda ./element-lab/ /main/element-lab/ -COPY --chown=anaconda:anaconda ./element-animal/ /main/element-animal/ -COPY --chown=anaconda:anaconda ./element-session/ /main/element-session/ -COPY --chown=anaconda:anaconda ./element-event/ /main/element-event/ -COPY --chown=anaconda:anaconda ./element-interface/ /main/element-interface/ -COPY --chown=anaconda:anaconda ./element-optogenetics/ /main/element-optogenetics/ -COPY --chown=anaconda:anaconda ./workflow-optogenetics/ /main/workflow-optogenetics/ - -# Conditional install - local-all, local-dlc, or git -COPY --chown=anaconda:anaconda ./workflow-optogenetics/docker/setup.sh /main/ -COPY --chown=anaconda:anaconda ./workflow-optogenetics/docker/.env /main/ -RUN chmod 755 /main/setup.sh -RUN chmod 755 /main/.env -RUN /main/setup.sh -RUN rm -f ./dj_local_conf.json diff --git a/docker/apt_requirements.txt b/docker/apt_requirements.txt deleted file mode 100755 index 3505bb3..0000000 --- a/docker/apt_requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -git -locales-all diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml deleted file mode 100644 index 26868a8..0000000 --- a/docker/docker-compose.yaml +++ /dev/null @@ -1,63 +0,0 @@ -# .env file. 
Careful that vscode black does not add spaces around '=' -# COMPOSE_PROJECT_NAME='opto' -# TEST_DATA_DIR= -# GITHUB_USERNAME=datajoint -# INSTALL_OPTION=local-all, local-only, or git -# TEST_CMD="pytest" # pytest --dj-{verbose,teardown} False # options -# # to do nothing, set as "True" -# export COMPOSE_DOCKER_CLI_BUILD=0 # some machines need for smooth --build -# docker-compose --env-file ./docker/.env -f ./docker/docker-compose-test.yaml up --build --force-recreate --detached -# docker exec -it workflow-optogenetics /bin/bash -# docker-compose -f ./docker/docker-compose-test.yaml down --volumes - -version: "2.4" - -services: - db: - networks: - optogenetics: - image: datajoint/mysql:5.7 - environment: - MYSQL_ROOT_PASSWORD: simple - container_name: workflow-optogenetics-db - - workflow: - networks: - optogenetics: - build: - context: ../../ - dockerfile: ./workflow-optogenetics/docker/Dockerfile - args: - - GITHUB_USERNAME=${GITHUB_USERNAME} - image: workflow-optogenetics:0.1.0 - container_name: workflow-optogenetics - environment: - - DJ_HOST=db - - DJ_USER=root - - DJ_PASS=simple - - opto_ROOT_DATA_DIR=/main/test_data/ - - DATABASE_PREFIX=test_ - - COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME} - - GITHUB_USERNAME=${GITHUB_USERNAME} - - INSTALL_OPTION=${INSTALL_OPTION} - - TEST_CMD=${TEST_CMD} - command: - - bash - - -c - - | - eval ${TEST_CMD} - tail -f /dev/null - volumes: - - ${TEST_DATA_DIR}:/main/test_data/ - - ../../workflow-optogenetics/docker/apt_requirements.txt:/tmp/apt_requirements.txt - - ../../element-lab:/main/element-lab - - ../../element-animal:/main/element-animal - - ../../element-session:/main/element-session - - ../../element-optogenetics:/main/element-optogenetics - - ../../workflow-optogenetics:/main/workflow-optogenetics - depends_on: - db: - condition: service_healthy - -networks: - optogenetics: diff --git a/docker/setup.sh b/docker/setup.sh deleted file mode 100644 index 8d2d977..0000000 --- a/docker/setup.sh +++ /dev/null @@ -1,33 +0,0 
@@ -#! /bin/bash -export $(grep -v '^#' /main/.env | xargs) - -echo "INSTALL OPTION:" $INSTALL_OPTION -cd /main/ -# all local installs, mapped from host -if [ "$INSTALL_OPTION" == "local-all" ]; then - for f in lab animal session event optogenetics; do - pip install -e ./element-${f} - done - pip install -e ./workflow-optogenetics -# Install all from GitHub except for (optionally) optogenetics Element and Workflow -else - pip install git+https://github.com/${GITHUB_USERNAME}/element-lab.git - pip install git+https://github.com/${GITHUB_USERNAME}/element-animal.git - pip install git+https://github.com/${GITHUB_USERNAME}/element-session.git - pip install git+https://github.com/${GITHUB_USERNAME}/element-event.git - # only optogenetics items from local install - if [ "$INSTALL_OPTION" == "local-only" ]; then - pip install -e ./element-optogenetics - pip install -e ./workflow-optogenetics - # all from github - elif [ "$INSTALL_OPTION" == "git" ]; then - pip install git+https://github.com/${GITHUB_USERNAME}/element-optogenetics.git - pip install git+https://github.com/${GITHUB_USERNAME}/workflow-optogenetics.git - fi -fi - -# If test cmd contains pytest, install -if [[ "$TEST_CMD" == *pytest* ]]; then - pip install pytest - pip install pytest-cov -fi diff --git a/workflow_optogenetics/version.py b/workflow_optogenetics/version.py index ee1b0bb..652faa3 100644 --- a/workflow_optogenetics/version.py +++ b/workflow_optogenetics/version.py @@ -1,5 +1,4 @@ """ Package metadata -Update the Docker image tag in `docker-compose.yaml` to match """ __version__ = "0.1.0" From 26cf8c0caeb9fe1dfd43ebd55be83d8bccd84ae3 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 22:06:32 +0000 Subject: [PATCH 34/58] Move to `__init__.py` --- tests/conftest.py | 268 ---------------------------------------------- 1 file changed, 268 deletions(-) delete mode 100644 tests/conftest.py diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index 
4a61fc5..0000000 --- a/tests/conftest.py +++ /dev/null @@ -1,268 +0,0 @@ -import logging -import os -import sys -from contextlib import nullcontext -from pathlib import Path - -import datajoint as dj -import pytest -from element_interface.utils import ingest_csv_to_table - -from workflow_optogenetics.ingest import ( - ingest_all, - ingest_events, - ingest_opto, - ingest_sessions, - ingest_subjects, -) -from workflow_optogenetics.paths import get_opto_root_data_dir - -__all__ = [ - "ingest_all", - "ingest_events", - "ingest_opto", - "ingest_subjects", - "ingest_sessions", - "get_opto_root_data_dir", -] - -# ---------------------- CONSTANTS --------------------- - -logger = logging.getLogger("datajoint") - - -def pytest_addoption(parser): - """ - Permit constants when calling pytest at commandline e.g., pytest --dj-verbose False - - Parameters - ---------- - --dj-verbose (bool): Default True. Pass print statements from Elements. - --dj-teardown (bool): Default True. Delete pipeline on close. - --dj-datadir (str): Default ./tests/user_data. Relative path of test CSV data. 
- """ - parser.addoption( - "--dj-verbose", - action="store", - default="True", - help="Verbose for dj items: True or False", - choices=("True", "False"), - ) - parser.addoption( - "--dj-teardown", - action="store", - default="True", - help="Verbose for dj items: True or False", - choices=("True", "False"), - ) - parser.addoption( - "--dj-datadir", - action="store", - default="./tests/user_data", - help="Relative path for saving tests data", - ) - - -@pytest.fixture(scope="session") -def setup(request): - """Take passed commandline variables, set as global""" - global verbose, _tear_down, test_user_data_dir, verbose_context - - verbose = str_to_bool(request.config.getoption("--dj-verbose")) - _tear_down = str_to_bool(request.config.getoption("--dj-teardown")) - test_user_data_dir = Path(request.config.getoption("--dj-datadir")) - test_user_data_dir.mkdir(exist_ok=True) - - verbose_context = nullcontext() if verbose else QuietStdOut() - - yield verbose_context, verbose - - -# ------------------ GENERAL FUCNTION ------------------ - - -def str_to_bool(value) -> bool: - """Return whether the provided string represents true. Otherwise false. 
- - Args: - value (any): Any input - - Returns: - bool (bool): True if value in ("y", "yes", "t", "true", "on", "1") - """ - # Due to distutils equivalent depreciation in 3.10 - # Adopted from github.com/PostHog/posthog/blob/master/posthog/utils.py - if not value: - return False - return str(value).lower() in ("y", "yes", "t", "true", "on", "1") - - -def write_csv(path, content): - """General function for writing strings to lines in CSV - - Args: - path: pathlib PosixPath - content: list of strings, each as row of CSV - """ - with open(path, "w") as f: - for line in content: - f.write(line + "\n") - - -class QuietStdOut: - """If verbose set to false, used to quiet tear_down table.delete prints""" - - def __enter__(self): - logger.setLevel("WARNING") - self._original_stdout = sys.stdout - sys.stdout = open(os.devnull, "w") - - def __exit__(self, exc_type, exc_val, exc_tb): - logger.setLevel("INFO") - sys.stdout.close() - sys.stdout = self._original_stdout - - -# ------------------- FIXTURES ------------------- - - -@pytest.fixture(autouse=True, scope="session") -def dj_config(): - """If dj_local_config exists, load""" - if Path("./dj_local_conf.json").exists(): - dj.config.load("./dj_local_conf.json") - - dj.config.update( - { - "safemode": False, - "database.host": os.environ.get("DJ_HOST") or dj.config["database.host"], - "database.password": os.environ.get("DJ_PASS") - or dj.config["database.password"], - "database.user": os.environ.get("DJ_USER") or dj.config["database.user"], - "custom": { - "database.prefix": os.environ.get("DATABASE_PREFIX") - or dj.config["custom"]["database.prefix"], - "dlc_root_data_dir": os.environ.get("DLC_ROOT_DATA_DIR") - or dj.config["custom"]["dlc_root_data_dir"], - }, - } - ) - - return - - -@pytest.fixture(scope="session") -def pipeline(setup): - """Loads workflow_optogenetics.pipeline lab, session, subject, dlc""" - with verbose_context: - from workflow_optogenetics import pipeline - - yield { - "lab": pipeline.lab, - "subject": 
pipeline.subject, - "surgery": pipeline.surgery, - "session": pipeline.session, - "opto": pipeline.opto, - "Device": pipeline.Device, - } - if _tear_down: - with verbose_context: - pipeline.opto.OptoWaveform.delete() - pipeline.surgery.BrainRegion.delete() - pipeline.subject.Subject.delete() - pipeline.session.Session.delete() - pipeline.lab.User.delete() - - -@pytest.fixture(scope="session") -def ingest_csvs(setup, pipeline): - """For each input, generates csv in test_user_data_dir and ingests in schema""" - # CSV as list of 3: filename, relevant tables, content - all_csvs = { - "subjects.csv": { - "tables": [pipeline["subject"].Subject()], - "content": [ - "subject,sex,subject_birth_date,subject_description", - "subject3,F,2022-03-03,Optogenetic pilot subject", - ], - }, - "sessions.csv": { - "tables": [pipeline["session"].Session()], - "content": [ - "subject,session_dir,session_id,session_datetime", - "subject3,subject3/opto_session1/,1,2022-04-04 12:13:14", - ], - }, - "opto_waveforms.csv": { - "tables": [ - pipeline["opto"].OptoWaveform(), - pipeline["opto"].OptoWaveform.Square(), - pipeline["opto"].OptoStimParams(), - ], - "content": [ - "waveform_type,waveform_name,waveform_description,on_proportion," - + "off_proportion,opto_params_id,wavelength,light_intensity,frequency," - + "duration", - "square,square_10,Square waveform with 10-90 on-off cycle,.10," - + ".90,1,470,10.2,1,241", - ], - }, - "opto_surgeries.csv": { - "tables": [ - pipeline["surgery"].CoordinateReference(), - pipeline["surgery"].BrainRegion(), - pipeline["lab"].User(), - pipeline["surgery"].Implantation(), - pipeline["surgery"].Implantation.Coordinate(), - ], - "content": [ - "subject,implant_date,reference,region_acronym,region_name,hemisphere," - + "implant_type,ap,ap_ref,ml,ml_ref,dv,dv_ref,theta,phi,user,surgeon," - + "target_region,target_hemisphere", - "subject3,2022-04-01 12:13:14,bregma,dHP,Dorsal Hippocampus,left," - + 
"opto,-7.9,bregma,-1.8,bregma,5,skull_surface,11.5,0,user1,user1," - + "dHP,left", - ], - }, - "opto_sessions.csv": { - "tables": [ - pipeline["opto"].OptoProtocol(), - ], - "content": [ - "subject,session_id,protocol_id,opto_params_id,implant_date," - + "implant_type,target_region,target_hemisphere", - "subject3,1,1,1,2022-04-01 12:13:14,opto,dHP,left", - ], - }, - "opto_events.csv": { - "tables": [ - pipeline["opto"].OptoEvent(), - ], - "content": [ - "subject,session_id,protocol_id,stim_start_time,stim_end_time", - "subject3,1,1,241,482", - "subject3,1,1,482,723", - ], - }, - } - # If data in last table, presume didn't tear down last time, skip insert - if len(pipeline["opto"].OptoEvent()) == 0: - for csv_filename, csv_dict in all_csvs.items(): - csv_path = test_user_data_dir / csv_filename # add prefix for rel path - write_csv(csv_path, csv_dict["content"]) # write content at rel path - # repeat csv path n times as list to match n tables - csv_path_as_list = [str(csv_path)] * len(csv_dict["tables"]) - ingest_csv_to_table( # insert csv content into each of n tables - csv_path_as_list, - csv_dict["tables"], - skip_duplicates=True, - verbose=verbose, - ) - - yield - - if _tear_down: - with verbose_context: - for csv_info in all_csvs: - csv_path = test_user_data_dir / csv_info[1] - csv_path.unlink() From 8ea99e0988e26658f4fbe73b79211010cf52b9ed Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 22:36:18 +0000 Subject: [PATCH 35/58] Update test init --- tests/__init__.py | 64 ++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 3 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index bc2505c..316a845 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,63 @@ """ -run all: pytest tests/ -run one: pytest --pdb tests/tests_name.py -k function_name -options: pytest --dj-verbose True --dj-teardown True --dj-datadir './tests/user_data' +run all: python -m pytest tests/ +run one: python -m pytest 
--pdb tests/module_name.py -k function_name """ + +import logging +import os +import sys +from contextlib import nullcontext +from pathlib import Path + +import datajoint as dj +import pytest + +# Constants ---------------------------------------------------------------------------- + +logger = logging.getLogger("datajoint") + +_tear_down = True +verbose = True + +# Functions ---------------------------------------------------------------------------- + + +class QuietStdOut: + """If verbose set to false, used to quiet table deletion print statements""" + + def __enter__(self): + logger.setLevel("WARNING") + self._original_stdout = sys.stdout + sys.stdout = open(os.devnull, "w") + + def __exit__(self, exc_type, exc_val, exc_tb): + logger.setLevel("INFO") + sys.stdout.close() + sys.stdout = self._original_stdout + +verbose_context = nullcontext() if verbose else QuietStdOut() + +# Fixtures ----------------------------------------------------------------------------- + + +@pytest.fixture(scope="session") +def pipeline(): + """Loads lab, subject, session, optogenetics, Device""" + with verbose_context: + from workflow_optogenetics import pipeline + + yield { + "lab": pipeline.lab, + "subject": pipeline.subject, + "surgery": pipeline.surgery, + "session": pipeline.session, + "optogenetics": pipeline.optogenetics, + "Device": pipeline.Device, + } + if _tear_down: + with verbose_context: + pipeline.optogenetics.OptoWaveform.delete() + pipeline.surgery.BrainRegion.delete() + pipeline.subject.Subject.delete() + pipeline.session.Session.delete() + pipeline.lab.User.delete() From 5ea89305dee9e831683fa84c47a934b0b522ee37 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 22:37:36 +0000 Subject: [PATCH 36/58] Add database prefix to dj.config --- workflow_optogenetics/__init__.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/workflow_optogenetics/__init__.py b/workflow_optogenetics/__init__.py index 111bd2f..93b1d40 100644 --- 
a/workflow_optogenetics/__init__.py +++ b/workflow_optogenetics/__init__.py @@ -1,6 +1,12 @@ +import os import datajoint as dj -if "custom" not in dj.config: - dj.config["custom"] = {} + +if 'custom' not in dj.config: + dj.config['custom'] = {} + +dj.config['custom']['database.prefix'] = os.getenv( + 'DATABASE_PREFIX', + dj.config['custom'].get('database.prefix', '')) db_prefix = dj.config["custom"].get("database.prefix", "") From 9ba4d95f45c7b5d3d4e881d6d1d5a73cf5e57d67 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 18 Apr 2023 17:50:14 -0500 Subject: [PATCH 37/58] Update gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index c99cdf8..6e856e1 100644 --- a/.gitignore +++ b/.gitignore @@ -120,3 +120,6 @@ Diagram.ipynb # docker .env tests/user_dat* + +# pytest +.pytest_cache/ From 1c7549812fc977799e8d7259ff66a5fb13c70c85 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Wed, 19 Apr 2023 22:05:33 -0500 Subject: [PATCH 38/58] Update readme --- README.md | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fa6dc3d..516784b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# DataJoint Workflow - Optogenetics +# DataJoint Workflow for Optogenetics The DataJoint Workflow for Optogenetics combines multiple DataJoint Elements for optogenetics research. DataJoint Elements collectively standardize and automate data @@ -6,7 +6,25 @@ collection and analysis for neuroscience experiments. Each Element is a modular pipeline for data storage and processing with corresponding database tables that can be combined with other Elements to assemble a fully functional pipeline. -Installation and usage instructions can be found at the -[Element documentation](https://datajoint.com/docs/elements/element-optogenetics). 
+## Experiment Flowchart + +![flowchart](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/flowchart.svg) + +## Data Pipeline + +![pipeline](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/pipeline.svg) + +## Getting Started + ++ [Interactive tutorial on GitHub Codespaces](#interactive-tutorial) + ++ Install Element Optogenetics from PyPI + + ```bash + pip install element-optogenetics + ``` + ++ [Documentation](https://datajoint.com/docs/elements/element-optogenetics) + ![diagram](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/diagram_flowchart.svg) From 560b0928273d10e7104969f304ecec299784d5c4 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Wed, 19 Apr 2023 22:06:53 -0500 Subject: [PATCH 39/58] Update gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 6e856e1..18fcc2f 100644 --- a/.gitignore +++ b/.gitignore @@ -123,3 +123,6 @@ tests/user_dat* # pytest .pytest_cache/ + +# vscode +*.code-workspace \ No newline at end of file From 54a6cd06a4cf79faa0e3d28c1d1dee32ec8a450e Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 20 Apr 2023 11:12:37 -0500 Subject: [PATCH 40/58] Update readme --- README.md | 46 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 516784b..3539126 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,12 @@ # DataJoint Workflow for Optogenetics -The DataJoint Workflow for Optogenetics combines multiple DataJoint Elements for -optogenetics research. DataJoint Elements collectively standardize and automate data -collection and analysis for neuroscience experiments. Each Element is a modular -pipeline for data storage and processing with corresponding database tables that can be -combined with other Elements to assemble a fully functional pipeline. 
+The DataJoint Workflow for Optogenetics combines four DataJoint Elements for +optogenetics research - Elements Lab, Animal, Session, and Optogenetics. DataJoint +Elements collectively standardize and automate data collection and analysis for +neuroscience experiments. Each Element is a modular pipeline for data storage and +processing with corresponding database tables that can be combined with other Elements +to assemble a fully functional pipeline. This repository also provides a tutorial +environment and notebook to learn the pipeline. ## Experiment Flowchart @@ -26,5 +28,37 @@ combined with other Elements to assemble a fully functional pipeline. + [Documentation](https://datajoint.com/docs/elements/element-optogenetics) +## Support -![diagram](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/diagram_flowchart.svg) ++ If you need help getting started or run into any errors, please contact our team by email at support@datajoint.com. + +## Interactive Tutorial + ++ The easiest way to learn about DataJoint Elements is to use the tutorial notebook within the included interactive environment configured using [Dev Container](https://containers.dev/). + +### Launch Environment + +Here are some options that provide a great experience: + +- Cloud-based Environment (*recommended*) + - Launch using [GitHub Codespaces](https://github.com/features/codespaces) using the `+` option which will `Create codespace on main` in the codebase repository on your fork with default options. For more control, see the `...` where you may create `New with options...`. + - Build time for a codespace is several minutes. This is done infrequently and cached for convenience. + - Start time for a codespace is less than 1 minute. This will pull the built codespace from cache when you need it. 
+ - *Tip*: Each month, GitHub renews a [free-tier](https://docs.github.com/en/billing/managing-billing-for-github-codespaces/about-billing-for-github-codespaces#monthly-included-storage-and-core-hours-for-personal-accounts) quota of compute and storage. Typically we run into the storage limits before anything else since codespaces consume storage while stopped. It is best to delete Codespaces when not actively in use and recreate when needed. We'll soon be creating prebuilds to avoid larger build times. Once any portion of your quota is reached, you will need to wait for it to be reset at the end of your cycle or add billing info to your GitHub account to handle overages. + - *Tip*: GitHub auto names the codespace but you can rename the codespace so that it is easier to identify later. + +- Local Environment + - Install [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) + - Install [Docker](https://docs.docker.com/get-docker/) + - Install [VSCode](https://code.visualstudio.com/) + - Install the VSCode [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) + - `git clone` the codebase repository and open it in VSCode + - Use the `Dev Containers extension` to `Reopen in Container` (More info is in the `Getting started` included with the extension.) + +You will know your environment has finished loading once you either see a terminal open related to `Running postStartCommand` with a final message of `Done` or the `README.md` is opened in `Preview`. + +### Instructions + +1. We recommend you start by navigating to the `notebooks` directory on the left panel and go through the `tutorial.ipynb` Jupyter notebook. Execute the cells in the notebook to begin your walk through of the tutorial. + +1. Once you are done, see the options available to you in the menu in the bottom-left corner. 
For example, in codespace you will have an option to `Stop Current Codespace` but when running Dev Container on your own machine the equivalent option is `Reopen folder locally`. By default, GitHub will also automatically stop the Codespace after 30 minutes of inactivity. Once the codespace is no longer being used, we recommend deleting the codespace. From e4bb2797641bac4af5c4e8ef9f127c56152daf64 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 20 Apr 2023 12:03:27 -0500 Subject: [PATCH 41/58] Update changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 504b734..0344ead 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. -## [0.1.0] - 2023-04-03 +## [0.1.0] - 2023-04-20 + Add - First release From 904569c156cb570f53421e4d0b7cf8f4f0478481 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 20 Apr 2023 12:04:26 -0500 Subject: [PATCH 42/58] Update Compose file --- .devcontainer/docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml index 975c28d..28db3c5 100644 --- a/.devcontainer/docker-compose.yaml +++ b/.devcontainer/docker-compose.yaml @@ -2,7 +2,7 @@ version: "3" services: app: extends: - file: ./test/docker-compose.yaml + file: ./local-test/docker-compose.yaml service: app devices: - /dev/fuse From 4c9619163d19a6a7cffc27a86d71493bc0c11877 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 20 Apr 2023 12:11:46 -0500 Subject: [PATCH 43/58] Fix typo --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index cba2956..d62ffb3 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ long_description = """" # Workflow for optogenetics research. 
-Build a complete imaging workflow using the DataJoint Elements +Build a complete optogenetics workflow using the DataJoint Elements + [element-lab](https://github.com/datajoint/element-lab) + [element-animal](https://github.com/datajoint/element-animal) + [element-session](https://github.com/datajoint/element-session) From 07ab0789e96d9ee8dbd4e5950c192840185691b2 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 20 Apr 2023 12:40:28 -0500 Subject: [PATCH 44/58] Update comment --- tests/test_pipeline_generation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pipeline_generation.py b/tests/test_pipeline_generation.py index 85fb97a..4676d54 100644 --- a/tests/test_pipeline_generation.py +++ b/tests/test_pipeline_generation.py @@ -6,7 +6,7 @@ def test_upstream_pipeline(pipeline): surgery = pipeline["surgery"] subject = pipeline["subject"] - # Test connection from Subject to Session + # Test connection from Subject to parent tables assert subject.Subject.full_table_name in session.Session.parents() assert subject.Subject.full_table_name in surgery.Implantation.parents() From 77de4ed2fe3f2376a1f20cfc4a7d497f8a854fcc Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 20 Apr 2023 12:42:09 -0500 Subject: [PATCH 45/58] Update comment --- tests/test_pipeline_generation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_pipeline_generation.py b/tests/test_pipeline_generation.py index 4676d54..6f177e4 100644 --- a/tests/test_pipeline_generation.py +++ b/tests/test_pipeline_generation.py @@ -6,7 +6,7 @@ def test_upstream_pipeline(pipeline): surgery = pipeline["surgery"] subject = pipeline["subject"] - # Test connection from Subject to parent tables + # Test connection from Subject to downstream tables assert subject.Subject.full_table_name in session.Session.parents() assert subject.Subject.full_table_name in surgery.Implantation.parents() From 11e6ac452ae4eaca9a7545c051afdea2ff7bd108 Mon Sep 17 
00:00:00 2001 From: Kabilar Gunalan Date: Thu, 4 May 2023 16:52:48 -0500 Subject: [PATCH 46/58] Add tutorial notebook --- notebooks/tutorial.ipynb | 843 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 843 insertions(+) create mode 100644 notebooks/tutorial.ipynb diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb new file mode 100644 index 0000000..9431268 --- /dev/null +++ b/notebooks/tutorial.ipynb @@ -0,0 +1,843 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Manage optogenetics experiments with DataJoint Elements\n", + "\n", + "In this tutorial, we will walk through storing optogenetic stimulus data with the DataJoint Workflow for Optogenetics.\n", + "\n", + "We will explain the following concepts as they relate to this workflow:\n", + "- What is an Element versus a Workflow?\n", + "- Plot the workflow with `dj.Diagram`\n", + "- Insert data into tables\n", + "- Query table contents\n", + "- Fetch table contents\n", + "- Run the workflow for your experiments\n", + "\n", + "For detailed documentation and tutorials on general DataJoint principles that support collaboration, automation, reproducibility, and visualizations:\n", + "\n", + "- [DataJoint Interactive Tutorials](https://github.com/datajoint/datajoint-tutorials) - Fundamentals including table tiers, query operations, fetch operations, automated computations with the `make` function, etc.\n", + "\n", + "- [DataJoint Core - Documentation](https://datajoint.com/docs/core/) - Relational data model principles\n", + "\n", + "- [DataJoint API for Python - Documentation](https://datajoint.com/docs/core/datajoint-python/)\n", + "\n", + "- [DataJoint Element for Optogenetics - Documentation](https://datajoint.com/docs/elements/element-optogenetics/)\n", + "\n", + "Let's start by importing the packages necessary to run this workflow. 
" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import datajoint as dj" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## The DataJoint Workflow for Optogenetics is assembled from 4 DataJoint Elements\n", + "\n", + "| Element | Source Code | Documentation | Description |\n", + "| -- | -- | -- | -- |\n", + "| Element Lab | [Link](https://github.com/datajoint/element-lab) | [Link](https://datajoint.com/docs/elements/element-lab) | Lab management related information, such as Lab, User, Project, Protocol, Source. |\n", + "| Element Animal | [Link](https://github.com/datajoint/element-animal) | [Link](https://datajoint.com/docs/elements/element-animal) | General animal metadata and surgery information. |\n", + "| Element Session | [Link](https://github.com/datajoint/element-session) | [Link](https://datajoint.com/docs/elements/element-session) | General information of experimental sessions. |\n", + "| Element Optogenetics | [Link](https://github.com/datajoint/element-optogenetics) | [Link](https://datajoint.com/docs/elements/element-optogenetics) | Optogenetics stimulus and timing data. |\n", + "\n", + "Each workflow is composed of multiple Elements. Each Element contains 1 or more modules, and each module declares its own schema in the database.\n", + "\n", + "The Elements are imported within the `workflow_optogenetics.pipeline` script.\n", + "\n", + "By importing the modules for the first time, the schemas and tables will be created in the database. Once created, importing modules will not create schemas and tables again, but the existing schemas/tables can be accessed.\n", + "\n", + "The schema diagram (shown below) is a good reference for understanding the order of the tables within the workflow.\n", + "\n", + "Let's activate the Elements." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2023-04-17 02:17:46,707][WARNING]: lab.Project and related tables will be removed in a future version of Element Lab. Please use the project schema.\n", + "[2023-04-17 02:17:46,711][INFO]: Connecting root@fakeservices.datajoint.io:3306\n", + "[2023-04-17 02:17:46,727][INFO]: Connected root@fakeservices.datajoint.io:3306\n" + ] + } + ], + "source": [ + "from workflow_optogenetics.pipeline import lab, subject, surgery, session, optogenetics, Device" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Diagram\n", + "\n", + "We can plot the diagram of tables within multiple schemas and their dependencies using `dj.Diagram()`. For details, see the [documentation](https://datajoint.com/docs/core/concepts/getting-started/diagrams/)." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "%3\n", + "\n", + "\n", + "\n", + "optogenetics.OptoProtocol\n", + "\n", + "\n", + "optogenetics.OptoProtocol\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoEvent\n", + "\n", + "\n", + "optogenetics.OptoEvent\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoProtocol->optogenetics.OptoEvent\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform\n", + "\n", + "\n", + "optogenetics.OptoWaveform\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Square\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Square\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Square\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoStimParams\n", + "\n", + "\n", + "optogenetics.OptoStimParams\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform->optogenetics.OptoStimParams\n", + 
"\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Sine\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Sine\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Sine\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Ramp\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Ramp\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Ramp\n", + "\n", + "\n", + "\n", + "\n", + "subject.Subject\n", + "\n", + "\n", + "subject.Subject\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "surgery.Implantation\n", + "\n", + "\n", + "surgery.Implantation\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "subject.Subject->surgery.Implantation\n", + "\n", + "\n", + "\n", + "\n", + "session.Session\n", + "\n", + "\n", + "session.Session\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "subject.Subject->session.Session\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoStimParams->optogenetics.OptoProtocol\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveformType\n", + "\n", + "\n", + "optogenetics.OptoWaveformType\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "optogenetics.OptoWaveformType->optogenetics.OptoWaveform\n", + "\n", + "\n", + "\n", + "\n", + "surgery.Implantation->optogenetics.OptoProtocol\n", + "\n", + "\n", + "\n", + "\n", + "session.Session->optogenetics.OptoProtocol\n", + "\n", + "\n", + "\n", + "" + ], + "text/plain": [ + "" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "(\n", + " dj.Diagram(subject.Subject)\n", + " + dj.Diagram(surgery.Implantation)\n", + " + dj.Diagram(session.Session)\n", + " + dj.Diagram(Device)\n", + " + dj.Diagram(optogenetics)\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "While the diagram above seems complex at first, it becomes more clear when it's approached as a hierarchy of tables that define the 
order in which the workflow expects to receive data in each of its tables.\n", + "\n", + "The tables higher up in the diagram such as `subject.Subject()` should be the first to receive data.\n", + "\n", + "Data is manually entered into the green, rectangular tables with the `insert1()` method.\n", + "\n", + "Tables connected by a solid line depend on entries from the table above it.\n", + "\n", + "There are 5 table tiers in DataJoint. Some of these tables appear in the diagram above.\n", + "\n", + "| Table tier | Color and shape | Description |\n", + "| -- | -- | -- |\n", + "| Manual table | Green box | Data entered from outside the pipeline, either by hand or with external helper scripts. |\n", + "| Lookup table | Gray box | Small tables containing general facts and settings of the data pipeline; not specific to any experiment or dataset. | \n", + "| Imported table | Blue oval | Data ingested automatically inside the pipeline but requiring access to data outside the pipeline. |\n", + "| Computed table | Red circle | Data computed automatically entirely inside the pipeline. |\n", + "| Part table | Plain text | Part tables share the same tier as their master table. |" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Insert entries into manual tables\n", + "\n", + "In this section, we will insert metadata about an animal subject, experiment session, and optogenetic stimulation parameters.\n", + "\n", + "Let's start with the first schema and table in the schema diagram (i.e. `subject.Subject` table).\n", + "\n", + "Each module (e.g. `subject`) contains a schema object that enables interaction with the schema in the database." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "subject.schema" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The table classes in the module corresponds to a table in the database." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "subject.Subject()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can view the table dependencies and the attributes we need to insert by using the functions `.describe()` and `.heading`. The `describe()` function displays the table definition with foreign key references and the `heading` function displays the attributes of the table definition. These are particularly useful functions if you are new to DataJoint Elements and are unsure of the attributes required for each table." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "subject.Subject.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "subject.Subject.heading" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We will insert data into the `subject.Subject` table. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "subject.Subject.insert1(\n", + " dict(\n", + " subject=\"subject1\",\n", + " sex=\"F\",\n", + " subject_birth_date=\"2020-01-01\",\n", + " subject_description=\"Optogenetic pilot subject\",\n", + " )\n", + ")\n", + "subject.Subject()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's continue inserting in the other manual tables. The `Session` table is next." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "session.Session.describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "session.Session.heading" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The cells above show the dependencies and attributes for the `session.Session` table.\n", + "\n", + "Notice that `describe` shows the dependencies of the table on upstream tables (i.e. foreign key references). The `Session` table depends on the upstream `Subject` table. \n", + "\n", + "Whereas `heading` lists all the attributes of the `Session` table, regardless of\n", + "whether they are declared in an upstream table." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "session.Session.insert1(\n", + " dict(\n", + " subject=\"subject1\", \n", + " session_id=\"1\", \n", + " session_datetime=\"2022-04-04 12:22:15.032\"\n", + " )\n", + ")\n", + "session.Session()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `OptoProtocol` table's attributes include the `Session` and `Device` tables. Let's insert into the `Device` table." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Device.insert1(\n", + " dict(\n", + " device=\"OPTG_8\",\n", + " modality=\"Optogenetics\",\n", + " description=\"8 channel pulse sequence device\",\n", + " )\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `surgery.Implantation` table's attribute includes the `User` table. Let's insert into the `User` table. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lab.User.insert1(\n", + " dict(user=\"User1\")\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `Implantation` table's attributes includes the `CoordinateReference` and `Hemisphere` tables. Let's view the contents of these lookup tables, which have default contents." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "surgery.CoordinateReference()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "surgery.Hemisphere()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Insert a new entry for the location of the optogenetics probe." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "surgery.BrainRegion.insert1(\n", + " dict(\n", + " region_acronym=\"dHP\", \n", + " region_name=\"Dorsal Hippocampus\")\n", + ")\n", + "\n", + "surgery.Implantation.insert1(\n", + " dict(\n", + " subject=\"subject1\",\n", + " implant_date=\"2022-04-01 12:13:14\",\n", + " implant_type=\"optogenetics\",\n", + " target_region=\"dHP\",\n", + " target_hemisphere=\"left\",\n", + " surgeon=\"User1\",\n", + " )\n", + ")\n", + "\n", + "surgery.Implantation.Coordinate.insert1(\n", + " dict(\n", + " subject=\"subject1\",\n", + " implant_date=\"2022-04-01 12:13:14\",\n", + " implant_type=\"optogenetics\",\n", + " target_region=\"dHP\",\n", + " target_hemisphere=\"left\",\n", + " ap=\"-7.9\", # [mm] anterior-posterior distance\n", + " ap_ref=\"bregma\",\n", + " ml=\"-1.8\", # [mm] medial axis distance\n", + " ml_ref=\"bregma\",\n", + " dv=\"5\", # [mm] dorso-ventral axis distance\n", + " dv_ref=\"skull_surface\",\n", + " theta=\"11.5\", # [0, 180] degree rotation about ml-axis relative to 
z\n", + " phi=\"0\", # [0, 360] degree rotation about dv-axis relative to x\n", + " beta=None, # [-180, 180] degree rotation about shank relative to anterior\n", + " )\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We'll add information to describe the stimulus, including waveform shape and and stimulation parameters." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoWaveform.insert1(\n", + " dict(\n", + " waveform_name=\"square_10\",\n", + " waveform_type=\"square\",\n", + " waveform_description=\"Square waveform: 10%/90% on/off cycle\",\n", + " )\n", + ")\n", + "\n", + "# Square is one part table of OptoWaveform.\n", + "# For sine and ramp waveforms, see the corresponding tables.\n", + "optogenetics.OptoWaveform.Square.insert1(\n", + " dict(\n", + " waveform_name=\"square_10\", \n", + " on_proportion=0.10, \n", + " off_proportion=0.90)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoStimParams.insert1(\n", + " dict(\n", + " opto_params_id=1,\n", + " waveform_name=\"square_10\",\n", + " wavelength=470,\n", + " light_intensity=10.2,\n", + " frequency=1,\n", + " duration=241,\n", + " )\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we'll describe the session in which these parameters are used in `OptoProtocol`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoProtocol.insert1(\n", + " dict(\n", + " subject=\"subject1\",\n", + " session_id=\"1\",\n", + " protocol_id=\"1\",\n", + " opto_params_id=\"1\",\n", + " implant_date=\"2022-04-01 12:13:14\",\n", + " implant_type=\"optogenetics\",\n", + " target_region=\"dHP\",\n", + " target_hemisphere=\"left\",\n", + " device=\"OPTG_4\",\n", + " )\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can describe the timing of these stimulations in `OptoEvent`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoEvent.insert(\n", + " dict(\n", + " subject=\"subject1\",\n", + " session_id=1,\n", + " protocol_id=1,\n", + " stim_start_time=241,\n", + " stim_end_time=482,\n", + " )\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can insert a second set of timing information for the stimulation." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoEvent.insert(\n", + " dict(\n", + " subject=\"subject1\",\n", + " session_id=1,\n", + " protocol_id=1,\n", + " stim_start_time=543,\n", + " stim_end_time=797,\n", + " )\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Query\n", + "\n", + "Queries allow you to view the contents of the database. The simplest query is the instance of the table class." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoEvent()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With the `&` operator, we will restrict the contents of the `OptoEvent` table to those entries with a `stim_start_time` of 543." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoEvent & \"stim_start_time=543\"" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "DataJoint queries can be a highly flexible tool with several [operators](https://datajoint.com/docs/core/concepts/query-lang/operators/). The next operator we will explore is `join` which combines matching information from tables." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoProtocol * optogenetics.OptoStimParams" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Fetch\n", + "\n", + "The `fetch` and `fetch1` methods download the data from the query object into the workspace.\n", + "\n", + "Below we will run `fetch()` without any arguments to return a list of dictionaries containing all attributes of all entries in the table." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "optogenetics.OptoEvent.fetch()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we will fetch the entry with a `stim_start_time` of 543 with the `fetch1` method, which returns a dictionary containing all attributes of one entry in the table." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "(optogenetics.OptoEvent & \"stim_start_time=543\").fetch1()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Next steps\n", + "\n", + "Follow the steps below to run this workflow for your experiments:\n", + "\n", + "- Create a fork of this repository to your GitHub account.\n", + "- Clone the repository to your local machine and configure for use with the instructions in the [User Guide](https://datajoint.com/docs/elements/user-guide/).\n", + "- The DataJoint team offers free [Office Hours](https://datajoint.com/docs/community/support/) to help you setup this workflow.\n", + "- If you have any questions, please reach out at support@datajoint.com." + ] + } + ], + "metadata": { + "jupytext": { + "encoding": "# -*- coding: utf-8 -*-" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "metadata": { + "interpreter": { + "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" + } + }, + "vscode": { + "interpreter": { + "hash": "949777d72b0d2535278d3dc13498b2535136f6dfe0678499012e853ee9abcab1" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} From 11f77ca5ec9055f77f92bde597a3172307d3afce Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 02:32:21 +0000 Subject: [PATCH 47/58] Add default python kernel --- .devcontainer/devcontainer.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 47d92ee..80131cb 100644 --- a/.devcontainer/devcontainer.json +++ 
b/.devcontainer/devcontainer.json @@ -16,7 +16,10 @@ "forwardPorts": [ 3306 ], - "customizations": { + "customizations": { + "settings": { + "python.pythonPath": "/usr/local/bin/python" + }, "vscode": { "extensions": [ "ms-python.python" From 9e22a816a6d675e11a1428f8c0169996f6071952 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 03:48:09 +0000 Subject: [PATCH 48/58] Update notebook with outputs --- notebooks/tutorial.ipynb | 2105 +++++++++++++++++++++++++++++++++++--- 1 file changed, 1942 insertions(+), 163 deletions(-) diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb index 9431268..bc53838 100644 --- a/notebooks/tutorial.ipynb +++ b/notebooks/tutorial.ipynb @@ -73,9 +73,9 @@ "name": "stderr", "output_type": "stream", "text": [ - "[2023-04-17 02:17:46,707][WARNING]: lab.Project and related tables will be removed in a future version of Element Lab. Please use the project schema.\n", - "[2023-04-17 02:17:46,711][INFO]: Connecting root@fakeservices.datajoint.io:3306\n", - "[2023-04-17 02:17:46,727][INFO]: Connected root@fakeservices.datajoint.io:3306\n" + "[2023-05-05 03:13:02,092][WARNING]: lab.Project and related tables will be removed in a future version of Element Lab. 
Please use the project schema.\n", + "[2023-05-05 03:13:02,117][INFO]: Connecting root@fakeservices.datajoint.io:3306\n", + "[2023-05-05 03:13:02,146][INFO]: Connected root@fakeservices.datajoint.io:3306\n" ] } ], @@ -95,178 +95,192 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "image/svg+xml": [ - "\n", + "\n", "\n", "%3\n", - "\n", - "\n", + "\n", + "\n", "\n", - "optogenetics.OptoProtocol\n", - "\n", - "\n", - "optogenetics.OptoProtocol\n", + "surgery.Implantation\n", + "\n", + "\n", + "surgery.Implantation\n", "\n", "\n", "\n", - "\n", - "\n", - "optogenetics.OptoEvent\n", - "\n", - "\n", - "optogenetics.OptoEvent\n", + "\n", + "\n", + "optogenetics.OptoProtocol\n", + "\n", + "\n", + "optogenetics.OptoProtocol\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoProtocol->optogenetics.OptoEvent\n", - "\n", + "surgery.Implantation->optogenetics.OptoProtocol\n", + "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoWaveform\n", - "\n", - "\n", - "optogenetics.OptoWaveform\n", + "optogenetics.OptoEvent\n", + "\n", + "\n", + "optogenetics.OptoEvent\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoWaveform.Square\n", - "\n", - "\n", - "optogenetics.OptoWaveform.Square\n", + "optogenetics.OptoWaveformType\n", + "\n", + "\n", + "optogenetics.OptoWaveformType\n", "\n", "\n", "\n", - "\n", + "\n", + "\n", + "optogenetics.OptoWaveform\n", + "\n", + "\n", + "optogenetics.OptoWaveform\n", + "\n", + "\n", + "\n", + "\n", "\n", - "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Square\n", - "\n", + "optogenetics.OptoWaveformType->optogenetics.OptoWaveform\n", + "\n", "\n", - "\n", - "\n", - "optogenetics.OptoStimParams\n", - "\n", - "\n", - "optogenetics.OptoStimParams\n", + "\n", + "\n", + "Device\n", + "\n", + "\n", + "Device\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoWaveform->optogenetics.OptoStimParams\n", - "\n", + 
"Device->optogenetics.OptoProtocol\n", + "\n", "\n", "\n", "\n", "optogenetics.OptoWaveform.Sine\n", "\n", - "\n", - "optogenetics.OptoWaveform.Sine\n", + "\n", + "optogenetics.OptoWaveform.Sine\n", "\n", "\n", "\n", "\n", "\n", "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Sine\n", - "\n", + "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoWaveform.Ramp\n", - "\n", - "\n", - "optogenetics.OptoWaveform.Ramp\n", + "optogenetics.OptoWaveform.Square\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Square\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Ramp\n", - "\n", - "\n", - "\n", - "\n", - "subject.Subject\n", - "\n", - "\n", - "subject.Subject\n", - "\n", - "\n", + "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Square\n", + "\n", "\n", - "\n", - "\n", - "surgery.Implantation\n", - "\n", - "\n", - "surgery.Implantation\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Ramp\n", + "\n", + "\n", + "optogenetics.OptoWaveform.Ramp\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "subject.Subject->surgery.Implantation\n", - "\n", + "optogenetics.OptoWaveform->optogenetics.OptoWaveform.Ramp\n", + "\n", "\n", - "\n", - "\n", - "session.Session\n", - "\n", - "\n", - "session.Session\n", + "\n", + "\n", + "optogenetics.OptoStimParams\n", + "\n", + "\n", + "optogenetics.OptoStimParams\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "subject.Subject->session.Session\n", - "\n", + "optogenetics.OptoWaveform->optogenetics.OptoStimParams\n", + "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoStimParams->optogenetics.OptoProtocol\n", - "\n", + "optogenetics.OptoProtocol->optogenetics.OptoEvent\n", + "\n", "\n", - "\n", - "\n", - "optogenetics.OptoWaveformType\n", - "\n", - "\n", - "optogenetics.OptoWaveformType\n", + "\n", + "\n", + "session.Session\n", + "\n", + "\n", + "session.Session\n", "\n", "\n", "\n", - "\n", + "\n", "\n", - "optogenetics.OptoWaveformType->optogenetics.OptoWaveform\n", - "\n", + 
"session.Session->optogenetics.OptoProtocol\n", + "\n", "\n", - "\n", + "\n", "\n", - "surgery.Implantation->optogenetics.OptoProtocol\n", - "\n", + "optogenetics.OptoStimParams->optogenetics.OptoProtocol\n", + "\n", "\n", - "\n", + "\n", + "\n", + "subject.Subject\n", + "\n", + "\n", + "subject.Subject\n", + "\n", + "\n", + "\n", + "\n", "\n", - "session.Session->optogenetics.OptoProtocol\n", - "\n", + "subject.Subject->surgery.Implantation\n", + "\n", + "\n", + "\n", + "\n", + "subject.Subject->session.Session\n", + "\n", "\n", "\n", "" ], "text/plain": [ - "" + "" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -321,9 +335,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "Schema `neuro_subject`" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "subject.schema" ] @@ -338,9 +363,99 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

subject_nickname

\n", + " \n", + "
\n", + "

sex

\n", + " \n", + "
\n", + "

subject_birth_date

\n", + " \n", + "
\n", + "

subject_description

\n", + " \n", + "
\n", + " \n", + "

Total: 0

\n", + " " + ], + "text/plain": [ + "*subject subject_nickna sex subject_birth_ subject_descri\n", + "+---------+ +------------+ +-----+ +------------+ +------------+\n", + "\n", + " (Total: 0)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "subject.Subject()" ] @@ -355,18 +470,46 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'subject : varchar(8) \\n---\\nsubject_nickname=\"\" : varchar(64) \\nsex : enum(\\'M\\',\\'F\\',\\'U\\') \\nsubject_birth_date : date \\nsubject_description=\"\" : varchar(1024) \\n'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "subject.Subject.describe()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "# \n", + "subject : varchar(8) # \n", + "---\n", + "subject_nickname=\"\" : varchar(64) # \n", + "sex : enum('M','F','U') # \n", + "subject_birth_date : date # \n", + "subject_description=\"\" : varchar(1024) # " + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "subject.Subject.heading" ] @@ -381,9 +524,103 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

subject_nickname

\n", + " \n", + "
\n", + "

sex

\n", + " \n", + "
\n", + "

subject_birth_date

\n", + " \n", + "
\n", + "

subject_description

\n", + " \n", + "
subject1F2020-01-01Optogenetic pilot subject
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject subject_nickna sex subject_birth_ subject_descri\n", + "+----------+ +------------+ +-----+ +------------+ +------------+\n", + "subject1 F 2020-01-01 Optogenetic pi\n", + " (Total: 1)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "subject.Subject.insert1(\n", " dict(\n", @@ -406,18 +643,44 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'-> subject.Subject\\nsession_id : int \\n---\\nsession_datetime : datetime \\n'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "session.Session.describe()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "# \n", + "subject : varchar(8) # \n", + "session_id : int # \n", + "---\n", + "session_datetime : datetime # " + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "session.Session.heading" ] @@ -437,9 +700,95 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

session_datetime

\n", + " \n", + "
subject112022-04-04 12:22:15
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *session_id session_dateti\n", + "+----------+ +------------+ +------------+\n", + "subject1 1 2022-04-04 12:\n", + " (Total: 1)" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "session.Session.insert1(\n", " dict(\n", @@ -461,9 +810,98 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

device

\n", + " \n", + "
\n", + "

modality

\n", + " \n", + "
\n", + "

description

\n", + " \n", + "
OPTG_4OptogeneticsDoric Pulse Sequence Generator
OPTG_8Optogenetics8 channel pulse sequence device
\n", + " \n", + "

Total: 2

\n", + " " + ], + "text/plain": [ + "*device modality description \n", + "+--------+ +------------+ +------------+\n", + "OPTG_4 Optogenetics Doric Pulse Se\n", + "OPTG_8 Optogenetics 8 channel puls\n", + " (Total: 2)" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "Device.insert1(\n", " dict(\n", @@ -471,7 +909,8 @@ " modality=\"Optogenetics\",\n", " description=\"8 channel pulse sequence device\",\n", " )\n", - ")" + ")\n", + "Device()" ] }, { @@ -484,13 +923,104 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " Table for storing user information.\n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "
\n", + "

user

\n", + " username, short identifier\n", + "
\n", + "

user_email

\n", + " \n", + "
\n", + "

user_cellphone

\n", + " \n", + "
\n", + "

user_fullname

\n", + " Full name used to uniquely identify an individual\n", + "
User1
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*user user_email user_cellphone user_fullname \n", + "+-------+ +------------+ +------------+ +------------+\n", + "User1 \n", + " (Total: 1)" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "lab.User.insert1(\n", " dict(user=\"User1\")\n", - ")" + ")\n", + "lab.User()" ] }, { @@ -503,18 +1033,181 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "
\n", + "

reference

\n", + " \n", + "
bregma
dura
lambda
sagittal_suture
sinus
skull_surface
\n", + " \n", + "

Total: 6

\n", + " " + ], + "text/plain": [ + "*reference \n", + "+------------+\n", + "bregma \n", + "dura \n", + "lambda \n", + "sagittal_sutur\n", + "sinus \n", + "skull_surface \n", + " (Total: 6)" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "surgery.CoordinateReference()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "
\n", + "

hemisphere

\n", + " Brain region hemisphere\n", + "
left
middle
right
\n", + " \n", + "

Total: 3

\n", + " " + ], + "text/plain": [ + "*hemisphere \n", + "+------------+\n", + "left \n", + "middle \n", + "right \n", + " (Total: 3)" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "surgery.Hemisphere()" ] @@ -529,32 +1222,251 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "
\n", + "

region_acronym

\n", + " Brain region shorthand\n", + "
\n", + "

region_name

\n", + " Brain region full name\n", + "
dHPDorsal Hippocampus
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*region_acrony region_name \n", + "+------------+ +------------+\n", + "dHP Dorsal Hippoca\n", + " (Total: 1)" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "surgery.BrainRegion.insert1(\n", " dict(\n", " region_acronym=\"dHP\", \n", " region_name=\"Dorsal Hippocampus\")\n", ")\n", - "\n", + "surgery.BrainRegion()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

implant_date

\n", + " surgery date\n", + "
\n", + "

implant_type

\n", + " Short name for type of implanted device\n", + "
\n", + "

target_region

\n", + " Brain region shorthand\n", + "
\n", + "

target_hemisphere

\n", + " Brain region hemisphere\n", + "
\n", + "

ap

\n", + " (mm) anterior-posterior; ref is 0\n", + "
\n", + "

ap_ref

\n", + " \n", + "
\n", + "

ml

\n", + " (mm) medial axis; ref is 0\n", + "
\n", + "

ml_ref

\n", + " \n", + "
\n", + "

dv

\n", + " (mm) dorso-ventral axis; ventral negative\n", + "
\n", + "

dv_ref

\n", + " \n", + "
\n", + "

theta

\n", + " (deg) rot about ml-axis [0, 180] wrt z\n", + "
\n", + "

phi

\n", + " (deg) rot about dv-axis [0, 360] wrt x\n", + "
\n", + "

beta

\n", + " (deg) rot about shank [-180, 180] wrt anterior\n", + "
subject12022-04-01 12:13:14optodHPleft-7.9bregma-1.8bregma5.0skull_surface11.50.0nan
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *implant_date *implant_type *target_region *target_hemisp ap ap_ref ml ml_ref dv dv_ref theta phi beta \n", + "+----------+ +------------+ +------------+ +------------+ +------------+ +------+ +--------+ +------+ +--------+ +-----+ +------------+ +-------+ +-----+ +------+\n", + "subject1 2022-04-01 12: opto dHP left -7.9 bregma -1.8 bregma 5.0 skull_surface 11.5 0.0 nan \n", + " (Total: 1)" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ "surgery.Implantation.insert1(\n", " dict(\n", " subject=\"subject1\",\n", " implant_date=\"2022-04-01 12:13:14\",\n", - " implant_type=\"optogenetics\",\n", + " implant_type=\"opto\",\n", " target_region=\"dHP\",\n", " target_hemisphere=\"left\",\n", " surgeon=\"User1\",\n", " )\n", ")\n", - "\n", "surgery.Implantation.Coordinate.insert1(\n", " dict(\n", " subject=\"subject1\",\n", " implant_date=\"2022-04-01 12:13:14\",\n", - " implant_type=\"optogenetics\",\n", + " implant_type=\"opto\",\n", " target_region=\"dHP\",\n", " target_hemisphere=\"left\",\n", " ap=\"-7.9\", # [mm] anterior-posterior distance\n", @@ -567,7 +1479,8 @@ " phi=\"0\", # [0, 360] degree rotation about dv-axis relative to x\n", " beta=None, # [-180, 180] degree rotation about shank relative to anterior\n", " )\n", - ")" + ")\n", + "surgery.Implantation.Coordinate()" ] }, { @@ -580,9 +1493,95 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "
\n", + "

waveform_name

\n", + " \n", + "
\n", + "

on_proportion

\n", + " Proportion of stimulus on time within a cycle\n", + "
\n", + "

off_proportion

\n", + " Proportion of stimulus off time within a cycle\n", + "
square_100.100.90
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*waveform_name on_proportion off_proportion\n", + "+------------+ +------------+ +------------+\n", + "square_10 0.10 0.90 \n", + " (Total: 1)" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "optogenetics.OptoWaveform.insert1(\n", " dict(\n", @@ -599,14 +1598,117 @@ " waveform_name=\"square_10\", \n", " on_proportion=0.10, \n", " off_proportion=0.90)\n", - ")" + ")\n", + "optogenetics.OptoWaveform.Square()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " Defines a single optical stimulus that repeats.\n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

opto_params_id

\n", + " \n", + "
\n", + "

waveform_name

\n", + " \n", + "
\n", + "

wavelength

\n", + " (nm) wavelength of optical stimulation light\n", + "
\n", + "

power

\n", + " (mW) total power from light source\n", + "
\n", + "

light_intensity

\n", + " (mW/mm2) power for given area\n", + "
\n", + "

frequency

\n", + " (Hz) frequency of the waveform\n", + "
\n", + "

duration

\n", + " (ms) duration of each optical stimulus\n", + "
1square_10470None10.201.0241.0
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*opto_params_i waveform_name wavelength power light_intensit frequency duration \n", + "+------------+ +------------+ +------------+ +-------+ +------------+ +-----------+ +----------+\n", + "1 square_10 470 None 10.20 1.0 241.0 \n", + " (Total: 1)" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "optogenetics.OptoStimParams.insert1(\n", " dict(\n", @@ -617,7 +1719,8 @@ " frequency=1,\n", " duration=241,\n", " )\n", - ")" + ")\n", + "optogenetics.OptoStimParams()" ] }, { @@ -630,9 +1733,123 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

protocol_id

\n", + " \n", + "
\n", + "

opto_params_id

\n", + " \n", + "
\n", + "

implant_date

\n", + " surgery date\n", + "
\n", + "

implant_type

\n", + " Short name for type of implanted device\n", + "
\n", + "

target_region

\n", + " Brain region shorthand\n", + "
\n", + "

target_hemisphere

\n", + " Brain region hemisphere\n", + "
\n", + "

device

\n", + " \n", + "
\n", + "

protocol_description

\n", + " description of optogenetics protocol\n", + "
subject11112022-04-01 12:13:14optodHPleftOPTG_4
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *session_id *protocol_id opto_params_id implant_date implant_type target_region target_hemisph device protocol_descr\n", + "+----------+ +------------+ +------------+ +------------+ +------------+ +------------+ +------------+ +------------+ +--------+ +------------+\n", + "subject1 1 1 1 2022-04-01 12: opto dHP left OPTG_4 \n", + " (Total: 1)" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "optogenetics.OptoProtocol.insert1(\n", " dict(\n", @@ -641,12 +1858,13 @@ " protocol_id=\"1\",\n", " opto_params_id=\"1\",\n", " implant_date=\"2022-04-01 12:13:14\",\n", - " implant_type=\"optogenetics\",\n", + " implant_type=\"opto\",\n", " target_region=\"dHP\",\n", " target_hemisphere=\"left\",\n", " device=\"OPTG_4\",\n", " )\n", - ")" + ")\n", + "optogenetics.OptoProtocol()" ] }, { @@ -659,11 +1877,105 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

protocol_id

\n", + " \n", + "
\n", + "

stim_start_time

\n", + " (s) stimulus start time relative to session start\n", + "
\n", + "

stim_end_time

\n", + " (s) stimulus end time relative session start\n", + "
subject111241.0482.0
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *session_id *protocol_id *stim_start_ti stim_end_time \n", + "+----------+ +------------+ +------------+ +------------+ +------------+\n", + "subject1 1 1 241.0 482.0 \n", + " (Total: 1)" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "optogenetics.OptoEvent.insert(\n", + "optogenetics.OptoEvent.insert1(\n", " dict(\n", " subject=\"subject1\",\n", " session_id=1,\n", @@ -671,7 +1983,8 @@ " stim_start_time=241,\n", " stim_end_time=482,\n", " )\n", - ")" + ")\n", + "optogenetics.OptoEvent()" ] }, { @@ -684,11 +1997,110 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

protocol_id

\n", + " \n", + "
\n", + "

stim_start_time

\n", + " (s) stimulus start time relative to session start\n", + "
\n", + "

stim_end_time

\n", + " (s) stimulus end time relative session start\n", + "
subject111241.0482.0
subject111543.0797.0
\n", + " \n", + "

Total: 2

\n", + " " + ], + "text/plain": [ + "*subject *session_id *protocol_id *stim_start_ti stim_end_time \n", + "+----------+ +------------+ +------------+ +------------+ +------------+\n", + "subject1 1 1 241.0 482.0 \n", + "subject1 1 1 543.0 797.0 \n", + " (Total: 2)" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "optogenetics.OptoEvent.insert(\n", + "optogenetics.OptoEvent.insert1(\n", " dict(\n", " subject=\"subject1\",\n", " session_id=1,\n", @@ -696,7 +2108,8 @@ " stim_start_time=543,\n", " stim_end_time=797,\n", " )\n", - ")" + ")\n", + "optogenetics.OptoEvent()" ] }, { @@ -711,9 +2124,108 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

protocol_id

\n", + " \n", + "
\n", + "

stim_start_time

\n", + " (s) stimulus start time relative to session start\n", + "
\n", + "

stim_end_time

\n", + " (s) stimulus end time relative session start\n", + "
subject111241.0482.0
subject111543.0797.0
\n", + " \n", + "

Total: 2

\n", + " " + ], + "text/plain": [ + "*subject *session_id *protocol_id *stim_start_ti stim_end_time \n", + "+----------+ +------------+ +------------+ +------------+ +------------+\n", + "subject1 1 1 241.0 482.0 \n", + "subject1 1 1 543.0 797.0 \n", + " (Total: 2)" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "optogenetics.OptoEvent()" ] @@ -728,9 +2240,103 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

protocol_id

\n", + " \n", + "
\n", + "

stim_start_time

\n", + " (s) stimulus start time relative to session start\n", + "
\n", + "

stim_end_time

\n", + " (s) stimulus end time relative session start\n", + "
subject111543.0797.0
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *session_id *protocol_id *stim_start_ti stim_end_time \n", + "+----------+ +------------+ +------------+ +------------+ +------------+\n", + "subject1 1 1 543.0 797.0 \n", + " (Total: 1)" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "optogenetics.OptoEvent & \"stim_start_time=543\"" ] @@ -745,9 +2351,147 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " \n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
\n", + "

subject

\n", + " \n", + "
\n", + "

session_id

\n", + " \n", + "
\n", + "

protocol_id

\n", + " \n", + "
\n", + "

opto_params_id

\n", + " \n", + "
\n", + "

implant_date

\n", + " surgery date\n", + "
\n", + "

implant_type

\n", + " Short name for type of implanted device\n", + "
\n", + "

target_region

\n", + " Brain region shorthand\n", + "
\n", + "

target_hemisphere

\n", + " Brain region hemisphere\n", + "
\n", + "

device

\n", + " \n", + "
\n", + "

protocol_description

\n", + " description of optogenetics protocol\n", + "
\n", + "

waveform_name

\n", + " \n", + "
\n", + "

wavelength

\n", + " (nm) wavelength of optical stimulation light\n", + "
\n", + "

power

\n", + " (mW) total power from light source\n", + "
\n", + "

light_intensity

\n", + " (mW/mm2) power for given area\n", + "
\n", + "

frequency

\n", + " (Hz) frequency of the waveform\n", + "
\n", + "

duration

\n", + " (ms) duration of each optical stimulus\n", + "
subject11112022-04-01 12:13:14optodHPleftOPTG_4square_10470None10.201.0241.0
\n", + " \n", + "

Total: 1

\n", + " " + ], + "text/plain": [ + "*subject *session_id *protocol_id *opto_params_i implant_date implant_type target_region target_hemisph device protocol_descr waveform_name wavelength power light_intensit frequency duration \n", + "+----------+ +------------+ +------------+ +------------+ +------------+ +------------+ +------------+ +------------+ +--------+ +------------+ +------------+ +------------+ +-------+ +------------+ +-----------+ +----------+\n", + "subject1 1 1 1 2022-04-01 12: opto dHP left OPTG_4 square_10 470 None 10.20 1.0 241.0 \n", + " (Total: 1)" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "optogenetics.OptoProtocol * optogenetics.OptoStimParams" ] @@ -761,16 +2505,36 @@ "\n", "The `fetch` and `fetch1` methods download the data from the query object into the workspace.\n", "\n", - "Below we will run `fetch()` without any arguments to return a list of dictionaries containing all attributes of all entries in the table." + "Below we will run `fetch()` without any arguments to return all attributes of all entries in the table." 
] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "[{'subject': 'subject1',\n", + " 'session_id': 1,\n", + " 'protocol_id': 1,\n", + " 'stim_start_time': 241.0,\n", + " 'stim_end_time': 482.0},\n", + " {'subject': 'subject1',\n", + " 'session_id': 1,\n", + " 'protocol_id': 1,\n", + " 'stim_start_time': 543.0,\n", + " 'stim_end_time': 797.0}]" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "optogenetics.OptoEvent.fetch()" + "optogenetics.OptoEvent.fetch(as_dict=True)" ] }, { @@ -783,9 +2547,24 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "{'subject': 'subject1',\n", + " 'session_id': 1,\n", + " 'protocol_id': 1,\n", + " 'stim_start_time': 543.0,\n", + " 'stim_end_time': 797.0}" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "(optogenetics.OptoEvent & \"stim_start_time=543\").fetch1()" ] From dcbed0123a971c77fc083172752d3a30960a2976 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:13:18 +0000 Subject: [PATCH 49/58] Update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3539126..5090f5f 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ environment and notebook to learn the pipeline. 
![flowchart](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/flowchart.svg) -## Data Pipeline +## Data Pipeline Diagram ![pipeline](https://raw.githubusercontent.com/datajoint/element-optogenetics/main/images/pipeline.svg) From 147f8762c9cb056412f2d0145e575ad85dda6bdd Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:13:30 +0000 Subject: [PATCH 50/58] Update changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0344ead..9690f16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. -## [0.1.0] - 2023-04-20 +## [0.1.0] - 2023-05-04 + Add - First release From 06acd652f925a01110e7e0a5c0d7c9939af574b6 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:13:42 +0000 Subject: [PATCH 51/58] Update python kernel --- .devcontainer/local-test/devcontainer.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.devcontainer/local-test/devcontainer.json b/.devcontainer/local-test/devcontainer.json index 3e57a7b..e95e23e 100644 --- a/.devcontainer/local-test/devcontainer.json +++ b/.devcontainer/local-test/devcontainer.json @@ -17,6 +17,9 @@ 3306 ], "customizations": { + "settings": { + "python.pythonPath": "/usr/local/bin/python" + }, "vscode": { "extensions": [ "ms-python.python" From 6bd7c47de592ae03f6b4438de0c1f05e44b64ef9 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:16:00 +0000 Subject: [PATCH 52/58] Update resources --- .devcontainer/devcontainer.json | 4 ++-- .devcontainer/local-test/devcontainer.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 80131cb..b9cf1a2 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -9,8 
+9,8 @@ "onCreateCommand": "pip install -e . && MYSQL_VER=8.0 docker compose down && MYSQL_VER=8.0 docker compose up --build --wait", "postStartCommand": "docker volume prune -f", "hostRequirements": { - "cpus": 4, - "memory": "8gb", + "cpus": 8, + "memory": "16gb", "storage": "32gb" }, "forwardPorts": [ diff --git a/.devcontainer/local-test/devcontainer.json b/.devcontainer/local-test/devcontainer.json index e95e23e..4eed2e0 100644 --- a/.devcontainer/local-test/devcontainer.json +++ b/.devcontainer/local-test/devcontainer.json @@ -9,8 +9,8 @@ "onCreateCommand": "pip install -r ./requirements_dev.txt && pip install -e . && pip install -e ../element-optogenetics && MYSQL_VER=8.0 docker compose down && MYSQL_VER=8.0 docker compose up --build --wait", "postStartCommand": "docker volume prune -f", "hostRequirements": { - "cpus": 4, - "memory": "8gb", + "cpus": 8, + "memory": "16gb", "storage": "32gb" }, "forwardPorts": [ From 38ebbabdd621407c816a77455036db496cc2bb44 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:29:44 +0000 Subject: [PATCH 53/58] Add extension --- .devcontainer/devcontainer.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b9cf1a2..d18e1d2 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -22,7 +22,8 @@ }, "vscode": { "extensions": [ - "ms-python.python" + "ms-python.python", + "ms-toolsai.jupyter" ] } } From e528e42d9619396e2f8aff2358f89b6af309f7da Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:43:36 +0000 Subject: [PATCH 54/58] Update requirements --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3dd2473..dc4f6cd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,6 @@ element-animal>=0.1.2 element-event>=0.1.2 element-interface>=0.5.1 element-lab>=0.1.1 -element-optogenetics @ 
git+https://github.com/datajoint/element-optogenetics.git +element-optogenetics>=0.1.1 element-session>=0.1.2 ipykernel>=6.0.1 \ No newline at end of file From 961aa308a7873a682de9a5fe2d13a86e321e99bb Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 04:44:41 +0000 Subject: [PATCH 55/58] Add extension --- .devcontainer/local-test/devcontainer.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.devcontainer/local-test/devcontainer.json b/.devcontainer/local-test/devcontainer.json index 4eed2e0..ace83ee 100644 --- a/.devcontainer/local-test/devcontainer.json +++ b/.devcontainer/local-test/devcontainer.json @@ -22,7 +22,8 @@ }, "vscode": { "extensions": [ - "ms-python.python" + "ms-python.python", + "ms-toolsai.jupyter" ] } } From 8dbee85916e4ff92c8aba0d37416eeae0a0d4b04 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 5 May 2023 09:08:28 -0500 Subject: [PATCH 56/58] Add print statements --- notebooks/tutorial.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb index bc53838..d0470c4 100644 --- a/notebooks/tutorial.ipynb +++ b/notebooks/tutorial.ipynb @@ -485,7 +485,7 @@ } ], "source": [ - "subject.Subject.describe()" + "print(subject.Subject.describe())" ] }, { @@ -658,7 +658,7 @@ } ], "source": [ - "session.Session.describe()" + "print(session.Session.describe())" ] }, { From 64ff479392b301932732d45e7035c0cf84774fa6 Mon Sep 17 00:00:00 2001 From: kushalbakshi Date: Fri, 5 May 2023 11:18:10 -0500 Subject: [PATCH 57/58] Minor typo fix in `tutorial.ipynb` --- notebooks/tutorial.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb index d0470c4..fa611be 100644 --- a/notebooks/tutorial.ipynb +++ b/notebooks/tutorial.ipynb @@ -1488,7 +1488,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We'll add information to describe the stimulus, including waveform shape and and 
stimulation parameters." + "We'll add information to describe the stimulus, including waveform shape and stimulation parameters." ] }, { From 98aad0d4e7966e18936c5c90b9ed50c9c22c5404 Mon Sep 17 00:00:00 2001 From: kushalbakshi Date: Mon, 8 May 2023 10:55:58 -0500 Subject: [PATCH 58/58] Fix typo in notebook --- notebooks/tutorial.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb index fa611be..5a73025 100644 --- a/notebooks/tutorial.ipynb +++ b/notebooks/tutorial.ipynb @@ -358,7 +358,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The table classes in the module corresponds to a table in the database." + "The table classes in the module correspond to a table in the database." ] }, {