From d0ab982cd2e8c763d4c9da4298bca7eb9c8e0c4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yoshiki=20V=C3=A1zquez=20Baeza?= Date: Mon, 23 Jun 2014 21:45:00 -0700 Subject: [PATCH 1/9] Initial commit Note that this commit ports over the contents of the previously existing repository. Fixes #59 --- .coveragerc | 19 + .gitignore | 46 + .travis.yml | 34 + INSTALL.md | 54 + LICENSE | 27 + MANIFEST.in | 14 + README.md | 7 + config/database.yml | 4 + qiita_core/__init__.py | 10 + qiita_core/configuration_manager.py | 119 + qiita_core/environment_manager.py | 16 + qiita_core/exceptions.py | 60 + qiita_core/qiita_settings.py | 14 + qiita_core/search.py | 99 + qiita_core/support_files/config_demo.txt | 48 + qiita_core/support_files/config_test.txt | 48 + qiita_core/util.py | 129 + qiita_db/__init__.py | 12 + qiita_db/analysis.py | 381 ++ qiita_db/base.py | 280 + qiita_db/commands.py | 230 + qiita_db/data.py | 505 ++ qiita_db/environment_manager.py | 255 + qiita_db/exceptions.py | 64 + qiita_db/investigation.py | 79 + qiita_db/job.py | 363 ++ qiita_db/logger.py | 187 + qiita_db/metadata_template.py | 943 ++++ qiita_db/sql_connection.py | 217 + qiita_db/study.py | 654 +++ qiita_db/support_files/initialize.sql | 44 + qiita_db/support_files/populate_test_db.sql | 348 ++ qiita_db/support_files/qiita-db-settings.sql | 5 + qiita_db/support_files/qiita-db.dbs | 1379 +++++ qiita_db/support_files/qiita-db.html | 4612 +++++++++++++++++ qiita_db/support_files/qiita-db.sql | 882 ++++ .../test_data/job/1_job_result.txt | 1 + .../test_data/job/2_test_folder/testfile.txt | 1 + .../test_data/preprocessed_data/seqs.fna | 1 + .../test_data/preprocessed_data/seqs.qual | 1 + ...study_1001_closed_reference_otu_table.biom | 1 + .../reference/gg_97_otus_4feb2011.fasta | 1 + .../reference/gg_97_otus_4feb2011.tre | 1 + .../test_data/reference/greengenes_tax.txt | 1 + .../test_data/reference/params_qiime.txt | 1 + .../support_files/work_data/placeholder.txt | 1 + qiita_db/test/__init__.py | 12 + 
qiita_db/test/test_analysis.py | 146 + qiita_db/test/test_base.py | 161 + qiita_db/test/test_commands.py | 351 ++ qiita_db/test/test_data.py | 422 ++ qiita_db/test/test_data/sample_template.txt | 5 + qiita_db/test/test_job.py | 196 + qiita_db/test/test_logger.py | 68 + qiita_db/test/test_metadata_template.py | 1058 ++++ qiita_db/test/test_setup.py | 125 + qiita_db/test/test_study.py | 420 ++ qiita_db/test/test_user.py | 203 + qiita_db/test/test_util.py | 186 + qiita_db/user.py | 377 ++ qiita_db/util.py | 512 ++ qiita_pet/__init__.py | 10 + qiita_pet/handlers/__init__.py | 10 + qiita_pet/handlers/analysis_handlers.py | 176 + qiita_pet/handlers/auth_handlers.py | 98 + qiita_pet/handlers/base_handlers.py | 46 + qiita_pet/handlers/websocket_handlers.py | 63 + .../results/admin/jobname/placeholder.html | 1 + qiita_pet/static/css/style.css | 5 + qiita_pet/static/img/favicon.ico | Bin 0 -> 32988 bytes qiita_pet/static/img/logo-clear.png | Bin 0 -> 13936 bytes qiita_pet/static/img/logo.png | Bin 0 -> 14963 bytes .../static/vendor/css/bootstrap-theme.min.css | 7 + qiita_pet/static/vendor/css/bootstrap.min.css | 7 + .../static/vendor/css/jquery.qtip.min.css | 2 + qiita_pet/static/vendor/js/bootstrap.min.js | 6 + qiita_pet/static/vendor/js/dropdown.min.js | 8 + .../static/vendor/js/jquery-2.1.0.min.js | 4 + qiita_pet/static/vendor/js/jquery.qtip.min.js | 4 + .../static/vendor/js/jquery.validate.min.js | 4 + qiita_pet/static/vendor/js/popover.min.js | 8 + .../static/vendor/js/popover_edit.min.js | 8 + qiita_pet/static/vendor/js/tooltip.min.js | 9 + qiita_pet/static/vendor/js/underscore.min.js | 6 + .../vendor/licences/bootstrap_license.txt | 21 + .../static/vendor/licences/jquery_license.txt | 21 + .../vendor/licences/underscore_license.txt | 23 + qiita_pet/templates/404.html | 4 + qiita_pet/templates/analysis_results.html | 52 + qiita_pet/templates/analysis_waiting.html | 59 + qiita_pet/templates/create_user.html | 23 + qiita_pet/templates/error.html | 12 + 
qiita_pet/templates/index.html | 23 + qiita_pet/templates/login.html | 25 + qiita_pet/templates/select_commands.html | 47 + qiita_pet/templates/select_studies.html | 40 + qiita_pet/templates/show_analyses.html | 41 + qiita_pet/templates/sitebase.html | 131 + qiita_pet/uploads/placeholder.txt | 1 + qiita_pet/webserver.py | 74 + qiita_ware/__init__.py | 10 + qiita_ware/cluster.py | 134 + qiita_ware/exceptions.py | 42 + qiita_ware/run.py | 81 + qiita_ware/test/test_run.py | 27 + scripts/qiita_db | 156 + scripts/qiita_env | 101 + setup.py | 65 + test.txt | 4 + 109 files changed, 18139 insertions(+) create mode 100644 .coveragerc create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 INSTALL.md create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 README.md create mode 100644 config/database.yml create mode 100644 qiita_core/__init__.py create mode 100644 qiita_core/configuration_manager.py create mode 100644 qiita_core/environment_manager.py create mode 100644 qiita_core/exceptions.py create mode 100644 qiita_core/qiita_settings.py create mode 100644 qiita_core/search.py create mode 100644 qiita_core/support_files/config_demo.txt create mode 100644 qiita_core/support_files/config_test.txt create mode 100644 qiita_core/util.py create mode 100644 qiita_db/__init__.py create mode 100644 qiita_db/analysis.py create mode 100644 qiita_db/base.py create mode 100644 qiita_db/commands.py create mode 100644 qiita_db/data.py create mode 100644 qiita_db/environment_manager.py create mode 100644 qiita_db/exceptions.py create mode 100644 qiita_db/investigation.py create mode 100644 qiita_db/job.py create mode 100644 qiita_db/logger.py create mode 100644 qiita_db/metadata_template.py create mode 100644 qiita_db/sql_connection.py create mode 100644 qiita_db/study.py create mode 100644 qiita_db/support_files/initialize.sql create mode 100644 qiita_db/support_files/populate_test_db.sql create mode 100644 
qiita_db/support_files/qiita-db-settings.sql create mode 100644 qiita_db/support_files/qiita-db.dbs create mode 100644 qiita_db/support_files/qiita-db.html create mode 100644 qiita_db/support_files/qiita-db.sql create mode 100644 qiita_db/support_files/test_data/job/1_job_result.txt create mode 100644 qiita_db/support_files/test_data/job/2_test_folder/testfile.txt create mode 100644 qiita_db/support_files/test_data/preprocessed_data/seqs.fna create mode 100644 qiita_db/support_files/test_data/preprocessed_data/seqs.qual create mode 100644 qiita_db/support_files/test_data/processed_data/study_1001_closed_reference_otu_table.biom create mode 100644 qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.fasta create mode 100644 qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.tre create mode 100644 qiita_db/support_files/test_data/reference/greengenes_tax.txt create mode 100644 qiita_db/support_files/test_data/reference/params_qiime.txt create mode 100644 qiita_db/support_files/work_data/placeholder.txt create mode 100644 qiita_db/test/__init__.py create mode 100644 qiita_db/test/test_analysis.py create mode 100644 qiita_db/test/test_base.py create mode 100644 qiita_db/test/test_commands.py create mode 100644 qiita_db/test/test_data.py create mode 100644 qiita_db/test/test_data/sample_template.txt create mode 100644 qiita_db/test/test_job.py create mode 100644 qiita_db/test/test_logger.py create mode 100644 qiita_db/test/test_metadata_template.py create mode 100644 qiita_db/test/test_setup.py create mode 100644 qiita_db/test/test_study.py create mode 100644 qiita_db/test/test_user.py create mode 100644 qiita_db/test/test_util.py create mode 100644 qiita_db/user.py create mode 100644 qiita_db/util.py create mode 100644 qiita_pet/__init__.py create mode 100644 qiita_pet/handlers/__init__.py create mode 100644 qiita_pet/handlers/analysis_handlers.py create mode 100644 qiita_pet/handlers/auth_handlers.py create mode 100644 
qiita_pet/handlers/base_handlers.py create mode 100644 qiita_pet/handlers/websocket_handlers.py create mode 100644 qiita_pet/results/admin/jobname/placeholder.html create mode 100644 qiita_pet/static/css/style.css create mode 100644 qiita_pet/static/img/favicon.ico create mode 100644 qiita_pet/static/img/logo-clear.png create mode 100644 qiita_pet/static/img/logo.png create mode 100644 qiita_pet/static/vendor/css/bootstrap-theme.min.css create mode 100644 qiita_pet/static/vendor/css/bootstrap.min.css create mode 100644 qiita_pet/static/vendor/css/jquery.qtip.min.css create mode 100644 qiita_pet/static/vendor/js/bootstrap.min.js create mode 100644 qiita_pet/static/vendor/js/dropdown.min.js create mode 100644 qiita_pet/static/vendor/js/jquery-2.1.0.min.js create mode 100644 qiita_pet/static/vendor/js/jquery.qtip.min.js create mode 100755 qiita_pet/static/vendor/js/jquery.validate.min.js create mode 100644 qiita_pet/static/vendor/js/popover.min.js create mode 100644 qiita_pet/static/vendor/js/popover_edit.min.js create mode 100644 qiita_pet/static/vendor/js/tooltip.min.js create mode 100644 qiita_pet/static/vendor/js/underscore.min.js create mode 100644 qiita_pet/static/vendor/licences/bootstrap_license.txt create mode 100644 qiita_pet/static/vendor/licences/jquery_license.txt create mode 100644 qiita_pet/static/vendor/licences/underscore_license.txt create mode 100644 qiita_pet/templates/404.html create mode 100644 qiita_pet/templates/analysis_results.html create mode 100644 qiita_pet/templates/analysis_waiting.html create mode 100644 qiita_pet/templates/create_user.html create mode 100644 qiita_pet/templates/error.html create mode 100644 qiita_pet/templates/index.html create mode 100644 qiita_pet/templates/login.html create mode 100644 qiita_pet/templates/select_commands.html create mode 100644 qiita_pet/templates/select_studies.html create mode 100644 qiita_pet/templates/show_analyses.html create mode 100644 qiita_pet/templates/sitebase.html create mode 100644 
qiita_pet/uploads/placeholder.txt create mode 100644 qiita_pet/webserver.py create mode 100644 qiita_ware/__init__.py create mode 100644 qiita_ware/cluster.py create mode 100644 qiita_ware/exceptions.py create mode 100644 qiita_ware/run.py create mode 100644 qiita_ware/test/test_run.py create mode 100755 scripts/qiita_db create mode 100755 scripts/qiita_env create mode 100644 setup.py create mode 100644 test.txt diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 000000000..3cfb7507d --- /dev/null +++ b/.coveragerc @@ -0,0 +1,19 @@ +# this file is based on the examples provided on scikit-learn's .coveragerc + +[run] +omit = + */test* + */__init__.py +source = qiita_core,qiita_ware,qiita_db,qiita_pet +branch = True +include = */qiita_*/* + +[report] +exclude_lines = + pragma: no cover + def __repr__ + raise NotImplementedError + if __name__ == .__main__.: +omit = + */test* + */__init__.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..a37c871cc --- /dev/null +++ b/.gitignore @@ -0,0 +1,46 @@ +*.py[cod] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.tox +nosetests.xml + +# Translations +*.mo + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# Config files +config.py + +# OSX files +.DS_Store + +# vi swap and temp files +*.swp +*.swo +*~ diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..63014c229 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,34 @@ +language: python +env: + - PYTHON_VERSION=3.4 + - PYTHON_VERSION=2.7 +before_install: + - wget http://repo.continuum.io/miniconda/Miniconda-2.2.2-Linux-x86_64.sh -O miniconda.sh + - chmod +x miniconda.sh + - ./miniconda.sh -b + - export PATH=/home/travis/anaconda/bin:$PATH + # Update conda itself + - conda update --yes conda +install: + - conda create --yes -n env_name 
python=$PYTHON_VERSION pip nose pep8 openpyxl=1.8.2 pandas ipython pyzmq + - source activate env_name + - pip + - pip install coveralls + - pip install . +script: + - ipython profile create qiita_general --parallel + - ipython profile create qiita_demo --parallel + - ipython profile create qiita_reserved --parallel + - qiita_env start_cluster --cluster general + - qiita_env start_cluster --cluster demo + - qiita_env start_cluster --cluster reserved + - qiita_env make_env --env test + - nosetests --with-doctest --with-coverage + - pep8 qiita_db qiita_core qiita_pet setup.py + # we need to run the test suite from setup.py for coveralls to grab the info + # - coverage run setup.py test + # - coverage report -m +services: + - redis-server +after_success: + - coveralls diff --git a/INSTALL.md b/INSTALL.md new file mode 100644 index 000000000..20edb023f --- /dev/null +++ b/INSTALL.md @@ -0,0 +1,54 @@ +Dependencies +------------ + +Qiita is a python package, with support for python 2.7 and 3.2, that depends on the following python libraries (all of them can be installed using pip): + + + +* [tornado 3.1.1](http://www.tornadoweb.org/en/stable/) +* [tornado-redis](https://pypi.python.org/pypi/tornado-redis) +* [Psycopg2](http://initd.org/psycopg/download/) +* [click](http://click.pocoo.org/) +* [NumPy](https://github.com/numpy/numpy) +* [Pandas](http://pandas.pydata.org/) +* [QIIME development version](https://github.com/biocore/qiime) +* [future](http://python-future.org/) +* [bcrypt](https://github.com/pyca/bcrypt/) +* [redis](https://github.com/andymccurdy/redis-py) + +And on the following packages: + +* [PostgreSQL 9.3](http://www.postgresql.org/download/) +* [redis 2.8.0](https://pypi.python.org/pypi/redis/) + + + +Install +------- + +Once you have [PostgreSQL](http://www.postgresql.org/download/) and [redis](https://pypi.python.org/pypi/redis/) installed (follow the instructions on their web site), simply run these commands to install qiita and configure the demo 
environment, replacing $QIITA_DIR for the path where qiita is installed +(note that if you are not using Ubuntu you might need to follow the instructions in the next section): + +```bash +echo "export QIITA_CONFIG_FP=$QIITA_DIR/qiita_core/support_files/config_demo.txt" >> ~/.bashrc +source ~/.bashrc +pip install https://github.com/biocore/qiita/archive/master.zip +qiita_env make_env --env demo +``` +## If using other operating systems that are not Ubuntu + +You will need to add the postgres user to the database. In order to do this, run: + +```bash +createuser -s postgres -d +``` + +If you receive the following error, you can ignore this step and continue with the qiita installation: +```bash +createuser: creation of new role failed: ERROR: role "postgres" already exists +``` diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..1b041d032 --- /dev/null +++ b/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Qiita development team +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + + Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..c56b47153 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,14 @@ +include README.md +include INSTALL.md +include LICENSE + +graft qiita_core +graft qiita_db +graft qiita_pet +graft qiita_ware +graft scripts + +global-exclude *.pyc +global-exclude *.pyo +global-exclude .git +global-exclude *~ diff --git a/README.md b/README.md new file mode 100644 index 000000000..fc564c345 --- /dev/null +++ b/README.md @@ -0,0 +1,7 @@ +Qiita +===== + +[![Build Status](https://travis-ci.org/biocore/qiita.png?branch=master)](https://travis-ci.org/biocore/qiita) +[![Coverage Status](https://coveralls.io/repos/biocore/qiita/badge.png?branch=master)](https://coveralls.io/r/biocore/qiita) + +Welcome to Qiita (canonically pronounced *cheetah*) the QIIME databasing effort to enable rapid analysis of microbial ecology datasets. The Qiita repository is responsible for defining the data model and the Python API for interacting with a Qiita database. 
diff --git a/config/database.yml b/config/database.yml new file mode 100644 index 000000000..fec60d2fe --- /dev/null +++ b/config/database.yml @@ -0,0 +1,4 @@ +test: + adapter: postgresql + database: qiita_test + username: postgres \ No newline at end of file diff --git a/qiita_core/__init__.py b/qiita_core/__init__.py new file mode 100644 index 000000000..22f1e9179 --- /dev/null +++ b/qiita_core/__init__.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- diff --git a/qiita_core/configuration_manager.py b/qiita_core/configuration_manager.py new file mode 100644 index 000000000..dd878b144 --- /dev/null +++ b/qiita_core/configuration_manager.py @@ -0,0 +1,119 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from functools import partial +from os.path import join, dirname, abspath +from os import environ +from future import standard_library +with standard_library.hooks(): + from configparser import (ConfigParser, NoOptionError, + MissingSectionHeaderError) + + +class ConfigurationManager(object): + """Holds the QIITA configuration + + Parameters + ---------- + conf_fp: str, optional + Filepath to the configuration file. Default: config_test.txt + + Attributes + ---------- + test_environment : bool + If true, we are in a test environment. 
+ base_data_dir : str + Path to the base directorys where all data file are stored + user : str + The postgres user + password : str + The postgres password for the previous user + database : str + The postgres database to connect to + host : str + The host where the database lives + port : int + The port used to connect to the postgres database in the previous host + ipyc_demo : str + The IPython demo cluster profile + ipyc_demo_n : int + The size of the demo cluster + ipyc_reserved : str + The IPython reserved cluster profile + ipyc_reserved_n : int + The size of the reserved cluster + ipyc_general : str + The IPython general cluster profile + ipyc_general_n : int + The size of the general cluster + """ + def __init__(self): + # If conf_fp is None, we default to the test configuration file + try: + conf_fp = environ['QIITA_CONFIG_FP'] + except KeyError: + conf_fp = join(dirname(abspath(__file__)), + 'support_files/config_test.txt') + + # Parse the configuration file + config = ConfigParser() + with open(conf_fp, 'U') as conf_file: + config.readfp(conf_file) + + _expected_sections = set(['main', 'ipython', 'redis', 'postgres']) + if set(config.sections()) != _expected_sections: + missing = _expected_sections - set(config.sections()) + raise MissingSectionHeaderError("Missing: %r" % missing) + + self._get_main(config) + self._get_postgres(config) + self._get_redis(config) + self._get_ipython(config) + + def _get_main(self, config): + """Get the configuration of the main section""" + self.test_environment = config.getboolean('main', 'TEST_ENVIRONMENT') + try: + self.base_data_dir = config.get('main', 'BASE_DATA_DIR') + except NoOptionError as e: + if self.test_environment: + self.base_data_dir = join(dirname(abspath(__file__)), + '../test_data') + else: + raise e + + def _get_postgres(self, config): + """Get the configuration of the postgres section""" + self.user = config.get('postgres', 'USER') + try: + self.password = config.get('postgres', 'PASSWORD') + except 
NoOptionError as e: + if self.test_environment: + self.password = None + else: + raise e + self.database = config.get('postgres', 'DATABASE') + self.host = config.get('postgres', 'HOST') + self.port = config.getint('postgres', 'PORT') + + def _get_redis(self, config): + """Get the configuration of the redis section""" + pass + + def _get_ipython(self, config): + """Get the configuration of the ipython section""" + sec_get = partial(config.get, 'ipython') + sec_getint = partial(config.getint, 'ipython') + + self.ipyc_demo = sec_get('DEMO_CLUSTER') + self.ipyc_reserved = sec_get('RESERVED_CLUSTER') + self.ipyc_general = sec_get('GENERAL_CLUSTER') + + self.ipyc_demo_n = sec_getint('DEMO_CLUSTER_SIZE') + self.ipyc_reserved_n = sec_getint('RESERVED_CLUSTER_SIZE') + self.ipyc_general_n = sec_getint('GENERAL_CLUSTER_SIZE') diff --git a/qiita_core/environment_manager.py b/qiita_core/environment_manager.py new file mode 100644 index 000000000..088cdd45b --- /dev/null +++ b/qiita_core/environment_manager.py @@ -0,0 +1,16 @@ +from IPython.parallel.apps.ipclusterapp import IPClusterStart, IPClusterStop + + +def start_cluster(profile, n): + """Start a cluster""" + c = IPClusterStart(profile=profile, log_level=0, daemonize=True) + c.n = n + c.initialize(argv=[]) + c.start() + + +def stop_cluster(profile): + """Stop a cluster""" + c = IPClusterStop(profile=profile, log_level=0) + c.initialize(argv=[]) + c.start() diff --git a/qiita_core/exceptions.py b/qiita_core/exceptions.py new file mode 100644 index 000000000..a5fec249c --- /dev/null +++ b/qiita_core/exceptions.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + + +class QiitaError(Exception): + """Base clase for all Qiita exceptions""" + pass + + +class IncompetentQiitaDeveloperError(QiitaError): + """Exception for developer errors""" + pass + + +class QiitaSearchError(QiitaError): + """Exception for errors when using search objects""" + pass + + +class QiitaUserError(QiitaError): + """Exception for error when handling with user objects""" + pass + + +class QiitaAnalysisError(QiitaError): + """Exception for error when handling with analysis objects""" + pass + + +class QiitaJobError(QiitaError): + """Exception for error when handling with job objects""" + pass + + +class QiitaStudyError(QiitaError): + """Exception for error when handling with study objects""" + pass + + +class IncorrectPasswordError(QiitaError): + """User passes wrong password""" + pass + + +class IncorrectEmailError(QiitaError): + """Email fails validation""" + pass + + +class QiitaEnvironmentError(QiitaError): + """Exception for error when dealing with the environment""" + pass diff --git a/qiita_core/qiita_settings.py b/qiita_core/qiita_settings.py new file mode 100644 index 000000000..dad053eff --- /dev/null +++ b/qiita_core/qiita_settings.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +from qiita_core.configuration_manager import ConfigurationManager + +qiita_config = ConfigurationManager() diff --git a/qiita_core/search.py b/qiita_core/search.py new file mode 100644 index 000000000..147201f19 --- /dev/null +++ b/qiita_core/search.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +QUERY_TYPES = ["includes", "exact", "starts_with", "ends_with"] + +from qiita_core.exceptions import QiitaSearchError + + +class QiitaSearchCriterion(object): + """Models a search criterion""" + + def __init__(self, field, query_type, query): + """Initializes the criterion object + + Inputs: + field: the field in which the criterion applies + query_type: the type of query of the criterion + query: the actual string containing the query + + Raises a QiitaSearchError if the query type is not recognized + """ + if query_type not in QUERY_TYPES: + raise QiitaSearchError("Query type not recognized: %s" % + query_type) + self.field = field + self.query_type = query_type + self.query = query + + def __str__(self): + """Returns the criterion in a human-readable string""" + raise NotImplementedError("") + + +class QiitaSearch(object): + """Models a search query""" + + def __init__(self, fields, criterion): + """Initializes the search object + + Inputs: + fields: the fields in which the search can apply + criterion: the first criterion of the search + + Raises a QiitaSearchError if the criterion does not apply to the given + search fields + """ + if criterion.field not in fields: + raise QiitaSearchError("Field not 
recognized") + self._fields = fields + self._criteria = [criterion] + self._operators = [] + + def __str__(self): + """Returns the search string in a json string""" + raise NotImplementedError("") + + def add_criterion(self, criterion, operator): + """Adds a new criterion to the search + + Inputs: + criterion: the new criterion to be added to the search + operator: the operator used in the added criterion + """ + raise NotImplementedError("") + + def remove_criterion(self, criterion): + """Removes a given criterion from the search + + Inputs: + criterion: the criterion to be removed + """ + raise NotImplementedError("") + + def get_criteria(self): + """Iterator to loop through all the criterion on the search + + Yields a pair of (operator, criterion) in which the operator + for the first criterion is not defined + """ + yield None, self._criteria[0] + + for op, criterion in zip(self._operators, self._criteria[1:]): + yield op, criterion + + def to_json_str(self): + """""" + pass + + def load_from_json(self): + """""" + pass diff --git a/qiita_core/support_files/config_demo.txt b/qiita_core/support_files/config_demo.txt new file mode 100644 index 000000000..8ce695720 --- /dev/null +++ b/qiita_core/support_files/config_demo.txt @@ -0,0 +1,48 @@ +# WARNING!!!! DO NOT MODIFY THIS FILE +# IF YOU NEED TO PROVIDE YOUR OWN CONFIGURATION, COPY THIS FILE TO A NEW +# LOCATION AND EDIT THE COPY + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +# ------------------------------ Main settings -------------------------------- +[main] +# Change to FALSE in a production system +TEST_ENVIRONMENT = TRUE + +# Path to the base directory where the data files are going to be stored, uncomment to set +# BASE_DATA_DIR = /path/to/base/directory + +# ----------------------------- IPython settings ------------------------------ +[ipython] +# ties to cluster profiles +DEMO_CLUSTER = qiita_demo +DEMO_CLUSTER_SIZE = 1 + +RESERVED_CLUSTER = qiita_reserved +RESERVED_CLUSTER_SIZE = 1 + +GENERAL_CLUSTER = qiita_general +GENERAL_CLUSTER_SIZE = 1 + +# ----------------------------- Redis settings -------------------------------- +[redis] + +# ----------------------------- Postgres settings ----------------------------- +[postgres] +# The user name to connect to the database +USER = postgres + +# The database to connect to +DATABASE = qiita_demo + +# The host where the database lives on +HOST = localhost + +# The port to connect to the database +PORT = 5432 diff --git a/qiita_core/support_files/config_test.txt b/qiita_core/support_files/config_test.txt new file mode 100644 index 000000000..69481afba --- /dev/null +++ b/qiita_core/support_files/config_test.txt @@ -0,0 +1,48 @@ +# WARNING!!!! DO NOT MODIFY THIS FILE +# IF YOU NEED TO PROVIDE YOUR OWN CONFIGURATION, COPY THIS FILE TO A NEW +# LOCATION AND EDIT THE COPY + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +# ------------------------------ Main settings -------------------------------- +[main] +# Change to FALSE in a production system +TEST_ENVIRONMENT = TRUE + +# Path to the base directory where the data files are going to be stored, uncomment to set +# BASE_DATA_DIR = /path/to/base/directory + +# ----------------------------- IPython settings ------------------------------ +[ipython] +# ties to cluster profiles +DEMO_CLUSTER = qiita_demo +DEMO_CLUSTER_SIZE = 1 + +RESERVED_CLUSTER = qiita_reserved +RESERVED_CLUSTER_SIZE = 1 + +GENERAL_CLUSTER = qiita_general +GENERAL_CLUSTER_SIZE = 1 + +# ----------------------------- Redis settings -------------------------------- +[redis] + +# ----------------------------- Postgres settings ----------------------------- +[postgres] +# The user name to connect to the database +USER = postgres + +# The database to connect to +DATABASE = qiita_test + +# The host where the database lives on +HOST = localhost + +# The port to connect to the database +PORT = 5432 diff --git a/qiita_core/util.py b/qiita_core/util.py new file mode 100644 index 000000000..57661b97f --- /dev/null +++ b/qiita_core/util.py @@ -0,0 +1,129 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- +from smtplib import SMTP, SMTP_SSL, SMTPException +from future import standard_library +with standard_library.hooks(): + from email.mime.multipart import MIMEMultipart + from email.mime.text import MIMEText + +from qiita_core.qiita_settings import qiita_config +from qiita_db.sql_connection import SQLConnectionHandler +from qiita_db.environment_manager import (LAYOUT_FP, INITIALIZE_FP, + POPULATE_FP) + + +def send_email(to, subject, body): + # create email + msg = MIMEMultipart() + msg['From'] = qiita_config.smtp_email + msg['To'] = to + msg['Subject'] = subject + msg.attach(MIMEText(body, 'plain')) + + # connect to smtp server, using ssl if needed + if qiita_config.smtp_ssl: + smtp = SMTP_SSL() + else: + smtp = SMTP() + smtp.set_debuglevel(False) + smtp.connect(qiita_config.smtp_host, qiita_config.smtp_port) + # try tls, if not available on server just ignore error + try: + smtp.starttls() + except SMTPException: + pass + smtp.ehlo_or_helo_if_needed() + + if qiita_config.smtp_user: + smtp.login(qiita_config.smtp_user, qiita_config.smtp_password) + + # send email + try: + smtp.sendmail(qiita_config.smtp_email, to, msg.as_string()) + except Exception: + raise RuntimeError("Can't send email!") + finally: + smtp.close() + + +def build_test_database(setup_fn): + """Decorator that initializes the test database with the schema and initial + test data and executes setup_fn + """ + conn_handler = SQLConnectionHandler() + + # Get the paths to the SQL files with the schema layout, the database + # initialization and the test data + + def decorated_setup_fn(*args, **kwargs): + # Create the schema + with open(LAYOUT_FP, 'U') as f: + conn_handler.execute(f.read()) + # Initialize the database + with open(INITIALIZE_FP, 'U') as f: + conn_handler.execute(f.read()) + # Populate the database + with open(POPULATE_FP, 'U') as f: + conn_handler.execute(f.read()) + # Execute the setup function + return 
def drop_test_database(teardown_fn):
    """Decorator that drops the qiita schema, leaving the test database in its
    initial state, and then executes teardown_fn

    Parameters
    ----------
    teardown_fn : callable
        The teardown function to execute after the schema is dropped

    Returns
    -------
    callable
        The decorated teardown function

    Notes
    -----
    Fixes two defects of the original implementation: the wrapper now keeps
    the wrapped function's metadata (``functools.wraps``), and the database
    connection is created lazily at call time instead of at decoration time,
    so merely decorating a function no longer requires a live database.
    """
    from functools import wraps

    @wraps(teardown_fn)
    def decorated_teardown_fn(*args, **kwargs):
        # Connect lazily; only needed when the teardown actually runs
        conn_handler = SQLConnectionHandler()
        # Drop the schema
        conn_handler.execute("DROP SCHEMA qiita CASCADE")
        # Execute the teardown function
        return teardown_fn(*args, **kwargs)

    return decorated_teardown_fn
Not " + "executing the tests to keep the production " + "database safe.") + + # Now, we decorate the setup and teardown functions + class DecoratedClass(cls): + @build_test_database + def setUp(self): + super(DecoratedClass, self).setUp() + self.conn_handler = SQLConnectionHandler() + + @drop_test_database + def tearDown(self): + super(DecoratedClass, self).tearDown() + del self.conn_handler + + return DecoratedClass + return class_modifier diff --git a/qiita_db/__init__.py b/qiita_db/__init__.py new file mode 100644 index 000000000..a659159bc --- /dev/null +++ b/qiita_db/__init__.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +__version__ = "0.0.1-dev" diff --git a/qiita_db/analysis.py b/qiita_db/analysis.py new file mode 100644 index 000000000..310ec50bf --- /dev/null +++ b/qiita_db/analysis.py @@ -0,0 +1,381 @@ +""" +Objects for dealing with Qiita analyses + +This module provides the implementation of the Analysis class. + +Classes +------- +- `Analysis` -- A Qiita Analysis class +""" + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- +from __future__ import division +from collections import defaultdict + +from .sql_connection import SQLConnectionHandler +from .base import QiitaStatusObject +from .exceptions import QiitaDBNotImplementedError, QiitaDBStatusError +from .util import convert_to_id + + +class Analysis(QiitaStatusObject): + """ + Analysis object to access to the Qiita Analysis information + + Attributes + ---------- + owner + name + description + samples + biom_tables + shared_with + jobs + pmid + parent + children + + Methods + ------- + add_samples + remove_samples + add_biom_tables + remove_biom_tables + add_jobs + share + unshare + """ + + _table = "analysis" + + def _lock_check(self, conn_handler): + """Raises QiitaDBStatusError if analysis is public""" + if self.check_status({"public", "completed", "error"}): + raise QiitaDBStatusError("Analysis is locked!") + + def _status_setter_checks(self, conn_handler): + r"""Perform a check to make sure not setting status away from public + """ + self._lock_check(conn_handler) + + @classmethod + def create(cls, owner, name, description, parent=None): + """Creates a new analysis on the database + + Parameters + ---------- + owner : User object + The analysis' owner + name : str + Name of the analysis + description : str + Description of the analysis + parent : Analysis object, optional + The analysis this one was forked from + """ + conn_handler = SQLConnectionHandler() + # TODO after demo: if exists() + + # insert analysis information into table with "in construction" status + sql = ("INSERT INTO qiita.{0} (email, name, description, " + "analysis_status_id) VALUES (%s, %s, %s, 1) " + "RETURNING analysis_id".format(cls._table)) + a_id = conn_handler.execute_fetchone( + sql, (owner.id, name, description))[0] + + # add parent if necessary + if parent: + sql = ("INSERT INTO qiita.analysis_chain (parent_id, child_id) " + "VALUES (%s, %s)") + 
conn_handler.execute(sql, (parent.id, a_id)) + + return cls(a_id) + + # ---- Properties ---- + @property + def owner(self): + """The owner of the analysis + + Returns + ------- + str + Name of the Analysis + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT email FROM qiita.{0} WHERE " + "analysis_id = %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @property + def name(self): + """The name of the analysis + + Returns + ------- + str + Name of the Analysis + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT name FROM qiita.{0} WHERE " + "analysis_id = %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @property + def description(self): + """Returns the description of the analysis""" + conn_handler = SQLConnectionHandler() + sql = ("SELECT description FROM qiita.{0} WHERE " + "analysis_id = %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @description.setter + def description(self, description): + """Changes the description of the analysis + + Parameters + ---------- + description : str + New description for the analysis + + Raises + ------ + QiitaDBStatusError + Analysis is public + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + sql = ("UPDATE qiita.{0} SET description = %s WHERE " + "analysis_id = %s".format(self._table)) + conn_handler.execute(sql, (description, self._id)) + + @property + def samples(self): + """The processed data and samples attached to the analysis + + Returns + ------- + dict + Format is {processed_data_id: [sample_id, sample_id, ...]} + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT processed_data_id, sample_id FROM qiita.analysis_sample" + " WHERE analysis_id = %s ORDER BY processed_data_id") + ret_samples = defaultdict(list) + # turn into dict of samples keyed to processed_data_id + for pid, sample in conn_handler.execute_fetchall(sql, (self._id, )): + 
ret_samples[pid].append(sample) + return ret_samples + + @property + def shared_with(self): + """The user the analysis is shared with + + Returns + ------- + list of int + User ids analysis is shared with + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT email FROM qiita.analysis_users WHERE " + "analysis_id = %s") + return [u[0] for u in conn_handler.execute_fetchall(sql, (self._id, ))] + + @property + def biom_tables(self): + """The biom tables of the analysis + + Returns + ------- + list of int or None + ProcessedData ids of the biom tables or None if no tables generated + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT filepath_id FROM qiita.analysis_filepath WHERE " + "analysis_id = %s") + tables = conn_handler.execute_fetchall(sql, (self._id, )) + if tables == []: + return None + return [table[0] for table in tables] + + @property + def jobs(self): + """A list of jobs included in the analysis + + Returns + ------- + list of ints + Job ids for jobs in analysis + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT job_id FROM qiita.analysis_job WHERE " + "analysis_id = %s".format(self._table)) + job_ids = conn_handler.execute_fetchall(sql, (self._id, )) + if job_ids == []: + return None + return [job_id[0] for job_id in job_ids] + + @property + def pmid(self): + """Returns pmid attached to the analysis + + Returns + ------- + str or None + returns the PMID or None if none is attached + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT pmid FROM qiita.{0} WHERE " + "analysis_id = %s".format(self._table)) + pmid = conn_handler.execute_fetchone(sql, (self._id, ))[0] + return pmid + + @pmid.setter + def pmid(self, pmid): + """adds pmid to the analysis + + Parameters + ---------- + pmid: str + pmid to set for study + + Raises + ------ + QiitaDBStatusError + Analysis is public + + Notes + ----- + An analysis should only ever have one PMID attached to it. 
+ """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + sql = ("UPDATE qiita.{0} SET pmid = %s WHERE " + "analysis_id = %s".format(self._table)) + conn_handler.execute(sql, (pmid, self._id)) + + # @property + # def parent(self): + # """Returns the id of the parent analysis this was forked from""" + # return QiitaDBNotImplementedError() + + # @property + # def children(self): + # return QiitaDBNotImplementedError() + + # ---- Functions ---- + def share(self, user): + """Share the analysis with another user + + Parameters + ---------- + user: User object + The user to share the study with + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + + sql = ("INSERT INTO qiita.analysis_users (analysis_id, email) VALUES " + "(%s, %s)") + conn_handler.execute(sql, (self._id, user.id)) + + def unshare(self, user): + """Unshare the analysis with another user + + Parameters + ---------- + user: User object + The user to unshare the study with + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + + sql = ("DELETE FROM qiita.analysis_users WHERE analysis_id = %s AND " + "email = %s") + conn_handler.execute(sql, (self._id, user.id)) + + def add_samples(self, samples): + """Adds samples to the analysis + + Parameters + ---------- + samples : list of tuples + samples and the processed data id they come from in form + [(processed_data_id, sample_id), ...] + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + + sql = ("INSERT INTO qiita.analysis_sample (analysis_id, sample_id, " + "processed_data_id) VALUES (%s, %s, %s)") + conn_handler.executemany(sql, [(self._id, s[1], s[0]) + for s in samples]) + + def remove_samples(self, samples): + """Removes samples from the analysis + + Parameters + ---------- + samples : list of tuples + samples and the processed data id they come from in form + [(processed_data_id, sample_id), ...] 
+ """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + + sql = ("DELETE FROM qiita.analysis_sample WHERE analysis_id = %s AND " + "sample_id = %s AND processed_data_id = %s") + conn_handler.executemany(sql, [(self._id, s[1], s[0]) + for s in samples]) + + def add_biom_tables(self, tables): + """Adds biom tables to the analysis + + Parameters + ---------- + tables : list of ProcessedData objects + Biom tables to add + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + file_ids = [] + for table in tables: + file_ids.extend(table.get_filepath_ids()) + sql = ("INSERT INTO qiita.analysis_filepath (analysis_id, filepath_id)" + " VALUES (%s, %s)") + conn_handler.executemany(sql, [(self._id, f) for f in file_ids]) + + def remove_biom_tables(self, tables): + """Removes biom tables from the analysis + + Parameters + ---------- + tables : list of ProcessedData objects + Biom tables to remove + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + file_ids = [] + for table in tables: + file_ids.extend(table.get_filepath_ids()) + sql = ("DELETE FROM qiita.analysis_filepath WHERE analysis_id = %s " + "AND filepath_id = %s") + conn_handler.executemany(sql, [(self._id, f) for f in file_ids]) + + def add_jobs(self, jobs): + """Adds a list of jobs to the analysis + + Parameters + ---------- + jobs : list of Job objects + """ + conn_handler = SQLConnectionHandler() + self._lock_check(conn_handler) + sql = ("INSERT INTO qiita.analysis_job (analysis_id, job_id) " + "VALUES (%s, %s)") + conn_handler.executemany(sql, [(self._id, job.id) for job in jobs]) diff --git a/qiita_db/base.py b/qiita_db/base.py new file mode 100644 index 000000000..efcf4cefe --- /dev/null +++ b/qiita_db/base.py @@ -0,0 +1,280 @@ +r""" +Base objects (:mod: `qiita_db.base`) +==================================== + +..currentmodule:: qiita_db.base + +This module provides base objects for dealing with any qiita_db object that +needs to be 
stored on the database. + +Classes +------- + +..autosummary:: + :toctree: generated/ + + QiitaObject + QiitaStatusObject +""" + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from __future__ import division +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from .sql_connection import SQLConnectionHandler +from .exceptions import QiitaDBNotImplementedError, QiitaDBUnknownIDError + + +class QiitaObject(object): + r"""Base class for any qiita_db object + + Parameters + ---------- + id_: object + The object id on the storage system + + Attributes + ---------- + id + + Methods + ------- + create + delete + exists + _check_subclass + _check_id + __eq__ + __neq__ + + Raises + ------ + IncompetentQiitaDeveloperError + If trying to instantiate the base class directly + """ + + _table = None + + @classmethod + def create(cls): + r"""Creates a new object with a new id on the storage system + + Raises + ------ + QiitaDBNotImplementedError + If the method is not overwritten by a subclass + """ + raise QiitaDBNotImplementedError() + + def delete(id_): + r"""Deletes the object `id_` from the storage system + + Parameters + ---------- + id_ : object + The object identifier + + Raises + ------ + QiitaDBNotImplementedError + If the method is not overwritten by a subclass + """ + raise QiitaDBNotImplementedError() + + @classmethod + def exists(cls): + r"""Checks if a given object info is already present on the DB + + Raises + ------ + QiitaDBNotImplementedError + If the method is not overwritten by a subclass + """ + raise QiitaDBNotImplementedError() + + @classmethod + def _check_subclass(cls): + r"""Check that we are not calling a function that needs to 
access the + database from the base class + + Raises + ------ + IncompetentQiitaDeveloperError + If its called directly from a base class + """ + if cls._table is None: + raise IncompetentQiitaDeveloperError( + "Could not instantiate an object of the base class") + + def _check_id(self, id_, conn_handler=None): + r"""Check that the provided ID actually exists on the database + + Parameters + ---------- + id_ : object + The ID to test + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + + Notes + ----- + This function does not work for the User class. The problem is + that the User sql layout doesn't follow the same conventions done in + the other classes. However, still defining here as there is only one + subclass that doesn't follow this convention and it can override this. + """ + self._check_subclass() + + conn_handler = (conn_handler if conn_handler is not None + else SQLConnectionHandler()) + + return conn_handler.execute_fetchone( + "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE " + "{0}_id=%s)".format(self._table), (id_, ))[0] + + def __init__(self, id_): + r"""Initializes the object + + Parameters + ---------- + id_: the object identifier + + Raises + ------ + QiitaDBUnknownIDError + If `id_` does not correspond to any object + """ + if not self._check_id(id_): + raise QiitaDBUnknownIDError(id_, self._table) + + self._id = id_ + + def __eq__(self, other): + r"""Self and other are equal based on type and database id""" + if type(self) != type(other): + return False + if other._id != self._id: + return False + return True + + def __ne__(self, other): + r"""Self and other are not equal based on type and database id""" + return not self.__eq__(other) + + @property + def id(self): + r"""The object id on the storage system""" + return self._id + + +class QiitaStatusObject(QiitaObject): + r"""Base class for any qiita_db object with a status property + + Attributes + ---------- + status + + Methods + ------- + check_status + 
_status_setter_checks + """ + + @property + def status(self): + r"""String with the current status of the analysis""" + # Check that self._table is actually defined + self._check_subclass() + + # Get the DB status of the object + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( + "SELECT status FROM qiita.{0}_status WHERE {0}_status_id = " + "(SELECT {0}_status_id FROM qiita.{0} WHERE " + "{0}_id = %s)".format(self._table), + (self._id, ))[0] + + def _status_setter_checks(self, conn_handler): + r"""Perform any extra checks that needed to be done before setting the + object status on the database. Should be overwritten by the subclasses + """ + raise QiitaDBNotImplementedError() + + @status.setter + def status(self, status): + r"""Change the status of the analysis + + Parameters + ---------- + status: str + The new object status + """ + # Check that self._table is actually defined + self._check_subclass() + + # Perform any extra checks needed before we update the status in the DB + conn_handler = SQLConnectionHandler() + self._status_setter_checks(conn_handler) + + # Update the status of the object + conn_handler.execute( + "UPDATE qiita.{0} SET {0}_status_id = " + "(SELECT {0}_status_id FROM qiita.{0}_status WHERE status = %s) " + "WHERE {0}_id = %s".format(self._table), (status, self._id)) + + def check_status(self, status, exclude=False, conn_handler=None): + r"""Checks status of object. + + Parameters + ---------- + status: iterable + Iterable of statuses to check against. + exclude: bool, optional + If True, will check that database status is NOT one of the statuses + passed. Default False. + conn_handler: SQLConnectionHandler, optional + The connection handler object connected to the DB + + Returns + ------- + bool + True if the object status is in the desired set of statuses. False + otherwise. 
+ + Notes + ----- + This assumes the following database setup is in place: For a given + cls._table setting, such as "table", there is a corresponding table + with the name "table_status" holding the status entries allowed. This + table has a column called "status" that holds the values corresponding + to what is passed as status in this function and a column + "table_status_id" corresponding to the column of the same name in + "table". + + Table setup: + foo: foo_status_id ----> foo_status: foo_status_id, status + """ + # Check that self._table is actually defined + self._check_subclass() + + # Get all available statuses + conn_handler = (conn_handler if conn_handler is not None + else SQLConnectionHandler()) + statuses = [x[0] for x in conn_handler.execute_fetchall( + "SELECT DISTINCT status FROM qiita.{0}_status".format(self._table), + (self._id, ))] + + # Check that all the provided statuses are valid statuses + if set(status).difference(statuses): + raise ValueError("%s are not valid status values" + % set(status).difference(statuses)) + + # Get the DB status of the object + dbstatus = self.status + return dbstatus not in status if exclude else dbstatus in status diff --git a/qiita_db/commands.py b/qiita_db/commands.py new file mode 100644 index 000000000..d21793c1a --- /dev/null +++ b/qiita_db/commands.py @@ -0,0 +1,230 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
def load_study_from_cmd(owner, title, info):
    r"""Adds a study to the database

    Parameters
    ----------
    owner : str
        The email address of the owner of the study
    title : str
        The title of the study
    info : file-like object
        File-like object containing study information (INI format with a
        'required' and an 'optional' section)

    Returns
    -------
    qiita_db.study.Study
        The newly created study
    """
    # Parse the configuration file
    config = ConfigParser()
    config.readfp(info)

    optional = dict(config.items('optional'))

    # A named function instead of an assigned lambda (PEP 8 E731)
    def get_optional(name):
        return optional.get(name, None)

    get_required = partial(config.get, 'required')
    required_fields = ['timeseries_type_id', 'mixs_compliant',
                       'number_samples_collected', 'number_samples_promised',
                       'portal_type_id', 'reprocess', 'study_alias',
                       'study_description', 'study_abstract',
                       'metadata_complete']
    optional_fields = ['funding', 'most_recent_contact', 'spatial_series',
                       'vamps_id']
    # Every required field must be present; ConfigParser raises if missing
    infodict = {field: get_required(field) for field in required_fields}

    # Optional fields are only added when provided
    for field in optional_fields:
        optvalue = get_optional(field)
        if optvalue is not None:
            infodict[field] = optvalue

    # People are given as "Name, email" pairs and stored as StudyPerson rows
    emp_person_name_email = get_optional('emp_person_name')
    if emp_person_name_email is not None:
        emp_name, emp_email = emp_person_name_email.split(',')
        infodict['emp_person_id'] = StudyPerson.create(emp_name.strip(),
                                                       emp_email.strip())
    lab_name_email = get_optional('lab_person')
    if lab_name_email is not None:
        lab_name, lab_email = lab_name_email.split(',')
        infodict['lab_person_id'] = StudyPerson.create(lab_name.strip(),
                                                       lab_email.strip())
    pi_name_email = get_required('principal_investigator')
    pi_name, pi_email = pi_name_email.split(',')
    infodict['principal_investigator_id'] = StudyPerson.create(
        pi_name.strip(), pi_email.strip())
    # this will eventually change to using the Experimental Factor Ontology
    # names
    efo_ids = get_required('efo_ids')
    efo_ids = [x.strip() for x in efo_ids.split(',')]

    return Study.create(User(owner), title, efo_ids, infodict)
def load_prep_template_from_cmd(sample_temp_path, study_id):
    r"""Adds a prep template to the database

    Parameters
    ----------
    sample_temp_path : str
        Path to the prep template file (parameter name kept for backward
        compatibility; the original docstring documented a nonexistent
        ``prep_temp_path`` parameter)
    study_id : int
        Identifier used to build the ``RawData`` the prep template is
        attached to
        NOTE(review): the name suggests a study id, but the value is passed
        to ``RawData`` -- confirm against callers

    Returns
    -------
    qiita_db.metadata_template.PrepTemplate
        The newly created prep template
    """
    # NOTE: pd.DataFrame.from_csv is deprecated in modern pandas; kept here
    # to match the pandas version this codebase targets.
    prep_temp = pd.DataFrame.from_csv(sample_temp_path, sep='\t',
                                      infer_datetime_format=True)
    return PrepTemplate.create(prep_temp, RawData(study_id))
processed_params_ table + preprocessed_data_id : int, optional + Defaults to ``None``. The ID of the row in the preprocessed_data table. + processed_date : str, optional + Defaults to ``None``. The date and time to use as the processing date. + Must be interpretable as a datetime object + + Returns + ------- + qiita_db.ProcessedData + The newly created `qiita_db.ProcessedData` object + """ + if len(fps) != len(fp_types): + raise ValueError("Please pass exactly one fp_type for each " + "and every fp") + + fp_types_dict = get_filepath_types() + fp_types = [fp_types_dict[x] for x in fp_types] + + if preprocessed_data_id is not None: + preprocessed_data = PreprocessedData(preprocessed_data_id) + else: + preprocessed_data = None + + if study_id is not None: + study = Study(study_id) + else: + study = None + + if processed_date is not None: + processed_date = parse(processed_date) + + return ProcessedData.create(processed_params_table_name, + processed_params_id, list(zip(fps, fp_types)), + preprocessed_data, study, processed_date) diff --git a/qiita_db/data.py b/qiita_db/data.py new file mode 100644 index 000000000..c9566b9be --- /dev/null +++ b/qiita_db/data.py @@ -0,0 +1,505 @@ +r""" +Data objects (:mod: `qiita_db.data`) +==================================== + +..currentmodule:: qiita_db.data + +This module provides functionality for inserting, querying and deleting +data stored in the database. There are three data classes available: `RawData`, +`PreprocessedData` and `ProcessedData`. + +Classes +------- + +..autosummary:: + :toctree: generated/ + + BaseData + RawData + PreprocessedData + ProcessedData + +Examples +-------- +Assume we have a raw data instance composed by two fastq files (the sequence +file 'seqs.fastq' and the barcodes file 'barcodes.fastq') that belongs to +study 1. 
+ +Inserting the raw data into the database: + +>>> from qiita_db.data import RawData +>>> from qiita_db.study import Study +>>> study = Study(1) # doctest: +SKIP +>>> filepaths = [('seqs.fastq', 1), ('barcodes.fastq', 2)] +>>> rd = RawData.create(2, filepaths, study) # doctest: +SKIP +>>> print rd.id # doctest: +SKIP +2 + +Retrieve the filepaths associated with the raw data + +>>> rd.get_filepaths() # doctest: +SKIP +[('seqs.fastq', 1), ('barcodes.fastq', 2)] + +Assume we have preprocessed the previous raw data files using the parameters +under the first row in the 'preprocessed_sequence_illumina_params', and we +obtained to files: a fasta file 'seqs.fna' and a qual file 'seqs.qual'. + +Inserting the preprocessed data into the database + +>>> from qiita_db.data import PreprocessedData +>>> filepaths = [('seqs.fna', 4), ('seqs.qual', 5)] +>>> ppd = PreprocessedData.create(rd, "preprocessed_sequence_illumina_params", +... 1, filepaths) # doctest: +SKIP +>>> print ppd.id # doctest: +SKIP +2 + +Assume we have processed the previous preprocessed data on June 2nd 2014 at 5pm +using uclust and the first set of parameters, and we obtained a BIOM table. + +Inserting the processed data into the database: + +>>> from qiita_db.data import ProcessedData +>>> from datetime import datetime +>>> filepaths = [('foo/table.biom', 6)] +>>> date = datetime(2014, 6, 2, 5, 0, 0) +>>> pd = ProcessedData(ppd, "processed_params_uclust", 1, +... filepaths, date) # doctest: +SKIP +>>> print pd.id # doctest: +SKIP +2 +""" + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +from __future__ import division +from datetime import datetime +from os.path import join +from functools import partial + + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from .base import QiitaObject +from .sql_connection import SQLConnectionHandler +from .util import (exists_dynamic_table, get_db_files_base_dir, + insert_filepaths) + + +class BaseData(QiitaObject): + r"""Base class for the raw, preprocessed and processed data objects. + + Methods + ------- + get_filepaths + + See Also + -------- + RawData + PreprocessedData + PreprocessedData + """ + _filepath_table = "filepath" + + # These variables should be defined in the subclasses. They are useful in + # order to avoid code replication and be able to generalize the functions + # included in this BaseClass + _data_filepath_table = None + _data_filepath_column = None + + def _link_data_filepaths(self, fp_ids, conn_handler): + r"""Links the data `data_id` with its filepaths `fp_ids` in the DB + connected with `conn_handler` + + Parameters + ---------- + fp_ids : list of ints + The filepaths ids to connect the data + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + + Raises + ------ + IncompetentQiitaDeveloperError + If called directly from the BaseClass or one of the subclasses does + not define the class attributes _data_filepath_table and + _data_filepath_column + """ + # Create the list of SQL values to add + values = [(self.id, fp_id) for fp_id in fp_ids] + # Add all rows at once + conn_handler.executemany( + "INSERT INTO qiita.{0} ({1}, filepath_id) " + "VALUES (%s, %s)".format(self._data_filepath_table, + self._data_filepath_column), values) + + def _add_filepaths(self, filepaths, conn_handler): + r"""Populates the DB tables for storing the filepaths and connects the + `self` objects with these filepaths""" + self._check_subclass() + # Add the filepaths to the 
database + fp_ids = insert_filepaths(filepaths, self._id, self._table, + self._filepath_table, conn_handler) + # Connect the raw data with its filepaths + self._link_data_filepaths(fp_ids, conn_handler) + + def get_filepaths(self): + r"""Returns the filepath associated with the data object + + Returns + ------- + list of tuples + A list of (path, filetype id) with all the paths associated with + the current data + """ + self._check_subclass() + # We need a connection handler to the database + conn_handler = SQLConnectionHandler() + # Retrieve all the (path, id) tuples related with the current data + # object. We need to first check the _data_filepath_table to get the + # filepath ids of the filepath associated with the current data object. + # We then can query the filepath table to get those paths/ + db_paths = conn_handler.execute_fetchall( + "SELECT filepath, filepath_type_id FROM qiita.{0} WHERE " + "filepath_id IN (SELECT filepath_id FROM qiita.{1} WHERE " + "{2}=%(id)s)".format(self._filepath_table, + self._data_filepath_table, + self._data_filepath_column), {'id': self.id}) + base_fp = partial(join, join(get_db_files_base_dir(), self._table)) + return [(base_fp(fp), id) for fp, id in db_paths] + + def get_filepath_ids(self): + conn_handler = SQLConnectionHandler() + db_ids = conn_handler.execute_fetchall( + "SELECT filepath_id FROM qiita.{0} WHERE " + "{1}=%(id)s".format(self._data_filepath_table, + self._data_filepath_column), {'id': self.id}) + return [fp_id[0] for fp_id in db_ids] + + +class RawData(BaseData): + r"""Object for dealing with raw data + + Attributes + ---------- + studies + + Methods + ------- + create + + See Also + -------- + BaseData + """ + # Override the class variables defined in the base classes + _table = "raw_data" + _data_filepath_table = "raw_filepath" + _data_filepath_column = "raw_data_id" + # Define here the class name, so in case it changes in the database we + # only need to change it here + _study_raw_table = 
"study_raw_data" + + @classmethod + def create(cls, filetype, filepaths, studies): + r"""Creates a new object with a new id on the storage system + + Parameters + ---------- + filetype : int + The filetype identifier + filepaths : iterable of tuples (str, int) + The list of paths to the raw files and its filepath type identifier + studies : list of Study + The list of Study objects to which the raw data belongs to + + Returns + ------- + A new instance of `cls` to access to the RawData stored in the DB + """ + # Add the raw data to the database, and get the raw data id back + conn_handler = SQLConnectionHandler() + rd_id = conn_handler.execute_fetchone( + "INSERT INTO qiita.{0} (filetype_id) VALUES (%s) RETURNING " + "raw_data_id".format(cls._table), + (filetype, ))[0] + rd = cls(rd_id) + + # Connect the raw data with its studies + values = [(study.id, rd_id) for study in studies] + conn_handler.executemany( + "INSERT INTO qiita.{0} (study_id, raw_data_id) VALUES " + "(%s, %s)".format(rd._study_raw_table), values) + + rd._add_filepaths(filepaths, conn_handler) + + return rd + + @property + def studies(self): + r"""The list of study ids to which the raw data belongs to + + Returns + ------- + list of int + The list of study ids to which the raw data belongs to""" + conn_handler = SQLConnectionHandler() + ids = conn_handler.execute_fetchall( + "SELECT study_id FROM qiita.{0} WHERE " + "raw_data_id=%s".format(self._study_raw_table), + [self._id]) + return [id[0] for id in ids] + + +class PreprocessedData(BaseData): + r"""Object for dealing with preprocessed data + + Attributes + ---------- + raw_data + study + + Methods + ------- + create + is_submitted_to_insdc + + See Also + -------- + BaseData + """ + # Override the class variables defined in the base classes + _table = "preprocessed_data" + _data_filepath_table = "preprocessed_filepath" + _data_filepath_column = "preprocessed_data_id" + _study_preprocessed_table = "study_preprocessed_data" + 
_raw_preprocessed_table = "raw_preprocessed_data" + + @classmethod + def create(cls, study, preprocessed_params_table, preprocessed_params_id, + filepaths, raw_data=None, submitted_to_insdc=False): + r"""Creates a new object with a new id on the storage system + + Parameters + ---------- + study : Study + The study to which this preprocessed data belongs to + preprocessed_params_table : str + Name of the table that holds the preprocessing parameters used + preprocessed_params_id : int + Identifier of the parameters from the `preprocessed_params_table` + table used + filepaths : iterable of tuples (str, int) + The list of paths to the preprocessed files and its filepath type + identifier + submitted_to_insdc : bool, optional + If true, the raw data files have been submitted to insdc + raw_data : RawData, optional + The RawData object used as base to this preprocessed data + + Raises + ------ + IncompetentQiitaDeveloperError + If the table `preprocessed_params_table` does not exists + """ + conn_handler = SQLConnectionHandler() + # We first check that the preprocessed_params_table exists + if not exists_dynamic_table(preprocessed_params_table, "preprocessed_", + "_params", conn_handler): + raise IncompetentQiitaDeveloperError( + "Preprocessed params table '%s' does not exists!" 
+ % preprocessed_params_table) + # Add the preprocessed data to the database, + # and get the preprocessed data id back + ppd_id = conn_handler.execute_fetchone( + "INSERT INTO qiita.{0} (preprocessed_params_table, " + "preprocessed_params_id, submitted_to_insdc) VALUES " + "(%(param_table)s, %(param_id)s, %(insdc)s) " + "RETURNING preprocessed_data_id".format(cls._table), + {'param_table': preprocessed_params_table, + 'param_id': preprocessed_params_id, + 'insdc': submitted_to_insdc})[0] + ppd = cls(ppd_id) + + # Connect the preprocessed data with its study + conn_handler.execute( + "INSERT INTO qiita.{0} (study_id, preprocessed_data_id) " + "VALUES (%s, %s)".format(ppd._study_preprocessed_table), + (study.id, ppd.id)) + + if raw_data is not None: + # Connect the preprocessed data with the raw data + conn_handler.execute( + "INSERT INTO qiita.{0} (raw_data_id, preprocessed_data_id) " + "VALUES (%s, %s)".format(cls._raw_preprocessed_table), + (raw_data.id, ppd_id)) + + ppd._add_filepaths(filepaths, conn_handler) + return ppd + + @property + def raw_data(self): + r"""The raw data id used to generate the preprocessed data""" + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( + "SELECT raw_data_id FROM qiita.{0} WHERE " + "preprocessed_data_id=%s".format(self._raw_preprocessed_table), + [self._id])[0] + + @property + def study(self): + r"""The study id to which this preprocessed data belongs to + + Returns + ------- + int + The study id to which this preprocessed data belongs to""" + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( + "SELECT study_id FROM qiita.{0} WHERE " + "preprocessed_data_id=%s".format(self._study_preprocessed_table), + [self._id])[0] + + def is_submitted_to_insdc(self): + r"""Tells if the raw data has been submitted to insdc + + Returns + ------- + bool + True if the raw data have been submitted to insdc. 
False otherwise + """ + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( + "SELECT submitted_to_insdc FROM qiita.{0} " + "WHERE preprocessed_data_id=%s".format(self._table), (self.id,))[0] + + +class ProcessedData(BaseData): + r"""Object for dealing with processed data + + Attributes + ---------- + preprocessed_data + + Methods + ------- + create + + See Also + -------- + BaseData + """ + # Override the class variables defined in the base classes + _table = "processed_data" + _data_filepath_table = "processed_filepath" + _data_filepath_column = "processed_data_id" + _study_processed_table = "study_processed_data" + _preprocessed_processed_table = "preprocessed_processed_data" + + @classmethod + def create(cls, processed_params_table, processed_params_id, filepaths, + preprocessed_data=None, study=None, processed_date=None): + r""" + Parameters + ---------- + processed_params_table : str + Name of the table that holds the preprocessing parameters used + processed_params_id : int + Identifier of the parameters from the `processed_params_table` + table used + filepaths : iterable of tuples (str, int) + The list of paths to the processed files and its filepath type + identifier + preprocessed_data : PreprocessedData, optional + The PreprocessedData object used as base to this processed data + study : Study, optional + If preprocessed_data is not provided, the study the processed data + belongs to + processed_date : datetime, optional + Date in which the data have been processed. 
Default: now + + Raises + ------ + IncompetentQiitaDeveloperError + If the table `processed_params_table` does not exists + If `preprocessed_data` and `study` are provided at the same time + If `preprocessed_data` and `study` are not provided + """ + if preprocessed_data is not None: + if study is not None: + raise IncompetentQiitaDeveloperError( + "You should provide either preprocessed_data or study, " + "but not both") + else: + if study is None: + raise IncompetentQiitaDeveloperError( + "You should provide either a preprocessed_data or a study") + + conn_handler = SQLConnectionHandler() + # We first check that the processed_params_table exists + if not exists_dynamic_table(processed_params_table, + "processed_params_", "", conn_handler): + raise IncompetentQiitaDeveloperError( + "Processed params table %s does not exists!" + % processed_params_table) + + # Check if we have received a date: + if processed_date is None: + processed_date = datetime.now() + + # Add the processed data to the database, + # and get the processed data id back + pd_id = conn_handler.execute_fetchone( + "INSERT INTO qiita.{0} (processed_params_table, " + "processed_params_id, processed_date) VALUES (%(param_table)s, " + "%(param_id)s, %(date)s) RETURNING " + "processed_data_id".format(cls._table), + {'param_table': processed_params_table, + 'param_id': processed_params_id, + 'date': processed_date})[0] + + pd = cls(pd_id) + + if preprocessed_data is not None: + conn_handler.execute( + "INSERT INTO qiita.{0} (preprocessed_data_id, " + "processed_data_id) VALUES " + "(%s, %s)".format(cls._preprocessed_processed_table), + (preprocessed_data.id, pd_id)) + study_id = preprocessed_data.study + else: + study_id = study.id + + # Connect the processed data with the study + conn_handler.execute( + "INSERT INTO qiita.{0} (study_id, processed_data_id) VALUES " + "(%s, %s)".format(cls._study_processed_table), + (study_id, pd_id)) + + pd._add_filepaths(filepaths, conn_handler) + return cls(pd_id) + + 
@property + def preprocessed_data(self): + r"""The preprocessed data id used to generate the processed data""" + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( + "SELECT preprocessed_data_id FROM qiita.{0} WHERE " + "processed_data_id=%s".format(self._preprocessed_processed_table), + [self._id])[0] + + @property + def data_type(self): + r"""The data_type of the data used""" + conn_handler = SQLConnectionHandler() + sql = ("SELECT DISTINCT DT.data_type FROM " + "qiita.preprocessed_processed_data PPD JOIN " + "qiita.raw_preprocessed_data RPD on PPD.preprocessed_data_id = " + "RPD.preprocessed_data_id JOIN qiita.common_prep_info CPI ON " + "RPD.raw_data_id = CPI.raw_data_id JOIN qiita.data_type DT ON " + "CPI.data_type_id = DT.data_type_id WHERE " + "PPD.processed_data_id = %s") + return conn_handler.execute_fetchone(sql, [self._id])[0] diff --git a/qiita_db/environment_manager.py b/qiita_db/environment_manager.py new file mode 100644 index 000000000..1fadc5ccf --- /dev/null +++ b/qiita_db/environment_manager.py @@ -0,0 +1,255 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- +from tempfile import mkdtemp +from tarfile import open as taropen +from gzip import open as gzopen +from os import remove +from os.path import abspath, dirname, join +from shutil import rmtree, move +from functools import partial +from future import standard_library +with standard_library.hooks(): + from urllib.request import urlretrieve + +from psycopg2 import connect +from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT + +from qiita_core.exceptions import QiitaEnvironmentError +from qiita_db.util import get_db_files_base_dir + +get_support_file = partial(join, join(dirname(abspath(__file__)), + 'support_files')) + +DFLT_BASE_DATA_FOLDER = get_support_file('test_data') +DFLT_BASE_WORK_FOLDER = get_support_file('work_data') +SETTINGS_FP = get_support_file('qiita-db-settings.sql') +LAYOUT_FP = get_support_file('qiita-db.sql') +INITIALIZE_FP = get_support_file('initialize.sql') +POPULATE_FP = get_support_file('populate_test_db.sql') +ENVIRONMENTS = {'demo': 'qiita_demo', 'test': 'qiita_test'} + + +def _check_db_exists(db, cursor): + r"""Checks if the database db exists on the postgres server + + Parameters + ---------- + db : str + The database + cursor : psycopg2.cursor + The cursor connected to the database + """ + cursor.execute('SELECT datname FROM pg_database') + # It's a list of tuples, so just create the tuple to check if exists + return (db,) in cursor.fetchall() + + +def make_environment(env, base_data_dir, base_work_dir, user, password, host): + r"""Creates the new environment `env` + + Parameters + ---------- + env : {demo, test} + The environment to create + + Raises + ------ + ValueError + If `env` not recognized + """ + if env not in ENVIRONMENTS: + raise ValueError("Environment %s not recognized. 
Available " + "environments are %s" % (env, ENVIRONMENTS.keys())) + # Connect to the postgres server + conn = connect(user=user, host=host, password=password) + # Set the isolation level to AUTOCOMMIT so we can execute a create database + # sql quary + conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) + # Get the cursor + cur = conn.cursor() + # Check that it does not already exists + if _check_db_exists(ENVIRONMENTS[env], cur): + print("Environment {0} already present on the system. You can drop " + "it by running `qiita_env drop_env --env {0}".format(env)) + else: + # Create the database + print('Creating database') + cur.execute('CREATE DATABASE %s' % ENVIRONMENTS[env]) + cur.close() + conn.close() + + # Connect to the postgres server, but this time to the just created db + conn = connect(user=user, host=host, password=password, + database=ENVIRONMENTS[env]) + cur = conn.cursor() + + print('Inserting database metadata') + # Build the SQL layout into the database + with open(SETTINGS_FP, 'U') as f: + cur.execute(f.read()) + + # Insert the settings values to the database + cur.execute("INSERT INTO settings (test, base_data_dir, base_work_dir)" + " VALUES (TRUE, '%s', '%s')" + % (base_data_dir, base_work_dir)) + + if env == 'demo': + # Create the schema + print('Building SQL layout') + with open(LAYOUT_FP, 'U') as f: + cur.execute(f.read()) + + print('Initializing database') + # Initialize the database + with open(INITIALIZE_FP, 'U') as f: + cur.execute(f.read()) + + # Commenting out right now - probably will ad later + # print('Populating database with demo data (1/2)') + # # Populate the database + # with open(POPULATE_FP, 'U') as f: + # cur.execute(f.read()) + + # Commit all the changes and close the connections + print('Populating database with demo data') + cur.execute( + "INSERT INTO qiita.qiita_user (email, user_level_id, password," + " name, affiliation, address, phone) VALUES " + "('demo@microbio.me', 4, " + 
"'$2a$12$gnUi8Qg.0tvW243v889BhOBhWLIHyIJjjgaG6dxuRJkUM8nXG9Efe" + "', 'Demo', 'Qitta Dev', '1345 Colorado Avenue', " + "'303-492-1984');") + + conn.commit() + cur.close() + conn.close() + + print('Downloading test files') + # Download tree file + url = ("https://raw.githubusercontent.com/biocore/Evident/master" + "/data/gg_97_otus_4feb2011.tre") + try: + urlretrieve(url, join(base_data_dir, "reference", + "gg_97_otus_4feb2011.tre")) + except: + raise IOError("Error: DOWNLOAD FAILED") + # # download files from thebeast + # url = ("ftp://thebeast.colorado.edu/pub/QIIME_DB_Public_Studies/" + # "study_1001_split_library_seqs_and_mapping.tgz") + # outdir = mkdtemp() + # basedir = join(outdir, + # "study_1001_split_library_seqs_and_mapping/") + # try: + # urlretrieve(url, join(outdir, "study_1001.tar.gz")) + # except: + # raise IOError("Error: DOWNLOAD FAILED") + # rmtree(outdir) + + # print('Extracting files') + # # untar the files + # with taropen(join(outdir, "study_1001.tar.gz")) as tar: + # tar.extractall(outdir) + # # un-gzip sequence file + # with gzopen(join(basedir, + # "study_1001_split_library_seqs.fna.gz")) as gz: + # with open(join(basedir, "seqs.fna"), 'w') as fout: + # fout.write(str(gz.read())) + + # print('Populating database with demo data') + # # copy the preprocessed and procesed data to the study + # remove(join(base_data_dir, + # "processed_data/" + # "study_1001_closed_reference_otu_table.biom")) + # remove(join(base_data_dir, "preprocessed_data/seqs.fna")) + # move(join(basedir, "study_1001_closed_reference_otu_table.biom"), + # join(base_data_dir, "processed_data")) + # move(join(basedir, "seqs.fna"), join(base_data_dir, + # "preprocessed_data")) + + # # clean up after ourselves + # rmtree(outdir) + print('Demo environment successfully created') + else: + # Commit all the changes and close the connections + conn.commit() + cur.close() + conn.close() + + +def drop_environment(env, user, password, host): + r"""Drops the `env` environment. 
+ + Parameters + ---------- + env : {demo, test} + The environment to create + user : str + The postgres user to connect to the server + password : str + The password of the user + host : str + The host where the postgres server is running + """ + # Connect to the postgres server + conn = connect(user=user, host=host, password=password) + # Set the isolation level to AUTOCOMMIT so we can execute a + # drop database sql query + conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) + # Drop the database + cur = conn.cursor() + + if not _check_db_exists(ENVIRONMENTS[env], cur): + raise QiitaEnvironmentError( + "Test environment not present on the system. You can create it " + "by running 'qiita_env make_test_env'") + + if env == 'demo': + # wipe the overwriiten test files so empty as on repo + base = get_db_files_base_dir() + with open(join(base, "reference", + "gg_97_otus_4feb2011.tre"), 'w') as f: + f.write('\n') + # with open(join(base, "preprocessed_data/seqs.fna"), 'w') as fout: + # fout.write("\n") + # with open(join(base, "processed_data/study_1001_closed_reference" + # "_otu_table.biom"), 'w') as fout: + # fout.write("\n") + + cur.execute('DROP DATABASE %s' % ENVIRONMENTS[env]) + # Close cursor and connection + cur.close() + conn.close() + + +def clean_test_environment(user, password, host): + r"""Cleans the test database environment. + + In case that the test database is dirty (i.e. the 'qiita' schema is + present), this cleans it up by dropping the 'qiita' schema. 
+ + Parameters + ---------- + user : str + The postgres user to connect to the server + password : str + The password of the user + host : str + The host where the postgres server is running + """ + # Connect to the postgres server + conn = connect(user=user, host=host, password=password, + database='qiita_test') + # Get the cursor + cur = conn.cursor() + # Drop the qiita schema + cur.execute("DROP SCHEMA qiita CASCADE") + # Commit the changes + conn.commit() + # Close cursor and connections + cur.close() + conn.close() diff --git a/qiita_db/exceptions.py b/qiita_db/exceptions.py new file mode 100644 index 000000000..015f287b0 --- /dev/null +++ b/qiita_db/exceptions.py @@ -0,0 +1,64 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from __future__ import division +from qiita_core.exceptions import QiitaError + + +class QiitaDBError(QiitaError): + """Base class for all qiita_db exceptions""" + pass + + +class QiitaDBNotImplementedError(QiitaDBError): + """""" + pass + + +class QiitaDBExecutionError(QiitaDBError): + """Exception for error when executing SQL queries""" + pass + + +class QiitaDBConnectionError(QiitaDBError): + """Exception for error when connecting to the db""" + pass + + +class QiitaDBColumnError(QiitaDBError): + """Exception when missing table information or excess information passed""" + pass + + +class QiitaDBDuplicateError(QiitaDBError): + """Exception when duplicating something in the database""" + def __init__(self, obj_name, attributes): + super(QiitaDBDuplicateError, self).__init__() + self.args = ("The '%s' object with attributes (%s) already exists." 
+ % (obj_name, attributes),) + + +class QiitaDBStatusError(QiitaDBError): + """Exception when editing is done with an unallowed status""" + pass + + +class QiitaDBUnknownIDError(QiitaDBError): + """Exception for error when an object does not exists in the DB""" + def __init__(self, missing_id, table): + super(QiitaDBUnknownIDError, self).__init__() + self.args = ("The object with ID '%s' does not exists in table '%s'" + % (missing_id, table),) + + +class QiitaDBDuplicateHeaderError(QiitaDBError): + """Exception for error when a MetadataTemplate has duplicate columns""" + def __init__(self): + super(QiitaDBDuplicateHeaderError, self).__init__() + self.args = ("Duplicate headers found in MetadataTemplate. Note " + "that the headers are not case-sensitive",) diff --git a/qiita_db/investigation.py b/qiita_db/investigation.py new file mode 100644 index 000000000..76686f096 --- /dev/null +++ b/qiita_db/investigation.py @@ -0,0 +1,79 @@ +from __future__ import division + +""" +Objects for dealing with Qiita studies + +This module provides the implementation of the Investigation class. + + +Classes +------- +- `Investigation` -- A Qiita investigation class +""" +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +from .base import QiitaStatusObject + +REQUIRED_KEYS = {"name", "description", "contact_person"} + + +class Investigation(QiitaStatusObject): + """ + Study object to access to the Qiita Study information + + Attributes + ---------- + name: str + name of the investigation + description: str + description of what the investigation is investigating + contact_person: StudyPerson object + studies: list of Study Objects + all studies that are part of the investigation + + Methods + ------- + add_study + Adds a study to the investigation + """ + _table = "investigation" + + @classmethod + def create(cls, owner, info, investigation=None): + """Creates a new investigation on the database""" + raise NotImplementedError() + + @classmethod + def delete(cls, id_): + """Deletes an investigation on the database""" + raise NotImplementedError() + + @property + def name(self): + raise NotImplementedError() + + @name.setter + def name(self, value): + raise NotImplementedError() + + @property + def description(self): + raise NotImplementedError() + + @description.setter + def description(self, value): + raise NotImplementedError() + + @property + def contact_person(self): + raise NotImplementedError() + + @contact_person.setter + def contact_person(self, value): + raise NotImplementedError() diff --git a/qiita_db/job.py b/qiita_db/job.py new file mode 100644 index 000000000..56b24e9fc --- /dev/null +++ b/qiita_db/job.py @@ -0,0 +1,363 @@ +""" +Objects for dealing with Qiita jobs + +This module provides the implementation of the Job class. + +Classes +------- +- `Job` -- A Qiita Job class +""" +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- +from __future__ import division +from json import dumps, loads +from os.path import join +from time import strftime +from datetime import date +from functools import partial + +from qiita_core.exceptions import IncompetentQiitaDeveloperError + +from .base import QiitaStatusObject +from .util import insert_filepaths, convert_to_id, get_db_files_base_dir +from .sql_connection import SQLConnectionHandler +from .logger import LogEntry +from .exceptions import QiitaDBStatusError + + +class Job(QiitaStatusObject): + """ + Job object to access to the Qiita Job information + + Attributes + ---------- + datatype + command + options + results + error + + Methods + ------- + set_error + add_results + """ + _table = "job" + + def _lock_job(self, conn_handler): + """Raises QiitaDBStatusError if study is public""" + if self.check_status(("completed", "error")): + raise QiitaDBStatusError("Can't change status of finished job!") + + def _status_setter_checks(self, conn_handler): + r"""Perform a check to make sure not setting status away from completed + or errored + """ + self._lock_job(conn_handler) + + @staticmethod + def get_commands(): + """returns commands available with the options as well + + Returns + ------- + list of command objects + """ + return Command.create_list() + + @classmethod + def exists(cls, datatype, command, options): + """Checks if the given job already exists + + Parameters + ---------- + datatype : str + Datatype the job is operating on + command : str + The name of the command run on the data + options : dict + Options for the command in the format {option: value} + + Returns + ------- + bool + Whether the job exists or not + """ + conn_handler = SQLConnectionHandler() + datatype_id = convert_to_id(datatype, "data_type", conn_handler) + sql = "SELECT command_id FROM qiita.command WHERE name = %s" + command_id = conn_handler.execute_fetchone(sql, (command, ))[0] + opts_json 
= dumps(options, sort_keys=True, separators=(',', ':')) + sql = ("SELECT EXISTS(SELECT * FROM qiita.{0} WHERE data_type_id = %s" + " AND command_id = %s AND options = %s)".format(cls._table)) + return conn_handler.execute_fetchone( + sql, (datatype_id, command_id, opts_json))[0] + + @classmethod + def create(cls, datatype, command, options, analysis): + """Creates a new job on the database + + Parameters + ---------- + datatype : str + The datatype in which this job applies + command : str + The identifier of the command executed in this job + options: dict + The options for the command in format {option: value} + analysis : Analysis object + The analysis which this job belongs to + + Returns + ------- + Job object + The newly created job + """ + # EXISTS IGNORED FOR DEMO, ISSUE #83 + # if cls.exists(datatype, command, options): + # raise QiitaDBDuplicateError( + # "Job", "datatype: %s, command: %s, options: %s" + # % (datatype, command, options)) + + # Get the datatype and command ids from the strings + conn_handler = SQLConnectionHandler() + datatype_id = convert_to_id(datatype, "data_type", conn_handler) + sql = "SELECT command_id FROM qiita.command WHERE name = %s" + command_id = conn_handler.execute_fetchone(sql, (command, ))[0] + + # JSON the options dictionary + opts_json = dumps(options, sort_keys=True, separators=(',', ':')) + # Create the job and return it + sql = ("INSERT INTO qiita.{0} (data_type_id, job_status_id, " + "command_id, options) VALUES " + "(%s, %s, %s, %s) RETURNING job_id").format(cls._table) + job_id = conn_handler.execute_fetchone(sql, (datatype_id, 1, + command_id, opts_json))[0] + + # add job to analysis + sql = ("INSERT INTO qiita.analysis_job (analysis_id, job_id) VALUES " + "(%s, %s)") + conn_handler.execute(sql, (analysis.id, job_id)) + + return cls(job_id) + + @property + def datatype(self): + sql = ("SELECT data_type from qiita.data_type WHERE data_type_id = " + "(SELECT data_type_id from qiita.{0} WHERE " + "job_id = 
%s)".format(self._table)) + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @property + def command(self): + """Returns the command of the job as (name, command) + + Returns + ------- + str + command run by the job + """ + sql = ("SELECT name, command from qiita.command WHERE command_id = " + "(SELECT command_id from qiita.{0} WHERE " + "job_id = %s)".format(self._table)) + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone(sql, (self._id, )) + + @property + def options(self): + """Options used in the job + + Returns + ------- + dict + options in the format {option: setting} + """ + sql = ("SELECT options FROM qiita.{0} WHERE " + "job_id = %s".format(self._table)) + conn_handler = SQLConnectionHandler() + try: + opts = loads(conn_handler.execute_fetchone(sql, (self._id, ))[0]) + except ValueError: + raise IncompetentQiitaDeveloperError("Malformed options for job " + "id %d" % self._id) + sql = ("SELECT command, output from qiita.command WHERE command_id = (" + "SELECT command_id from qiita.{0} WHERE " + "job_id = %s)".format(self._table)) + db_comm = conn_handler.execute_fetchone(sql, (self._id, )) + out_opt = loads(db_comm[1]) + basedir = get_db_files_base_dir(conn_handler) + join_f = partial(join, join(basedir, "job")) + for k in out_opt: + opts[k] = join_f("%s_%s_%s" % (self._id, db_comm[0], k.strip("-"))) + return opts + + @property + def results(self): + """List of job result filepaths + + Returns + ------- + list + Filepaths to the result files + """ + # Copy files to working dir, untar if necessary, then return filepaths + conn_handler = SQLConnectionHandler() + results = conn_handler.execute_fetchall( + "SELECT filepath FROM qiita.filepath WHERE filepath_id IN " + "(SELECT filepath_id FROM qiita.job_results_filepath " + "WHERE job_id = %s)", + (self._id, )) + # create new list, with relative paths from db base + return [join("job", fp[0]) for fp in results] + + @property + def 
error(self): + """String with an error message, if the job failed + + Returns + ------- + str or None + error message/traceback for a job, or None if none exists + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT log_id FROM qiita.{0} " + "WHERE job_id = %s".format(self._table)) + logging_id = conn_handler.execute_fetchone(sql, (self._id, ))[0] + if logging_id is None: + ret = None + else: + ret = LogEntry(logging_id) + + return ret + +# --- Functions --- + def set_error(self, msg, severity): + """Logs an error for the job + + Parameters + ---------- + msg : str + Error message/stacktrace if available + severity: int + Severity code of error + """ + conn_handler = SQLConnectionHandler() + log_entry = LogEntry.create(severity, msg) + self._lock_job(conn_handler) + + # attach the error to the job and set to error + sql = ("UPDATE qiita.{0} SET log_id = %s, job_status_id = 4 WHERE " + "job_id = %s".format(self._table)) + + conn_handler.execute(sql, (log_entry.id, self._id)) + + def add_results(self, results): + """Adds a list of results to the results + + Parameters + ---------- + results : list of tuples + filepath information to add to job, in format + [(filepath, type), ...] 
+ Where type is the filepath type of the filepath passed + + Notes + ----- + Curently available file types are: + biom, directory, plain_text + """ + # add filepaths to the job + conn_handler = SQLConnectionHandler() + self._lock_job(conn_handler) + # convert all file type text to file type ids + res_ids = [(fp, convert_to_id(fptype, "filepath_type", conn_handler)) + for fp, fptype in results] + file_ids = insert_filepaths(res_ids, self._id, self._table, + "filepath", conn_handler, move_files=False) + + # associate filepaths with job + sql = ("INSERT INTO qiita.{0}_results_filepath (job_id, filepath_id) " + "VALUES (%s, %s)".format(self._table)) + conn_handler.executemany(sql, [(self._id, fid) for fid in file_ids]) + + +class Command(object): + """Holds all information on the commands available + + This will be an in-memory representation because the command table is + considerably more static than other objects tables, changing only with new + QIIME releases. + + Attributes + ---------- + name + command + input_opts + required_opts + optional_opts + output_opts + """ + @classmethod + def create_list(cls): + """Creates list of all available commands + + Returns + ------- + list of Command objects + """ + conn_handler = SQLConnectionHandler() + commands = conn_handler.execute_fetchall("SELECT * FROM qiita.command") + # create the list of command objects + return [cls(c["name"], c["command"], c["input"], c["required"], + c["optional"], c["output"]) for c in commands] + + def __eq__(self, other): + if type(self) != type(other): + return False + if self.name != other.name: + return False + if self.command != other.command: + return False + if self.input_opts != other.input_opts: + return False + if self.output_opts != other.output_opts: + return False + if self.required_opts != other.required_opts: + return False + if self.optional_opts != other.optional_opts: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def 
__init__(self, name, command, input_opts, required_opts, + optional_opts, output_opts): + """Creates the command object + + Parameters: + name : str + Name of the command + command: str + python command to run + input_opts : str + JSON of input options for the command + required_opts : str + JSON of required options for the command + optional_opts : str + JSON of optional options for the command + output_opts : str + JSON of output options for the command + """ + self.name = name + self.command = command + self.input_opts = dumps(input_opts) + self.required_opts = dumps(required_opts) + self.optional_opts = dumps(optional_opts) + self.output_opts = dumps(output_opts) diff --git a/qiita_db/logger.py b/qiita_db/logger.py new file mode 100644 index 000000000..be0ded7d9 --- /dev/null +++ b/qiita_db/logger.py @@ -0,0 +1,187 @@ +r""" +Logging objects (:mod: `qiita_db.logger`) +==================================== + +..currentmodule:: qiita_db.logger + +This module provides objects for recording log information + +Classes +------- + +..autosummary:: + :toctree: generated/ + + LogEntry +""" + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
# -----------------------------------------------------------------------------

from __future__ import division

from json import loads, dumps
from datetime import datetime

from qiita_core.exceptions import IncompetentQiitaDeveloperError
from .sql_connection import SQLConnectionHandler
from .exceptions import QiitaDBNotImplementedError, QiitaDBUnknownIDError
from .base import QiitaObject


class LogEntry(QiitaObject):
    """Models one row of the logging table.

    Attributes
    ----------
    severity
    time
    info
    message

    Methods
    -------
    clear_info
    add_info
    """

    _table = 'logging'

    @classmethod
    def create(cls, severity_id, msg, info=None):
        """Logs a new entry and returns the object that models it

        Parameters
        ----------
        severity_id : int
            The level of severity to use for the LogEntry. Refers to an entry
            in the SEVERITY table.
        msg : str
            The message text
        info : dict, optional
            Defaults to ``None``. If supplied, the information will be added
            as the first entry in a list of information dicts. If ``None``,
            an empty dict will be added.

        Notes
        -----
        - When `info` is added, keys can be of any type, but upon retrieval,
          they will be of type str
        """
        # The information column always stores a JSON list of dicts; the
        # dict supplied at creation time occupies index 0
        info = dumps([info if info is not None else {}])

        conn = SQLConnectionHandler()
        sql = ("INSERT INTO qiita.{} (time, severity_id, msg, information) "
               "VALUES (%s, %s, %s, %s) "
               "RETURNING logging_id".format(cls._table))
        entry_id = conn.execute_fetchone(
            sql, (datetime.now(), severity_id, msg, info))[0]

        return cls(entry_id)

    @property
    def severity(self):
        """The severity_id associated with this LogEntry

        Returns
        -------
        int
            This is a key to the SEVERITY table
        """
        sql = ("SELECT severity_id FROM qiita.{} WHERE "
               "logging_id = %s".format(self._table))
        return SQLConnectionHandler().execute_fetchone(sql, (self.id,))[0]

    @property
    def time(self):
        """The time at which this LogEntry was created

        Returns
        -------
        datetime
        """
        sql = ("SELECT time FROM qiita.{} "
               "WHERE logging_id = %s".format(self._table))
        return SQLConnectionHandler().execute_fetchone(sql, (self.id,))[0]

    @property
    def info(self):
        """The list of information dicts associated with this LogEntry

        Returns
        -------
        list of dict
            Each entry in the list is information that was added (the info
            added upon creation will be index 0, and if additional info
            was supplied subsequently, those entries will occupy subsequent
            indices)

        Notes
        -----
        - When `info` is added, keys can be of any type, but upon retrieval,
          they will be of type str
        """
        sql = ("SELECT information FROM qiita.{} "
               "WHERE logging_id = %s".format(self._table))
        raw = SQLConnectionHandler().execute_fetchone(sql, (self.id,))[0]
        return loads(raw)

    @property
    def msg(self):
        """The message text for this LogEntry

        Returns
        -------
        str
        """
        sql = ("SELECT msg FROM qiita.{} "
               "WHERE logging_id = %s".format(self._table))
        return SQLConnectionHandler().execute_fetchone(sql, (self.id,))[0]

    def clear_info(self):
        """Resets the list of info dicts to be an empty list
        """
        sql = ("UPDATE qiita.{} set information = %s "
               "WHERE logging_id = %s".format(self._table))
        SQLConnectionHandler().execute(sql, (dumps([]), self.id))

    def add_info(self, info):
        """Appends `info` to the information associated with this LogEntry

        Parameters
        ----------
        info : dict
            The information to add.

        Notes
        -----
        - When `info` is added, keys can be of any type, but upon retrieval,
          they will be of type str
        """
        # Read-modify-write of the whole JSON list
        updated = self.info
        updated.append(info)

        sql = ("UPDATE qiita.{} SET information = %s "
               "WHERE logging_id = %s".format(self._table))
        SQLConnectionHandler().execute(sql, (dumps(updated), self.id))


r"""
Metadata template objects (:mod: `qiita_db.metadata_template)
=============================================================

..currentmodule:: qiita_db.metadata_template

This module provides the MetadataTemplate base class and the subclasses
SampleTemplate and PrepTemplate.

Classes
-------

..autosummary::
    :toctree: generated/

    BaseSample
    Sample
    PrepSample
    MetadataTemplate
    SampleTemplate
    PrepTemplate

Methods
-------

..autosummary::
    :toctree: generated/

    sample_template_adder
"""

# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
+# ----------------------------------------------------------------------------- + +from __future__ import division +from future.builtins import zip +from copy import deepcopy + +import pandas as pd +import numpy as np + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from .exceptions import (QiitaDBDuplicateError, QiitaDBColumnError, + QiitaDBUnknownIDError, QiitaDBNotImplementedError, + QiitaDBDuplicateHeaderError) +from .base import QiitaObject +from .sql_connection import SQLConnectionHandler +from .util import exists_table, get_table_cols + + +def _get_datatypes(metadata_map): + r"""Returns the datatype of each metadata_map column + + Parameters + ---------- + metadata_map : DataFrame + The MetadataTemplate contents + + Returns + ------- + list of str + The SQL datatypes for each column, in column order + """ + datatypes = [] + for dtype in metadata_map.dtypes: + if dtype in [np.int8, np.int16, np.int32, np.int64]: + datatypes.append('integer') + elif dtype in [np.float16, np.float32, np.float64]: + datatypes.append('float8') + else: + datatypes.append('varchar') + return datatypes + + +def _as_python_types(metadata_map, headers): + r"""Converts the values of metadata_map pointed by headers from numpy types + to python types. + + Psycopg2 does not support the numpy types, so we should cast them to the + closest python type + + Parameters + ---------- + metadata_map : DataFrame + The MetadataTemplate contents + headers : list of str + The headers of the columns of metadata_map that needs to be converted + to a python type + + Returns + ------- + list of lists + The values of the columns in metadata_map pointed by headers casted to + python types. 
+ """ + values = [] + for h in headers: + if isinstance(metadata_map[h][0], np.generic): + values.append(list(map(np.asscalar, metadata_map[h]))) + else: + values.append(list(metadata_map[h])) + return values + + +class BaseSample(QiitaObject): + r"""Sample object that accesses the db to get the information of a sample + belonging to a PrepTemplate or a SampleTemplate. + + Parameters + ---------- + sample_id : str + The sample id + md_template : MetadataTemplate + The metadata template obj to which the sample belongs to + + Methods + ------- + __eq__ + __len__ + __getitem__ + __setitem__ + __delitem__ + __iter__ + __contains__ + exists + keys + values + items + get + + See Also + -------- + QiitaObject + Sample + PrepSample + """ + # Used to find the right SQL tables - should be defined on the subclasses + _table_prefix = None + _column_table = None + _id_column = None + + def _check_template_class(self, md_template): + r"""Checks that md_template is of the correct type + + Parameters + ---------- + md_template : MetadataTemplate + The metadata template + + Raises + ------ + IncompetentQiitaDeveloperError + If its call directly from the Base class + If `md_template` doesn't have the correct type + """ + raise IncompetentQiitaDeveloperError() + + def __init__(self, sample_id, md_template): + r"""Initializes the object + + Parameters + ---------- + sample_id : str + The sample id + md_template : MetadataTemplate + The metadata template in which the sample is present + + Raises + ------ + QiitaDBUnknownIDError + If `sample_id` does not correspond to any sample in md_template + """ + # Check that we are not instantiating the base class + self._check_subclass() + # Check that the md_template is of the correct type + self._check_template_class(md_template) + # Check if the sample id is present on the passed metadata template + # This test will check that the sample id is actually present on the db + if sample_id not in md_template: + raise 
QiitaDBUnknownIDError(sample_id, self.__class__.__name__) + # Assign private attributes + self._id = sample_id + self._md_template = md_template + self._dynamic_table = "%s%d" % (self._table_prefix, + self._md_template.id) + + def __hash__(self): + r"""Defines the hash function so samples are hashable""" + return hash(self._id) + + def __eq__(self, other): + r"""Self and other are equal based on type and ids""" + if not isinstance(other, type(self)): + return False + if other._id != self._id: + return False + if other._md_template != self._md_template: + return False + return True + + @classmethod + def exists(cls, sample_id, md_template): + r"""Checks if already exists a MetadataTemplate for the provided object + + Parameters + ---------- + sample_id : str + The sample id + md_template : MetadataTemplate + The metadata template to which the sample belongs to + + Returns + ------- + bool + True if already exists. False otherwise. + """ + cls._check_subclass() + conn_handler = SQLConnectionHandler() + return conn_handler.execute_fetchone( + "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE sample_id=%s AND " + "{1}=%s)".format(cls._table, cls._id_column), + (sample_id, md_template.id))[0] + + def _get_categories(self, conn_handler): + r"""Returns all the available metadata categories for the sample + + Parameters + ---------- + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + + Returns + ------- + set of str + The set of all available metadata categories + """ + # Get all the required columns + required_cols = get_table_cols(self._table, conn_handler) + # Get all the the columns in the dynamic table + dynamic_cols = get_table_cols(self._dynamic_table, conn_handler) + # Get the union of the two previous lists + cols = set(required_cols).union(dynamic_cols) + # Remove the sample_id column and the study_id/raw_data_id columns, + # as this columns are used internally for data storage and they don't + # actually belong to the metadata 
+ cols.remove('sample_id') + cols.remove(self._id_column) + return cols + + def __len__(self): + r"""Returns the number of metadata categories + + Returns + ------- + int + The number of metadata categories + """ + conn_handler = SQLConnectionHandler() + # return the number of columns + return len(self._get_categories(conn_handler)) + + def __getitem__(self, key): + r"""Returns the value of the metadata category `key` + + Parameters + ---------- + key : str + The metadata category + + Returns + ------- + obj + The value of the metadata category `key` + + Raises + ------ + KeyError + If the metadata category `key` does not exists + + See Also + -------- + get + """ + conn_handler = SQLConnectionHandler() + key = key.lower() + if key in self._get_categories(conn_handler): + # Check if we have either to query the table with required columns + # or the dynamic table + if key in get_table_cols(self._table, conn_handler): + return conn_handler.execute_fetchone( + "SELECT {0} FROM qiita.{1} WHERE {2}=%s AND " + "sample_id=%s".format(key, self._table, self._id_column), + (self._md_template.id, self._id))[0] + else: + return conn_handler.execute_fetchone( + "SELECT {0} FROM qiita.{1} WHERE " + "sample_id=%s".format(key, self._dynamic_table), + (self._id, ))[0] + else: + # The key is not available for the sample, so raise a KeyError + raise KeyError("Metadata category %s does not exists for sample %s" + " in template %d" % + (key, self._id, self._md_template.id)) + + def __setitem__(self, key, value): + r"""Sets the metadata value for the category `key` + + Parameters + ---------- + key : str + The metadata category + value : obj + The new value for the category + """ + raise QiitaDBNotImplementedError() + + def __delitem__(self, key): + r"""Removes the sample with sample id `key` from the database + + Parameters + ---------- + key : str + The sample id + """ + raise QiitaDBNotImplementedError() + + def __iter__(self): + r"""Iterator over the metadata keys + + Returns + 
------- + Iterator + Iterator over the sample ids + + See Also + -------- + keys + """ + conn_handler = SQLConnectionHandler() + return iter(self._get_categories(conn_handler)) + + def __contains__(self, key): + r"""Checks if the metadata category `key` is present + + Parameters + ---------- + key : str + The sample id + + Returns + ------- + bool + True if the metadata category `key` is present, false otherwise + """ + conn_handler = SQLConnectionHandler() + return key.lower() in self._get_categories(conn_handler) + + def keys(self): + r"""Iterator over the metadata categories + + Returns + ------- + Iterator + Iterator over the sample ids + + See Also + -------- + __iter__ + """ + return self.__iter__() + + def values(self): + r"""Iterator over the metadata values, in metadata category order + + Returns + ------- + Iterator + Iterator over metadata values + """ + conn_handler = SQLConnectionHandler() + values = conn_handler.execute_fetchone( + "SELECT * FROM qiita.{0} WHERE {1}=%s AND " + "sample_id=%s".format(self._table, self._id_column), + (self._md_template.id, self._id))[2:] + dynamic_values = conn_handler.execute_fetchone( + "SELECT * from qiita.{0} WHERE " + "sample_id=%s".format(self._dynamic_table), + (self._id, ))[1:] + values.extend(dynamic_values) + return iter(values) + + def items(self): + r"""Iterator over (category, value) tuples + + Returns + ------- + Iterator + Iterator over (category, value) tuples + """ + conn_handler = SQLConnectionHandler() + values = dict(conn_handler.execute_fetchone( + "SELECT * FROM qiita.{0} WHERE {1}=%s AND " + "sample_id=%s".format(self._table, self._id_column), + (self._md_template.id, self._id))) + dynamic_values = dict(conn_handler.execute_fetchone( + "SELECT * from qiita.{0} WHERE " + "sample_id=%s".format(self._dynamic_table), + (self._id, ))) + values.update(dynamic_values) + del values['sample_id'] + del values[self._id_column] + return values.items() + + def get(self, key): + r"""Returns the metadata value 
for category `key`, or None if the + category `key` is not present + + Parameters + ---------- + key : str + The metadata category + + Returns + ------- + Obj or None + The value object for the category `key`, or None if it is not + present + + See Also + -------- + __getitem__ + """ + try: + return self[key] + except KeyError: + return None + + +class PrepSample(BaseSample): + r"""Class that models a sample present in a PrepTemplate. + + See Also + -------- + BaseSample + Sample + """ + _table = "common_prep_info" + _table_prefix = "prep_" + _column_table = "raw_data_prep_columns" + _id_column = "raw_data_id" + + def _check_template_class(self, md_template): + r"""Checks that md_template is of the correct type + + Parameters + ---------- + md_template : PrepTemplate + The metadata template + + Raises + ------ + IncompetentQiitaDeveloperError + If `md_template` is not a PrepTemplate object + """ + if not isinstance(md_template, PrepTemplate): + raise IncompetentQiitaDeveloperError() + + +class Sample(BaseSample): + r"""Class that models a sample present in a SampleTemplate. 
+ + See Also + -------- + BaseSample + PrepSample + """ + _table = "required_sample_info" + _table_prefix = "sample_" + _column_table = "study_sample_columns" + _id_column = "study_id" + + def _check_template_class(self, md_template): + r"""Checks that md_template is of the correct type + + Parameters + ---------- + md_template : SampleTemplate + The metadata template + + Raises + ------ + IncompetentQiitaDeveloperError + If `md_template` is not a SampleTemplate object + """ + if not isinstance(md_template, SampleTemplate): + raise IncompetentQiitaDeveloperError() + + +class MetadataTemplate(QiitaObject): + r"""Metadata map object that accesses the db to get the sample/prep + template information + + Attributes + ---------- + id + + Methods + ------- + create + exists + __len__ + __getitem__ + __setitem__ + __delitem__ + __iter__ + __contains__ + keys + values + items + get + to_file + + See Also + -------- + QiitaObject + SampleTemplate + PrepTemplate + """ + + # Used to find the right SQL tables - should be defined on the subclasses + _table_prefix = None + _column_table = None + _id_column = None + _strict = True + _sample_cls = None + + def _check_id(self, id_, conn_handler=None): + r"""Checks that the MetadataTemplate id_ exists on the database""" + self._check_subclass() + conn_handler = (conn_handler if conn_handler is not None + else SQLConnectionHandler()) + return conn_handler.execute_fetchone( + "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE " + "{1}=%s)".format(self._table, self._id_column), + (id_, ))[0] + + @classmethod + def _table_name(cls, obj): + r"""Returns the dynamic table name + + Parameters + ---------- + obj : Study or RawData + The obj to which the metadata template belongs to. 
+ + Returns + ------- + str + The table name + + Raises + ------ + IncompetentQiitaDeveloperError + If called from the base class directly + """ + if not cls._table_prefix: + raise IncompetentQiitaDeveloperError( + "_table_prefix should be defined in the subclasses") + return "%s%d" % (cls._table_prefix, obj.id) + + @classmethod + def create(cls, md_template, obj): + r"""Creates the metadata template in the database + + Parameters + ---------- + md_template : DataFrame + The metadata template file contents indexed by samples Ids + obj : Study or RawData + The obj to which the metadata template belongs to. Study in case + of SampleTemplate and RawData in case of PrepTemplate + """ + # Check that we don't have a MetadataTemplate for obj + if cls.exists(obj): + raise QiitaDBDuplicateError(cls.__name__, 'id: %d' % obj.id) + + # We are going to modify the md_template. We create a copy so + # we don't modify the user one + md_template = deepcopy(md_template) + # In the database, all the column headers are lowercase + md_template.columns = [c.lower() for c in md_template.columns] + + # Check that we don't have duplicate columns + if len(set(md_template.columns)) != len(md_template.columns): + raise QiitaDBDuplicateHeaderError() + + conn_handler = SQLConnectionHandler() + # Check that md_template have the required columns + db_cols = get_table_cols(cls._table, conn_handler) + # Remove the sample_id and study_id columns + db_cols.remove('sample_id') + db_cols.remove(cls._id_column) + headers = list(md_template.keys()) + sample_ids = list(md_template.index) + num_samples = len(sample_ids) + remaining = set(db_cols).difference(headers) + if remaining: + # If strict, raise an error, else default to None + if cls._strict: + raise QiitaDBColumnError("Missing columns: %s" % remaining) + else: + for col in remaining: + md_template[col] = pd.Series([None] * num_samples, + index=sample_ids) + # Insert values on required columns + values = _as_python_types(md_template, db_cols) + 
values.insert(0, sample_ids) + values.insert(0, [obj.id] * num_samples) + values = [v for v in zip(*values)] + conn_handler.executemany( + "INSERT INTO qiita.{0} ({1}, sample_id, {2}) " + "VALUES (%s, %s, {3})".format(cls._table, cls._id_column, + ', '.join(db_cols), + ', '.join(['%s'] * len(db_cols))), + values) + + # Insert rows on *_columns table + headers = list(set(headers).difference(db_cols)) + datatypes = _get_datatypes(md_template.ix[:, headers]) + # psycopg2 requires a list of tuples, in which each tuple is a set + # of values to use in the string formatting of the query. We have all + # the values in different lists (but in the same order) so use zip + # to create the list of tuples that psycopg2 requires. + values = [v for v in zip([obj.id] * len(headers), headers, datatypes)] + conn_handler.executemany( + "INSERT INTO qiita.{0} ({1}, column_name, column_type) " + "VALUES (%s, %s, %s)".format(cls._column_table, cls._id_column), + values) + + # Create table with custom columns + table_name = cls._table_name(obj) + column_datatype = ["%s %s" % (col, dtype) + for col, dtype in zip(headers, datatypes)] + conn_handler.execute( + "CREATE TABLE qiita.{0} (sample_id varchar, {1})".format( + table_name, ', '.join(column_datatype))) + + # Insert values on custom table + values = _as_python_types(md_template, headers) + values.insert(0, sample_ids) + values = [v for v in zip(*values)] + conn_handler.executemany( + "INSERT INTO qiita.{0} (sample_id, {1}) " + "VALUES (%s, {2})".format(table_name, ", ".join(headers), + ', '.join(["%s"] * len(headers))), + values) + + return cls(obj.id) + + @classmethod + def exists(cls, obj): + r"""Checks if already exists a MetadataTemplate for the provided object + + Parameters + ---------- + obj : QiitaObject + The object to test if a MetadataTemplate exists for + + Returns + ------- + bool + True if already exists. False otherwise. 
+ """ + cls._check_subclass() + return exists_table(cls._table_name(obj), SQLConnectionHandler()) + + def _get_sample_ids(self, conn_handler): + r"""Returns all the available samples for the metadata template + + Parameters + ---------- + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + + Returns + ------- + set of str + The set of all available sample ids + """ + sample_ids = conn_handler.execute_fetchall( + "SELECT sample_id FROM qiita.{0} WHERE " + "{1}=%s".format(self._table, self._id_column), + (self._id, )) + return set(sample_id[0] for sample_id in sample_ids) + + def __len__(self): + r"""Returns the number of samples in the metadata template + + Returns + ------- + int + The number of samples in the metadata template + """ + conn_handler = SQLConnectionHandler() + return len(self._get_sample_ids(conn_handler)) + + def __getitem__(self, key): + r"""Returns the metadata values for sample id `key` + + Parameters + ---------- + key : str + The sample id + + Returns + ------- + Sample + The sample object for the sample id `key` + + Raises + ------ + KeyError + If the sample id `key` is not present in the metadata template + + See Also + -------- + get + """ + if key in self: + return self._sample_cls(key, self) + else: + raise KeyError("Sample id %s does not exists in template %d" + % (key, self._id)) + + def __setitem__(self, key, value): + r"""Sets the metadata values for sample id `key` + + Parameters + ---------- + key : str + The sample id + value : Sample + The sample obj holding the new sample values + """ + raise QiitaDBNotImplementedError() + + def __delitem__(self, key): + r"""Removes the sample with sample id `key` from the database + + Parameters + ---------- + key : str + The sample id + """ + raise QiitaDBNotImplementedError() + + def __iter__(self): + r"""Iterator over the sample ids + + Returns + ------- + Iterator + Iterator over the sample ids + + See Also + -------- + keys + """ + conn_handler = 
SQLConnectionHandler() + return iter(self._get_sample_ids(conn_handler)) + + def __contains__(self, key): + r"""Checks if the sample id `key` is present in the metadata template + + Parameters + ---------- + key : str + The sample id + + Returns + ------- + bool + True if the sample id `key` is in the metadata template, false + otherwise + """ + conn_handler = SQLConnectionHandler() + return key in self._get_sample_ids(conn_handler) + + def keys(self): + r"""Iterator over the sorted sample ids + + Returns + ------- + Iterator + Iterator over the sample ids + + See Also + -------- + __iter__ + """ + return self.__iter__() + + def values(self): + r"""Iterator over the metadata values + + Returns + ------- + Iterator + Iterator over Sample obj + """ + conn_handler = SQLConnectionHandler() + return iter(self._sample_cls(sample_id, self) + for sample_id in self._get_sample_ids(conn_handler)) + + def items(self): + r"""Iterator over (sample_id, values) tuples, in sample id order + + Returns + ------- + Iterator + Iterator over (sample_ids, values) tuples + """ + conn_handler = SQLConnectionHandler() + return iter((sample_id, self._sample_cls(sample_id, self)) + for sample_id in self._get_sample_ids(conn_handler)) + + def get(self, key): + r"""Returns the metadata values for sample id `key`, or None if the + sample id `key` is not present in the metadata map + + Parameters + ---------- + key : str + The sample id + + Returns + ------- + Sample or None + The sample object for the sample id `key`, or None if it is not + present + + See Also + -------- + __getitem__ + """ + try: + return self[key] + except KeyError: + return None + + def _transform_to_dict(self, values): + r"""Transforms `values` to a dict keyed by sample id + + Parameters + ---------- + values : object + The object returned from a execute_fetchall call + + Returns + ------- + dict + """ + result = {} + for row in values: + # Transform the row to a dictionary + values_dict = dict(row) + # Get the sample id 
of this row + sid = values_dict['sample_id'] + del values_dict['sample_id'] + # Remove _id_column from this row (if present) + if self._id_column in values_dict: + del values_dict[self._id_column] + result[sid] = values_dict + + return result + + def to_file(self, fp): + r"""Writes the MetadataTemplate to the file `fp` in tab-delimited + format + + Parameters + ---------- + fp : str + Path to the output file + """ + conn_handler = SQLConnectionHandler() + metadata_map = self._transform_to_dict(conn_handler.execute_fetchall( + "SELECT * FROM qiita.{0} WHERE {1}=%s".format(self._table, + self._id_column), + (self.id,))) + dyn_vals = self._transform_to_dict(conn_handler.execute_fetchall( + "SELECT * FROM qiita.{0}".format(self._table_name(self)))) + + for k in metadata_map: + metadata_map[k].update(dyn_vals[k]) + + headers = sorted(list(metadata_map.values())[0].keys()) + with open(fp, 'w') as f: + # First write the headers + f.write("#SampleID\t%s\n" % '\t'.join(headers)) + # Write the values for each sample id + for sid, d in sorted(metadata_map.items()): + values = [str(d[h]) for h in headers] + values.insert(0, sid) + f.write("%s\n" % '\t'.join(values)) + + +class SampleTemplate(MetadataTemplate): + r"""Represent the SampleTemplate of a study. Provides access to the + tables in the DB that holds the sample metadata information. + + See Also + -------- + MetadataTemplate + PrepTemplate + """ + _table = "required_sample_info" + _table_prefix = "sample_" + _column_table = "study_sample_columns" + _id_column = "study_id" + _sample_cls = Sample + + +class PrepTemplate(MetadataTemplate): + r"""Represent the PrepTemplate of a raw dat. Provides access to the + tables in the DB that holds the sample preparation information. 
+ + See Also + -------- + MetadataTemplate + SampleTemplate + """ + _table = "common_prep_info" + _table_prefix = "prep_" + _column_table = "raw_data_prep_columns" + _id_column = "raw_data_id" + _strict = False + _sample_cls = PrepSample diff --git a/qiita_db/sql_connection.py b/qiita_db/sql_connection.py new file mode 100644 index 000000000..7de2fc7a8 --- /dev/null +++ b/qiita_db/sql_connection.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from contextlib import contextmanager +from collections import Iterable + +from psycopg2 import connect, Error as PostgresError +from psycopg2.extras import DictCursor + +from .exceptions import QiitaDBExecutionError, QiitaDBConnectionError +from qiita_core.qiita_settings import qiita_config + + +class SQLConnectionHandler(object): + """Encapsulates the DB connection with the Postgres DB""" + def __init__(self): + self._connection = connect(user=qiita_config.user, + password=qiita_config.password, + database=qiita_config.database, + host=qiita_config.host, + port=qiita_config.port) + + def __del__(self): + self._connection.close() + + @contextmanager + def get_postgres_cursor(self): + """ Returns a Postgres cursor + + Returns + ------- + pgcursor : psycopg2.cursor + + Raises a QiitaDBConnectionError if the cursor cannot be created + """ + try: + with self._connection.cursor(cursor_factory=DictCursor) as cur: + yield cur + except PostgresError as e: + raise QiitaDBConnectionError("Cannot get postgres cursor! 
def _check_sql_args(self, sql_args):
    """Check that `sql_args` has a type accepted by psycopg2.

    Parameters
    ----------
    sql_args : object
        The SQL arguments to validate

    Raises
    ------
    TypeError
        If `sql_args` is truthy and not a tuple, list or dict; returns
        silently otherwise
    """
    # psycopg2 only accepts a sequence or a mapping as query parameters
    if sql_args and type(sql_args) not in [tuple, list, dict]:
        raise TypeError("sql_args should be tuple, list or dict. Found %s "
                        % type(sql_args))

@contextmanager
def _sql_executor(self, sql, sql_args=None, many=False):
    """Execute an SQL query, committing on success, rolling back on error.

    Parameters
    ----------
    sql : str
        The SQL query
    sql_args : tuple, list or dict, optional
        The arguments for the SQL query
    many : bool, optional
        If true, performs an execute many call

    Returns
    -------
    pgcursor : psycopg2.cursor
        The cursor in which the SQL query was executed

    Raises
    ------
    QiitaDBExecutionError
        If there is some error executing the SQL query
    """
    # Check that sql arguments have the correct type
    if many:
        for args in sql_args:
            self._check_sql_args(args)
    else:
        self._check_sql_args(sql_args)

    # Execute the query
    with self.get_postgres_cursor() as cur:
        try:
            if many:
                cur.executemany(sql, sql_args)
            else:
                cur.execute(sql, sql_args)
            yield cur
            self._connection.commit()
        except PostgresError as e:
            self._connection.rollback()
            # Render the query with its bound arguments for the error
            # message. mogrify itself can fail (sql_args is None, a dict,
            # an empty list, ...), in which case we fall back to the raw
            # SQL instead of masking the original database error with a
            # secondary exception. BUGFIX: the previous code indexed
            # sql_args[0] unconditionally and only caught IndexError, so
            # a failing query with sql_args=None crashed with TypeError.
            try:
                if many:
                    err_sql = cur.mogrify(sql, sql_args[0])
                else:
                    err_sql = cur.mogrify(sql, sql_args)
            except Exception:
                err_sql = sql
            raise QiitaDBExecutionError(("\nError running SQL query: %s"
                                         "\nError: %s" % (err_sql, e)))

def execute_fetchall(self, sql, sql_args=None):
    """Execute a fetchall SQL query.

    Parameters
    ----------
    sql : str
        The SQL query
    sql_args : tuple, list or dict, optional
        The arguments for the SQL query

    Returns
    -------
    list of tuples
        The results of the fetchall query

    Raises
    ------
    QiitaDBExecutionError
        If there is some error executing the SQL query

    Notes
    -----
    From psycopg2 documentation, only variable values should be bound via
    sql_args; it shouldn't be used to set table or field names. For those
    elements, ordinary string formatting should be used before running
    execute.
    """
    with self._sql_executor(sql, sql_args) as pgcursor:
        result = pgcursor.fetchall()
    return result

def execute_fetchone(self, sql, sql_args=None):
    """Execute a fetchone SQL query.

    Parameters
    ----------
    sql : str
        The SQL query
    sql_args : tuple, list or dict, optional
        The arguments for the SQL query

    Returns
    -------
    tuple
        The results of the fetchone query

    Raises
    ------
    QiitaDBExecutionError
        If there is some error executing the SQL query

    Notes
    -----
    From psycopg2 documentation, only variable values should be bound via
    sql_args; it shouldn't be used to set table or field names. For those
    elements, ordinary string formatting should be used before running
    execute.
    """
    with self._sql_executor(sql, sql_args) as pgcursor:
        result = pgcursor.fetchone()
    return result

def execute(self, sql, sql_args=None):
    """Execute an SQL query with no results.

    Parameters
    ----------
    sql : str
        The SQL query
    sql_args : tuple, list or dict, optional
        The arguments for the SQL query

    Raises
    ------
    QiitaDBExecutionError
        If there is some error executing the SQL query

    Notes
    -----
    From psycopg2 documentation, only variable values should be bound via
    sql_args; it shouldn't be used to set table or field names. For those
    elements, ordinary string formatting should be used before running
    execute.
    """
    with self._sql_executor(sql, sql_args):
        pass

def executemany(self, sql, sql_args_list):
    """Execute an executemany SQL query with no results.

    Parameters
    ----------
    sql : str
        The SQL query
    sql_args_list : list of tuples
        The arguments for each invocation of the SQL query

    Raises
    ------
    QiitaDBExecutionError
        If there is some error executing the SQL query

    Notes
    -----
    From psycopg2 documentation, only variable values should be bound via
    sql_args_list; it shouldn't be used to set table or field names. For
    those elements, ordinary string formatting should be used before
    running execute.
    """
    with self._sql_executor(sql, sql_args_list, True):
        pass
phone='111-121-1313') # doctest: +SKIP
+>>> person.name # doctest: +SKIP
+Some Dude
+>>> person.email # doctest: +SKIP
+somedude@foo.bar
+>>> person.address # doctest: +SKIP
+111 fake street
+>>> person.phone # doctest: +SKIP
+111-121-1313
+
+A study requires a minimum of information to be created. Note that the people
+must be passed as StudyPerson objects and the owner as a User object.
+
+>>> from qiita_db.study import Study # doctest: +SKIP
+>>> from qiita_db.user import User # doctest: +SKIP
+>>> info = {
+... "timeseries_type_id": 1,
+... "metadata_complete": True,
+... "mixs_compliant": True,
+... "number_samples_collected": 25,
+... "number_samples_promised": 28,
+... "portal_type_id": 3,
+... "study_alias": "TST",
+... "study_description": "Some description of the study goes here",
+... "study_abstract": "Some abstract goes here",
+... "emp_person_id": StudyPerson(2),
+... "principal_investigator_id": StudyPerson(3),
+... "lab_person_id": StudyPerson(1)} # doctest: +SKIP
+>>> owner = User('owner@foo.bar') # doctest: +SKIP
+>>> Study.create(owner, "New Study Title", 1, info) # doctest: +SKIP
+
+You can also add a study to an investigation by passing the investigation
+object while creating the study.
+
+>>> from qiita_db.study import Study # doctest: +SKIP
+>>> from qiita_db.user import User # doctest: +SKIP
+>>> from qiita_db.investigation import Investigation # doctest: +SKIP
+>>> info = {
+... "timeseries_type_id": 1,
+... "metadata_complete": True,
+... "mixs_compliant": True,
+... "number_samples_collected": 25,
+... "number_samples_promised": 28,
+... "portal_type_id": 3,
+... "study_alias": "TST",
+... "study_description": "Some description of the study goes here",
+... "study_abstract": "Some abstract goes here",
+... "emp_person_id": StudyPerson(2),
+... "principal_investigator_id": StudyPerson(3),
+... 
"lab_person_id": StudyPerson(1)} # doctest: +SKIP +>>> owner = User('owner@foo.bar') # doctest: +SKIP +>>> investigation = Investigation(1) # doctest: +SKIP +>>> Study(owner, "New Study Title", 1, info, investigation) # doctest: +SKIP +""" + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from __future__ import division +from future.utils import viewitems +from datetime import date +from copy import deepcopy + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from .base import QiitaStatusObject, QiitaObject +from .exceptions import (QiitaDBStatusError, QiitaDBColumnError) +from .util import check_required_columns, check_table_cols +from .sql_connection import SQLConnectionHandler + + +class Study(QiitaStatusObject): + r"""Study object to access to the Qiita Study information + + Attributes + ---------- + name + info + status + efo + shared_with + pmids + investigation + sample_template + raw_data + preprocessed_data + processed_data + + Methods + ------- + add_pmid + + Notes + ----- + All setters raise QiitaDBStatusError if trying to change a public study. + You should not be doing that. 
# Name of the backing table and the columns of qiita.study that are
# managed by the class itself and therefore excluded from the study info.
_table = "study"
_non_info = {"email", "study_id", "study_status_id", "study_title"}


def _lock_public(self, conn_handler):
    """Raise QiitaDBStatusError when the study is public."""
    if self.check_status(("public", )):
        raise QiitaDBStatusError("Can't change status of public study!")


def _status_setter_checks(self, conn_handler):
    r"""Disallow status changes once the study is public."""
    self._lock_public(conn_handler)


@classmethod
def get_public(cls):
    """Return a Study for every public study.

    Returns
    -------
    list of Study objects
        All public studies in the database
    """
    conn_handler = SQLConnectionHandler()
    sql = ("SELECT study_id FROM qiita.{0} WHERE "
           "{0}_status_id = %s".format(cls._table))
    # MAGIC NUMBER 2: status id for a public study
    rows = conn_handler.execute_fetchall(sql, (2,))
    return [cls(row[0]) for row in rows]


@classmethod
def create(cls, owner, title, efo, info, investigation=None):
    """Create a new study on the database.

    Parameters
    ----------
    owner : User object
        the study's owner
    title : str
        Title of the study
    efo : list
        Experimental Factor Ontology id(s) for the study
    info : dict
        the information attached to the study. All "*_id" keys must pass
        the objects associated with them.
    investigation : Investigation object, optional
        If passed, the investigation to associate with. Defaults to None.

    Raises
    ------
    QiitaDBColumnError
        Non-db columns in info dictionary
        All required keys not passed
    IncompetentQiitaDeveloperError
        email, study_id, study_status_id, or study_title passed as a key
        empty efo list passed

    Notes
    -----
    All keys in info, except the efo, must be equal to columns in the
    qiita.study table in the database.
    """
    # callers must not supply the columns this method manages itself
    managed_keys = cls._non_info.intersection(info)
    if managed_keys:
        raise QiitaDBColumnError("non info keys passed: %s" % managed_keys)

    # at least one EFO id is mandatory
    if not efo:
        raise IncompetentQiitaDeveloperError("Need EFO information!")

    # work on a copy so the caller's dict is left untouched, then fill
    # in the defaults and the values owned by this method
    to_insert = deepcopy(info)
    if "first_contact" not in to_insert:
        to_insert['first_contact'] = date.today().isoformat()
    to_insert['email'] = owner.id
    to_insert['study_title'] = title
    if "reprocess" not in to_insert:
        to_insert['reprocess'] = False
    # MAGIC NUMBER 1: the waiting_approval status
    to_insert['study_status_id'] = 1

    conn_handler = SQLConnectionHandler()
    # validate the dictionary against the table schema: no unknown
    # columns, and every required column present
    check_table_cols(conn_handler, to_insert, cls._table)
    check_required_columns(conn_handler, to_insert, cls._table)

    # insert the study; iterate a single column list so names and
    # values are guaranteed to line up
    columns = list(to_insert)
    sql = ("INSERT INTO qiita.{0} ({1}) VALUES ({2}) RETURNING "
           "study_id".format(cls._table, ','.join(columns),
                             ','.join(['%s'] * len(columns))))
    # pass object ids, not the objects themselves
    values = [to_insert[col].id
              if isinstance(to_insert[col], QiitaObject) else to_insert[col]
              for col in columns]
    study_id = conn_handler.execute_fetchone(sql, values)[0]

    # attach the EFO id(s)
    sql = ("INSERT INTO qiita.{0}_experimental_factor (study_id, "
           "efo_id) VALUES (%s, %s)".format(cls._table))
    conn_handler.executemany(sql, [(study_id, e) for e in efo])

    # optionally link the study to an investigation
    if investigation:
        sql = ("INSERT INTO qiita.investigation_study (investigation_id, "
               "study_id) VALUES (%s, %s)")
        conn_handler.execute(sql, (investigation.id, study_id))

    return cls(study_id)


# --- Attributes ---
@property
def title(self):
    """Title of the study.

    Returns
    -------
    str
        Title of study
    """
    conn_handler = SQLConnectionHandler()
    sql = ("SELECT study_title FROM qiita.{0} WHERE "
           "study_id = %s".format(self._table))
    return conn_handler.execute_fetchone(sql, (self._id, ))[0]


@title.setter
def title(self, title):
    """Set the title of the study.

    Parameters
    ----------
    title : str
        The new study title
    """
    conn_handler = SQLConnectionHandler()
    self._lock_public(conn_handler)
    sql = ("UPDATE qiita.{0} SET study_title = %s WHERE "
           "study_id = %s".format(self._table))
    return conn_handler.execute(sql, (title, self._id))


@property
def info(self):
    """All information attached to the study.

    Returns
    -------
    dict
        info of study keyed to column names
    """
    conn_handler = SQLConnectionHandler()
    sql = "SELECT * FROM qiita.{0} WHERE study_id = %s".format(self._table)
    info = dict(conn_handler.execute_fetchone(sql, (self._id, )))
    # strip the columns that are not considered part of the study info
    for column in self._non_info:
        info.pop(column)
    return info


@info.setter
def info(self, info):
    """Update the information attached to the study.

    Parameters
    ----------
    info : dict
        information to change/update for the study, keyed to column name

    Raises
    ------
    IncompetentQiitaDeveloperError
        Empty dict passed
    QiitaDBColumnError
        Unknown column names passed
    """
    if not info:
        raise IncompetentQiitaDeveloperError("Need entries in info dict!")

    if self._non_info.intersection(info):
        raise QiitaDBColumnError("non info keys passed: %s" %
                                 self._non_info.intersection(info))

    conn_handler = SQLConnectionHandler()
    self._lock_public(conn_handler)

    # reject any key that is not a column of the study table
    check_table_cols(conn_handler, info, self._table)

    # build the SET clause and its values in matching order, passing
    # object ids instead of the objects themselves
    assignments = []
    values = []
    for column, value in viewitems(info):
        assignments.append("{0} = %s".format(column))
        values.append(value.id if isinstance(value, QiitaObject) else value)
    values.append(self._id)

    sql = ("UPDATE qiita.{0} SET {1} WHERE "
           "study_id = %s".format(self._table, ','.join(assignments)))
    conn_handler.execute(sql, values)


@property
def efo(self):
    """Experimental Factor Ontology id(s) attached to the study."""
    conn_handler = SQLConnectionHandler()
    sql = ("SELECT efo_id FROM qiita.{0}_experimental_factor WHERE "
           "study_id = %s".format(self._table))
    rows = conn_handler.execute_fetchall(sql, (self._id, ))
    return [row[0] for row in rows]


@efo.setter
def efo(self, efo_vals):
    """Replace the EFO id(s) attached to the study.

    Parameters
    ----------
    efo_vals : list
        Id(s) for the new efo values

    Raises
    ------
    IncompetentQiitaDeveloperError
        Empty efo list passed
    """
    if not efo_vals:
        raise IncompetentQiitaDeveloperError("Need EFO information!")
    conn_handler = SQLConnectionHandler()
    self._lock_public(conn_handler)
    # wipe out any EFOs currently attached to the study...
    sql = ("DELETE FROM qiita.{0}_experimental_factor WHERE "
           "study_id = %s".format(self._table))
    conn_handler.execute(sql, (self._id, ))
    # ...and insert the new ones
    sql = ("INSERT INTO qiita.{0}_experimental_factor (study_id, "
           "efo_id) VALUES (%s, %s)".format(self._table))
    conn_handler.executemany(sql, [(self._id, e) for e in efo_vals])


@property
def shared_with(self):
    """Users the study is shared with.

    Returns
    -------
    list of User ids
        Users the study is shared with
    """
    conn_handler = SQLConnectionHandler()
    sql = ("SELECT email FROM qiita.{0}_users WHERE "
           "study_id = %s".format(self._table))
    return [row[0] for row in
            conn_handler.execute_fetchall(sql, (self._id,))]


@property
def pmids(self):
    """Paper PMIDs attached to this study.

    Returns
    -------
    list of str
        list of all the PMIDs
    """
    conn_handler = SQLConnectionHandler()
    sql = ("SELECT pmid FROM qiita.{0}_pmid WHERE "
           "study_id = %s".format(self._table))
    return [row[0] for row in
            conn_handler.execute_fetchall(sql, (self._id, ))]
@property + def investigation(self): + """ Returns Investigation this study is part of + + Returns + ------- + Investigation id + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT investigation_id FROM qiita.investigation_study WHERE " + "study_id = %s") + inv = conn_handler.execute_fetchone(sql, (self._id, )) + return inv[0] if inv is not None else inv + + @property + def sample_template(self): + """ Returns sample_template information id + + Returns + ------- + SampleTemplate id + """ + return self._id + + @property + def data_types(self): + """Returns list of the data types for this study + + Returns + ------- + list of str + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT DISTINCT DT.data_type FROM qiita.study_raw_data SRD " + "JOIN qiita.common_prep_info CPI ON SRD.raw_data_id = " + "CPI.raw_data_id JOIN qiita.data_type DT ON CPI.data_type_id = " + "DT.data_type_id WHERE SRD.study_id = %s") + return [x[0] for x in conn_handler.execute_fetchall(sql, (self._id,))] + + @property + def raw_data(self): + """ Returns list of data ids for raw data info + + Returns + ------- + list of RawData ids + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT raw_data_id FROM qiita.study_raw_data WHERE " + "study_id = %s") + return [x[0] for x in conn_handler.execute_fetchall(sql, (self._id,))] + + @property + def preprocessed_data(self): + """ Returns list of data ids for preprocessed data info + + Returns + ------- + list of PreprocessedData ids + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT preprocessed_data_id FROM qiita.study_preprocessed_data" + " WHERE study_id = %s") + return [x[0] for x in conn_handler.execute_fetchall(sql, (self._id,))] + + @property + def processed_data(self): + """ Returns list of data ids for processed data info + + Returns + ------- + list of ProcessedData ids + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT processed_data_id FROM " + "qiita.preprocessed_processed_data WHERE " + 
"preprocessed_data_id IN (SELECT preprocessed_data_id FROM " + "qiita.study_preprocessed_data where study_id = %s)") + return [x[0] for x in conn_handler.execute_fetchall(sql, (self._id,))] + +# --- methods --- + def add_pmid(self, pmid): + """Adds PMID to study + + Parameters + ---------- + pmid : str + pmid to associate with study + """ + conn_handler = SQLConnectionHandler() + sql = ("INSERT INTO qiita.{0}_pmid (study_id, pmid) " + "VALUES (%s, %s)".format(self._table)) + conn_handler.execute(sql, (self._id, pmid)) + + +class StudyPerson(QiitaObject): + r"""Object handling information pertaining to people involved in a study + + Attributes + ---------- + name : str + name of the person + email : str + email of the person + address : str or None + address of the person + phone : str or None + phone number of the person + """ + _table = "study_person" + + @classmethod + def exists(cls, name, email): + """Checks if a person exists + + Parameters + ---------- + name: str + Name of the person + email: str + Email of the person + + Returns + ------- + bool + True if person exists else false + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT exists(SELECT * FROM qiita.{0} WHERE " + "name = %s AND email = %s)".format(cls._table)) + return conn_handler.execute_fetchone(sql, (name, email))[0] + + @classmethod + def create(cls, name, email, address=None, phone=None): + """Create a StudyPerson object, checking if person already exists. 
+ + Parameters + ---------- + name : str + name of person + email : str + email of person + address : str, optional + address of person + phone : str, optional + phone number of person + + Returns + ------- + New StudyPerson object + + """ + if cls.exists(name, email): + sql = ("SELECT study_person_id from qiita.{0} WHERE name = %s and" + " email = %s".format(cls._table)) + conn_handler = SQLConnectionHandler() + spid = conn_handler.execute_fetchone(sql, (name, email)) + + # Doesn't exist so insert new person + else: + sql = ("INSERT INTO qiita.{0} (name, email, address, phone) VALUES" + " (%s, %s, %s, %s) RETURNING " + "study_person_id".format(cls._table)) + conn_handler = SQLConnectionHandler() + spid = conn_handler.execute_fetchone(sql, (name, email, address, + phone)) + return cls(spid[0]) + + # Properties + @property + def name(self): + """Returns the name of the person + + Returns + ------- + str + Name of person + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT name FROM qiita.{0} WHERE " + "study_person_id = %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @property + def email(self): + """Returns the email of the person + + Returns + ------- + str + Email of person + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT email FROM qiita.{0} WHERE " + "study_person_id = %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @property + def address(self): + """Returns the address of the person + + Returns + ------- + str or None + address or None if no address in database + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT address FROM qiita.{0} WHERE study_person_id =" + " %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @address.setter + def address(self, value): + """Set/update the address of the person + + Parameters + ---------- + value : str + New address for person + """ + conn_handler = 
SQLConnectionHandler() + sql = ("UPDATE qiita.{0} SET address = %s WHERE " + "study_person_id = %s".format(self._table)) + conn_handler.execute(sql, (value, self._id)) + + @property + def phone(self): + """Returns the phone number of the person + + Returns + ------- + str or None + phone number or None if no phone number in database + """ + conn_handler = SQLConnectionHandler() + sql = ("SELECT phone FROM qiita.{0} WHERE " + "study_person_id = %s".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @phone.setter + def phone(self, value): + """Set/update the phone number of the person + + Parameters + ---------- + value : str + New phone number for person + """ + conn_handler = SQLConnectionHandler() + sql = ("UPDATE qiita.{0} SET phone = %s WHERE " + "study_person_id = %s".format(self._table)) + conn_handler.execute(sql, (value, self._id)) diff --git a/qiita_db/support_files/initialize.sql b/qiita_db/support_files/initialize.sql new file mode 100644 index 000000000..11b464655 --- /dev/null +++ b/qiita_db/support_files/initialize.sql @@ -0,0 +1,44 @@ +-- Populate user_level table +INSERT INTO qiita.user_level (name, description) VALUES ('admin', 'Can access and do all the things'), ('dev', 'Can access all data and info about errors'), ('superuser', 'Can see all studies, can run analyses'), ('user', 'Can see own and public data, can run analyses'), ('unverified', 'Email not verified'), ('guest', 'Can view & download public data'); + +-- Populate analysis_status table +INSERT INTO qiita.analysis_status (status) VALUES ('in_construction'), ('queued'), ('running'), ('completed'), ('error'), ('public'); + +-- Populate job_status table +INSERT INTO qiita.job_status (status) VALUES ('queued'), ('running'), ('completed'), ('error'); + +-- Populate data_type table +INSERT INTO qiita.data_type (data_type) VALUES ('16S'), ('18S'), ('ITS'), ('Proteomic'), ('Metabolomic'), ('Metagenomic'); + +-- Populate filetype table +INSERT INTO qiita.filetype (type) 
VALUES ('FASTA'), ('FASTQ'), ('SPECTRA'); + +-- Populate emp_status table +INSERT INTO qiita.emp_status (emp_status) VALUES ('EMP'), ('EMP_Processed'), ('NOT_EMP'); + +-- Populate study_status table +INSERT INTO qiita.study_status (status, description) VALUES ('waiting_approval', 'Awaiting approval of metadata'), ('public', 'Anyone can see this study'), ('private', 'Only owner and shared users can see this study'); + +-- Populate timeseries_type table +INSERT INTO qiita.timeseries_type (timeseries_type) VALUES ('NOT_TIMESERIES'), ('TIMESERIES_1'), ('TIMESERIES_2'); + +-- Populate severity table +INSERT INTO qiita.severity (severity) VALUES ('Warning'), ('Runtime'), ('Fatal'); + +-- Populate portal_type table +INSERT INTO qiita.portal_type (portal, description) VALUES ('QIIME', 'QIIME portal'), ('EMP', 'EMP portal'), ('QIIME_EMP', 'QIIME and EMP portals'); + +-- Populate sample_status table +INSERT INTO qiita.required_sample_info_status (status) VALUES ('received'), ('in_preparation'), ('running'), ('completed'); + +-- Populate filepath_type table +INSERT INTO qiita.filepath_type (filepath_type) VALUES ('raw_sequences'), ('raw_barcodes'), ('raw_spectra'), ('preprocessed_sequences'), ('preprocessed_sequences_qual'), ('biom'), ('directory'), ('plain_text'); + +-- Populate checksum_algorithm table +INSERT INTO qiita.checksum_algorithm (name) VALUES ('crc32'); + +-- Populate commands available +INSERT INTO qiita.command (name, command, input, required, optional, output) VALUES +('Summarize Taxa', 'summarize_taxa_through_plots.py', '{"--otu_table_fp":null}', '{}', '{"--mapping_category":null, "--mapping_fp":null,"--sort":null}', '{"--output_dir":null}'), +('Beta Diversity', 'beta_diversity_through_plots.py', '{"--otu_table_fp":null,"--mapping_fp":null}', '{}', '{"--tree_fp":null,"--color_by_all_fields":null,"--seqs_per_sample":null}', '{"--output_dir":null}'), +('Alpha Rarefaction', 'alpha_rarefaction.py', '{"--otu_table_fp":null,"--mapping_fp":null}', '{}', 
'{"--tree_fp":null,"--num_steps":null,"--min_rare_depth":null,"--max_rare_depth":null,"--retain_intermediate_files":false}', '{"--output_dir":null}'); diff --git a/qiita_db/support_files/populate_test_db.sql b/qiita_db/support_files/populate_test_db.sql new file mode 100644 index 000000000..275fdec6e --- /dev/null +++ b/qiita_db/support_files/populate_test_db.sql @@ -0,0 +1,348 @@ +-- Insert some users in the system. Passwords are 'password' for all users +INSERT INTO qiita.qiita_user (email, user_level_id, password, name, + affiliation, address, phone) VALUES + ('test@foo.bar', 4, + '$2a$12$gnUi8Qg.0tvW243v889BhOBhWLIHyIJjjgaG6dxuRJkUM8nXG9Efe', 'Dude', + 'Nowhere University', '123 fake st, Apt 0, Faketown, CO 80302', + '111-222-3344'), + ('shared@foo.bar', 4, + '$2a$12$gnUi8Qg.0tvW243v889BhOBhWLIHyIJjjgaG6dxuRJkUM8nXG9Efe', 'Shared', + 'Nowhere University', '123 fake st, Apt 0, Faketown, CO 80302', + '111-222-3344'), + ('admin@foo.bar', 4, + '$2a$12$gnUi8Qg.0tvW243v889BhOBhWLIHyIJjjgaG6dxuRJkUM8nXG9Efe', 'Admin', + 'Owner University', '312 noname st, Apt K, Nonexistantown, CO 80302', + '222-444-6789'), + ('demo@microbio.me', 4, + '$2a$12$gnUi8Qg.0tvW243v889BhOBhWLIHyIJjjgaG6dxuRJkUM8nXG9Efe', 'Demo', + 'Qitta Dev', '1345 Colorado Avenue', '303-492-1984'); + +-- Insert some study persons +INSERT INTO qiita.study_person (name, email, address, phone) VALUES + ('LabDude', 'lab_dude@foo.bar', '123 lab street', '121-222-3333'), + ('empDude', 'emp_dude@foo.bar', NULL, '444-222-3333'), + ('PIDude', 'PI_dude@foo.bar', '123 PI street', NULL); + +-- Insert a study: EMP 1001 +INSERT INTO qiita.study (email, study_status_id, emp_person_id, first_contact, + funding, timeseries_type_id, lab_person_id, metadata_complete, + mixs_compliant, most_recent_contact, number_samples_collected, + number_samples_promised, portal_type_id, principal_investigator_id, reprocess, + spatial_series, study_title, study_alias, study_description, + study_abstract, vamps_id) VALUES + ('test@foo.bar', 
2, 2, '2014-05-19 16:10', NULL, 1, 1, TRUE, TRUE, + '2014-05-19 16:11', 27, 27, 2, 3, FALSE, FALSE, + 'Identification of the Microbiomes for Cannabis Soils', 'Cannabis Soils', 'Analysis of the Cannabis Plant Microbiome', + 'This is a preliminary study to examine the microbiota associated with the Cannabis plant. Soils samples from the bulk soil, soil associated with the roots, and the rhizosphere were extracted and the DNA sequenced. Roots from three independent plants of different strains were examined. These roots were obtained November 11, 2011 from plants that had been harvested in the summer. Future studies will attempt to analyze the soils and rhizospheres from the same location at different time points in the plant lifecycle.', + NULL); + +-- Insert study_users (share study 1 with shared user) +INSERT INTO qiita.study_users (study_id, email) VALUES (1, 'shared@foo.bar'); +INSERT INTO qiita.study_users (study_id, email) VALUES (1, 'demo@microbio.me'); + +-- Insert PMIDs for study +INSERT INTO qiita.study_pmid (study_id, pmid) VALUES (1, '123456'), (1, '7891011'); + +-- Insert an investigation +INSERT INTO qiita.investigation (name, description, contact_person_id) VALUES + ('TestInvestigation', 'An investigation for testing purposes', 3); + +-- Insert investigation_study (link study 1 with investigation 1) +INSERT INTO qiita.investigation_study (investigation_id, study_id) VALUES (1, 1); + +-- Insert the study experimental factor for study 1 +INSERT INTO qiita.study_experimental_factor (study_id, efo_id) VALUES (1, 1); + +-- Insert the raw data filepaths for study 1 +INSERT INTO qiita.filepath (filepath, filepath_type_id, checksum, checksum_algorithm_id) VALUES ('1_s_G1_L001_sequences.fastq.gz', 1, '852952723', 1), ('1_s_G1_L001_sequences_barcodes.fastq.gz', 2, '852952723', 1), ('2_sequences.fastq.gz', 1, '852952723', 1), ('2_sequences_barcodes.fastq.gz', 2, '852952723', 1); + +-- Insert the raw data information for study 1 +INSERT INTO qiita.raw_data 
(filetype_id) VALUES (2), (2); + +-- Insert (link) the raw data with the raw filepaths +INSERT INTO qiita.raw_filepath (raw_data_id, filepath_id) VALUES (1, 1), (1, 2), (2, 3), (2, 4); + +-- Insert (link) the study with the raw data +INSERT INTO qiita.study_raw_data (study_id, raw_data_id) VALUES (1, 1), (1, 2); + +-- Add the required_sample_info for study 1 +INSERT INTO qiita.required_sample_info (study_id, sample_id, physical_location, has_physical_specimen, has_extracted_data, sample_type, required_sample_info_status_id, collection_timestamp, host_subject_id, description) VALUES + (1, 'SKB8.640193', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M7', 'Cannabis Soil Microbiome'), + (1, 'SKD8.640184', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D9', 'Cannabis Soil Microbiome'), + (1, 'SKB7.640196', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M8', 'Cannabis Soil Microbiome'), + (1, 'SKM9.640192', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B8', 'Cannabis Soil Microbiome'), + (1, 'SKM4.640180', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D2', 'Cannabis Soil Microbiome'), + (1, 'SKM5.640177', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M3', 'Cannabis Soil Microbiome'), + (1, 'SKB5.640181', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M4', 'Cannabis Soil Microbiome'), + (1, 'SKD6.640190', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B9', 'Cannabis Soil Microbiome'), + (1, 'SKB2.640194', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B4', 'Cannabis Soil Microbiome'), + (1, 'SKD2.640178', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B5', 'Cannabis Soil Microbiome'), + (1, 'SKM7.640188', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B6', 'Cannabis Soil Microbiome'), + (1, 'SKB1.640202', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M2', 'Cannabis Soil Microbiome'), + (1, 
'SKD1.640179', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M5', 'Cannabis Soil Microbiome'), + (1, 'SKD3.640198', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B1', 'Cannabis Soil Microbiome'), + (1, 'SKM8.640201', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D8', 'Cannabis Soil Microbiome'), + (1, 'SKM2.640199', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D4', 'Cannabis Soil Microbiome'), + (1, 'SKB9.640200', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B3', 'Cannabis Soil Microbiome'), + (1, 'SKD5.640186', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M1', 'Cannabis Soil Microbiome'), + (1, 'SKM3.640197', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B7', 'Cannabis Soil Microbiome'), + (1, 'SKD9.640182', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D3', 'Cannabis Soil Microbiome'), + (1, 'SKB4.640189', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D7', 'Cannabis Soil Microbiome'), + (1, 'SKD7.640191', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D6', 'Cannabis Soil Microbiome'), + (1, 'SKM6.640187', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:B2', 'Cannabis Soil Microbiome'), + (1, 'SKD4.640185', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M9', 'Cannabis Soil Microbiome'), + (1, 'SKB3.640195', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:M6', 'Cannabis Soil Microbiome'), + (1, 'SKB6.640176', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D5', 'Cannabis Soil Microbiome'), + (1, 'SKM1.640183', 'ANL', TRUE, TRUE, 'ENVO:soil', 4, '2011-11-11 13:00', '1001:D1', 'Cannabis Soil Microbiome'); + +-- Add the study sample columns for study 1 +INSERT INTO qiita.study_sample_columns (study_id, column_name, column_type) VALUES + (1, 'sample_id', 'varchar'), + (1, 'SEASON_ENVIRONMENT', 'varchar'), + (1, 'ASSIGNED_FROM_GEO', 'varchar'), + (1, 'TEXTURE', 
'varchar'), + (1, 'TAXON_ID', 'varchar'), + (1, 'DEPTH', 'float8'), + (1, 'HOST_TAXID', 'varchar'), + (1, 'COMMON_NAME', 'varchar'), + (1, 'WATER_CONTENT_SOIL', 'float8'), + (1, 'ELEVATION', 'float8'), + (1, 'TEMP', 'float8'), + (1, 'TOT_NITRO', 'float8'), + (1, 'SAMP_SALINITY', 'float8'), + (1, 'ALTITUDE', 'float8'), + (1, 'ENV_BIOME', 'varchar'), + (1, 'COUNTRY', 'varchar'), + (1, 'PH', 'float8'), + (1, 'ANONYMIZED_NAME', 'varchar'), + (1, 'TOT_ORG_CARB', 'float8'), + (1, 'LONGITUDE', 'float8'), + (1, 'Description_duplicate', 'varchar'), + (1, 'ENV_FEATURE', 'varchar'), + (1, 'LATITUDE', 'float8'); + +-- Create the sample_1 dynamic table +CREATE TABLE qiita.sample_1 ( + sample_id varchar, + SEASON_ENVIRONMENT varchar, + ASSIGNED_FROM_GEO varchar, + TEXTURE varchar, + TAXON_ID varchar, + DEPTH float8, + HOST_TAXID varchar, + COMMON_NAME varchar, + WATER_CONTENT_SOIL float8, + ELEVATION float8, + TEMP float8, + TOT_NITRO float8, + SAMP_SALINITY float8, + ALTITUDE float8, + ENV_BIOME varchar, + COUNTRY varchar, + PH float8, + ANONYMIZED_NAME varchar, + TOT_ORG_CARB float8, + LONGITUDE float8, + Description_duplicate varchar, + ENV_FEATURE varchar, + LATITUDE float8, + CONSTRAINT pk_sample_1 PRIMARY KEY ( sample_id ), + CONSTRAINT fk_sample_1_sample_id FOREIGN KEY (sample_id) REFERENCES qiita.required_sample_info( sample_id ) +); + +-- Populates the sample_1 dynamic table +INSERT INTO qiita.sample_1 (sample_id, SEASON_ENVIRONMENT, ASSIGNED_FROM_GEO, TEXTURE, TAXON_ID, DEPTH, HOST_TAXID, COMMON_NAME, WATER_CONTENT_SOIL, ELEVATION, TEMP, TOT_NITRO, SAMP_SALINITY, ALTITUDE, ENV_BIOME, COUNTRY, PH, ANONYMIZED_NAME, TOT_ORG_CARB, LONGITUDE, Description_duplicate, ENV_FEATURE, LATITUDE) VALUES + ('SKM7.640188', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '1118232', 0.15, '3483', 'root metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM7', 3.31, -117.241111, 'Bucu Roots', 
'ENVO:plant-associated habitat', 33.193611), + ('SKD9.640182', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '1118232', 0.15, '3483', 'root metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKD9', 4.32, -117.241111, 'Diesel Root', 'ENVO:plant-associated habitat', 33.193611), + ('SKM8.640201', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '1118232', 0.15, '3483', 'root metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM8', 3.31, -117.241111, 'Bucu Roots', 'ENVO:plant-associated habitat', 33.193611), + ('SKB8.640193', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '1118232', 0.15, '3483', 'root metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB8', 5, -117.241111, 'Burmese root', 'ENVO:plant-associated habitat', 33.193611), + ('SKD2.640178', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '410658', 0.15, '3483', 'soil metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD2', 4.32, -117.241111, 'Diesel bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKM3.640197', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '410658', 0.15, '3483', 'soil metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM3', 3.31, -117.241111, 'Bucu bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKM4.640180', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM4', 3.31, -117.241111, 'Bucu Rhizo', 'ENVO:plant-associated habitat', 
33.193611), + ('SKB9.640200', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '1118232', 0.15, '3483', 'root metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKB9', 5, -117.241111, 'Burmese root', 'ENVO:plant-associated habitat', 33.193611), + ('SKB4.640189', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB4', 5, -117.241111, 'Burmese Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKB5.640181', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB5', 5, -117.241111, 'Burmese Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKB6.640176', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB6', 5, -117.241111, 'Burmese Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKM2.640199', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '410658', 0.15, '3483', 'soil metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM2', 3.31, -117.241111, 'Bucu bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKM5.640177', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM5', 3.31, -117.241111, 'Bucu Rhizo', 'ENVO:plant-associated habitat', 33.193611), + 
('SKB1.640202', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '410658', 0.15, '3483', 'soil metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB1', 5, -117.241111, 'Burmese bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKD8.640184', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '1118232', 0.15, '3483', 'root metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD8', 4.32, -117.241111, 'Diesel Root', 'ENVO:plant-associated habitat', 33.193611), + ('SKD4.640185', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD4', 4.32, -117.241111, 'Diesel Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKB3.640195', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '410658', 0.15, '3483', 'soil metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB3', 5, -117.241111, 'Burmese bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKM1.640183', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '410658', 0.15, '3483', 'soil metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM1', 3.31, -117.241111, 'Bucu bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKB7.640196', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '1118232', 0.15, '3483', 'root metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB7', 5, -117.241111, 'Burmese root', 'ENVO:plant-associated habitat', 33.193611), + ('SKD3.640198', 'winter', 'n', '66 sand, 
16.3 silt, 17.7 clay', '410658', 0.15, '3483', 'soil metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD3', 4.32, -117.241111, 'Diesel bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKD7.640191', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '1118232', 0.15, '3483', 'root metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD7', 4.32, -117.241111, 'Diesel Root', 'ENVO:plant-associated habitat', 33.193611), + ('SKD6.640190', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD6', 4.32, -117.241111, 'Diesel Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKB2.640194', 'winter', 'n', '64.6 sand, 17.6 silt, 17.8 clay', '410658', 0.15, '3483', 'soil metagenome', 0.164, 114, 15, 1.41, 7.15, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.94, 'SKB2', 5, -117.241111, 'Burmese bulk', 'ENVO:plant-associated habitat', 33.193611), + ('SKM9.640192', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '1118232', 0.15, '3483', 'root metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM9', 3.31, -117.241111, 'Bucu Roots', 'ENVO:plant-associated habitat', 33.193611), + ('SKM6.640187', 'winter', 'n', '63.1 sand, 17.7 silt, 19.2 clay', '939928', 0.15, '3483', 'rhizosphere metagenome', 0.101, 114, 15, 1.3, 7.44, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.82, 'SKM6', 3.31, -117.241111, 'Bucu Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKD5.640186', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '939928', 0.15, 
'3483', 'rhizosphere metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD5', 4.32, -117.241111, 'Diesel Rhizo', 'ENVO:plant-associated habitat', 33.193611), + ('SKD1.640179', 'winter', 'n', '66 sand, 16.3 silt, 17.7 clay', '410658', 0.15, '3483', 'soil metagenome', 0.178, 114, 15, 1.51, 7.1, 0, 'ENVO:Temperate grasslands, savannas, and shrubland biome', 'GAZ:United States of America', 6.8, 'SKD1', 4.32, -117.241111, 'Diesel bulk', 'ENVO:plant-associated habitat', 33.193611); + +-- Add the common prep info for study 1 +INSERT INTO qiita.common_prep_info (raw_data_id, sample_id, center_name, center_project_name, ebi_submission_accession, ebi_study_accession, emp_status_id, data_type_id) VALUES + (1, 'SKB8.640193', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD8.640184', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB7.640196', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM9.640192', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM4.640180', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM5.640177', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB5.640181', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD6.640190', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB2.640194', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD2.640178', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM7.640188', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB1.640202', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD1.640179', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD3.640198', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM8.640201', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM2.640199', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB9.640200', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD5.640186', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM3.640197', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD9.640182', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB4.640189', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKD7.640191', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM6.640187', 'ANL', NULL, NULL, 
NULL, 1, 2), + (1, 'SKD4.640185', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB3.640195', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKB6.640176', 'ANL', NULL, NULL, NULL, 1, 2), + (1, 'SKM1.640183', 'ANL', NULL, NULL, NULL, 1, 2); + +-- Add raw data prep columns +INSERT INTO qiita.raw_data_prep_columns (raw_data_id, column_name, column_type) VALUES + (1, 'sample_id', 'varchar'), + (1, 'BarcodeSequence', 'varchar'), + (1, 'LIBRARY_CONSTRUCTION_PROTOCOL', 'varchar'), + (1, 'LinkerPrimerSequence', 'varchar'), + (1, 'TARGET_SUBFRAGMENT', 'varchar'), + (1, 'target_gene', 'varchar'), + (1, 'RUN_CENTER', 'varchar'), + (1, 'RUN_PREFIX', 'varchar'), + (1, 'RUN_DATE', 'varchar'), + (1, 'EXPERIMENT_CENTER', 'varchar'), + (1, 'EXPERIMENT_DESIGN_DESCRIPTION', 'varchar'), + (1, 'EXPERIMENT_TITLE', 'varchar'), + (1, 'PLATFORM', 'varchar'), + (1, 'SAMP_SIZE', 'varchar'), + (1, 'SEQUENCING_METH', 'varchar'), + (1, 'illumina_technology', 'varchar'), + (1, 'SAMPLE_CENTER', 'varchar'), + (1, 'pcr_primers', 'varchar'), + (1, 'STUDY_CENTER', 'varchar'); + +-- Create the prep_1 dynamic table +CREATE TABLE qiita.prep_1 ( + sample_id varchar, + BarcodeSequence varchar, + LIBRARY_CONSTRUCTION_PROTOCOL varchar, + LinkerPrimerSequence varchar, + TARGET_SUBFRAGMENT varchar, + target_gene varchar, + RUN_CENTER varchar, + RUN_PREFIX varchar, + RUN_DATE varchar, + EXPERIMENT_CENTER varchar, + EXPERIMENT_DESIGN_DESCRIPTION varchar, + EXPERIMENT_TITLE varchar, + PLATFORM varchar, + SAMP_SIZE varchar, + SEQUENCING_METH varchar, + illumina_technology varchar, + SAMPLE_CENTER varchar, + pcr_primers varchar, + STUDY_CENTER varchar, + CONSTRAINT pk_prep_1 PRIMARY KEY ( sample_id ), + CONSTRAINT fk_prep_1_sample_id FOREIGN KEY (sample_id) REFERENCES qiita.required_sample_info( sample_id ) +); + +-- Populates the prep_1 dynamic table +INSERT INTO qiita.prep_1 (sample_id, BarcodeSequence, LIBRARY_CONSTRUCTION_PROTOCOL, LinkerPrimerSequence, TARGET_SUBFRAGMENT, target_gene, RUN_CENTER, RUN_PREFIX, RUN_DATE, 
EXPERIMENT_CENTER, EXPERIMENT_DESIGN_DESCRIPTION, EXPERIMENT_TITLE, PLATFORM, SAMP_SIZE, SEQUENCING_METH, illumina_technology, SAMPLE_CENTER, pcr_primers, STUDY_CENTER) VALUES + ('SKB1.640202', 'GTCCGCAAGTTA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB2.640194', 'CGTAGAGCTCTC', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] 
The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB3.640195', 'CCTCTGAGAGCT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB4.640189', 'CCTCGATGCAGT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. 
prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB5.640181', 'GCGGACTATTCA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB6.640176', 'CGTGCACAATTG', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. 
[For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB7.640196', 'CGGCCTAAGTTC', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB8.640193', 'AGCGCTCACATC', 'This analysis was done as in Caporaso et al 2011 Genome research. 
The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKB9.640200', 'TGGTTATGGCAC', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] 
The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD1.640179', 'CGAGGTTCTGAT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD2.640178', 'AACTCCTGTGGA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. 
prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD3.640198', 'TAATGGTCGTAG', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD4.640185', 'TTGCACCGTCGA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. 
[For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD5.640186', 'TGCTACAGACGT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD6.640190', 'ATGGCCTGACTA', 'This analysis was done as in Caporaso et al 2011 Genome research. 
The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD7.640191', 'ACGCACATACAA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] 
The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD8.640184', 'TGAGTGGTCTGT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKD9.640182', 'GATAGCACTCGT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. 
prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM1.640183', 'TAGCGCGAACTT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM2.640199', 'CATACACGCACC', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. 
[For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM3.640197', 'ACCTCAGTCAAG', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM4.640180', 'TCGACCAAACAC', 'This analysis was done as in Caporaso et al 2011 Genome research. 
The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM5.640177', 'CCACCCAGTAAC', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] 
The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM6.640187', 'ATATCGCGATGA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM7.640188', 'CGCCGGTAATCT', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. 
prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM8.640201', 'CCGATGCCTTGA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. [For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'), + ('SKM9.640192', 'AGCAGGCACGAA', 'This analysis was done as in Caporaso et al 2011 Genome research. The PCR primers (F515/R806) were developed against the V4 region of the 16S rRNA (both bacteria and archaea), which we determined would yield optimal community clustering with reads of this length using a procedure similar to that of ref. 15. 
[For reference, this primer pair amplifies the region 533_786 in the Escherichia coli strain 83972 sequence (greengenes accession no. prokMSA_id:470367).] The reverse PCR primer is barcoded with a 12-base error-correcting Golay code to facilitate multiplexing of up to 1,500 samples per lane, and both PCR primers contain sequencer adapter regions.', 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', 's_G1_L001_sequences', '8/1/12', 'ANL', 'micro biome of soil and rhizosphere of cannabis plants from CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', 'Sequencing by synthesis', 'MiSeq', 'ANL', 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'); + +-- Insert preprocessed information for raw data 1 +INSERT INTO qiita.preprocessed_data (preprocessed_params_table, preprocessed_params_id, submitted_to_insdc) VALUES ('preprocessed_sequence_illumina_params', 1, TRUE), ('preprocessed_sequence_illumina_params', 2, FALSE); + +-- Link the new preprocessed data with the raw data +INSERT INTO qiita.raw_preprocessed_data (raw_data_id, preprocessed_data_id) VALUES (1, 1), (1, 2); + +-- Insert (link) preprocessed information to study 1 +INSERT INTO qiita.study_preprocessed_data (preprocessed_data_id, study_id) VALUES (1, 1), (2, 1); + +-- Insert the preprocessed filepath for raw data 1 +INSERT INTO qiita.filepath (filepath, filepath_type_id, checksum, checksum_algorithm_id) VALUES ('1_seqs.fna', 4, '852952723', 1), ('1_seqs.qual', 5, '852952723', 1); + +-- Insert (link) the preprocessed data with the preprocessed filepaths +INSERT INTO qiita.preprocessed_filepath (preprocessed_data_id, filepath_id) VALUES (1, 5), (1, 6); + +-- Insert the preprocessed illumina params used for raw data 1 +INSERT INTO qiita.preprocessed_sequence_illumina_params (trim_length) VALUES (151), (100); + +-- Insert processed information for study 0 and processed data 1 +INSERT INTO qiita.processed_data (processed_params_table, processed_params_id, processed_date) VALUES ('processed_params_uclust', 1, 
'Mon Oct 1 09:30:27 2012'); + +-- Link the processed data with the preprocessed data +INSERT INTO qiita.preprocessed_processed_data (preprocessed_data_id, processed_data_id) VALUES (1, 1); + +-- Populate the reference table +INSERT INTO qiita.reference (reference_name, reference_version, sequence_filepath, taxonomy_filepath, tree_filepath) VALUES ('GreenGenes', '4feb2011', 'gg_97_otus_4feb2011.fasta', 'greengenes_tax.txt', 'gg_97_otus_4feb2011.tre'); + +-- Insert the processed params uclust used for preprocessed data 1 +INSERT INTO qiita.processed_params_uclust (similarity, enable_rev_strand_match, suppress_new_clusters, reference_id) VALUES (0.97, TRUE, TRUE, 1); + +-- Insert the biom table filepath for processed data 1 +INSERT INTO qiita.filepath (filepath, filepath_type_id, checksum, checksum_algorithm_id) VALUES ('1_study_1001_closed_reference_otu_table.biom', 6, '852952723', 1); + +-- Insert (link) the processed data with the processed filepath +INSERT INTO qiita.processed_filepath (processed_data_id, filepath_id) VALUES (1, 7); + +-- Insert filepath for job results files +INSERT INTO qiita.filepath (filepath, filepath_type_id, checksum, checksum_algorithm_id) VALUES ('1_job_result.txt', 8, '852952723', 1), ('2_test_folder', 7, '852952723', 1); + +-- Insert jobs +INSERT INTO qiita.job (data_type_id, job_status_id, command_id, options) VALUES (1, 1, 1, '{"--otu_table_fp":1}'), (1, 3, 2, '{"--otu_table_fp":1,"--mapping_fp":1}'), (1, 1, 2, '{"--otu_table_fp":1,"--mapping_fp":1}'); + +-- Insert Analysis +INSERT INTO qiita.analysis (email, name, description, analysis_status_id, pmid) VALUES ('test@foo.bar', 'SomeAnalysis', 'A test analysis', 1, '121112'), ('test@foo.bar', 'SomeSecondAnalysis', 'Another test analysis', 1, '22221112'); + +-- Attach jobs to analysis +INSERT INTO qiita.analysis_job (analysis_id, job_id) VALUES (1, 1), (1, 2), (2, 3); + +-- Attach filepath to analysis +INSERT INTO qiita.analysis_filepath (analysis_id, filepath_id) VALUES (1, 7); + +-- 
Attach samples to analysis +INSERT INTO qiita.analysis_sample (analysis_id, processed_data_id, sample_id) VALUES (1,1,'SKB8.640193'), (1,1,'SKD8.640184'), (1,1,'SKB7.640196'), (1,1,'SKM9.640192'), (1,1,'SKM4.640180'), (2,1,'SKB8.640193'), (2,1,'SKD8.640184'), (2,1,'SKB7.640196'); + +--Share analysis with shared user +INSERT INTO qiita.analysis_users (analysis_id, email) VALUES (1, 'shared@foo.bar'); + +-- Add job results +INSERT INTO qiita.job_results_filepath (job_id, filepath_id) VALUES (1, 8), (2, 9); diff --git a/qiita_db/support_files/qiita-db-settings.sql b/qiita_db/support_files/qiita-db-settings.sql new file mode 100644 index 000000000..091608dfe --- /dev/null +++ b/qiita_db/support_files/qiita-db-settings.sql @@ -0,0 +1,5 @@ +CREATE TABLE settings ( + test bool DEFAULT True NOT NULL, + base_data_dir varchar NOT NULL, + base_work_dir varchar NOT NULL + ); \ No newline at end of file diff --git a/qiita_db/support_files/qiita-db.dbs b/qiita_db/support_files/qiita-db.dbs new file mode 100644 index 000000000..945e5105e --- /dev/null +++ b/qiita_db/support_files/qiita-db.dbs @@ -0,0 +1,1379 @@ + + + Qiita-DB layout + + + hHolds analysis information + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + Keeps track of the chain of analysis edits. Tracks what previous analysis a given analysis came from. +If a given analysis is not in child_id, it is the root of the chain. + + + + + + + + + + + + + + + + + + +
+ + Stores link between analysis and the data file used for the analysis. + + + + + + + + + + + + + + + + + + +
+ + Holds information for a one-to-many relation of analysis to the jobs in it + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + Links analyses to the users they are shared with + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + +
+ + + + + + +
+ + Table relates a column with a controlled vocabulary. + + + + + + + + + + + + + + + + + + +
+ + This table relates a column with an ontology. + + + + + + + + + + + + + + +
+ + Available commands for jobs + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + +
+ + + + + + +
+ + + + + + + + +
+ + + + + + + + + + + + + + + + +
+ + All possible statuses for projects relating to EMP. Whether they are part of, processed in accordance to, or not part of EMP. + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + Type of file (FASTA, FASTQ, SPECTRA, etc) + + + + + +
+ + Overarching investigation information. +An investigation comprises one or more individual studies. + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + Holds connection between jobs and the result filepaths + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+ + + + + + + + + + + + + +
+ + What portals are available to show a study in + + + + + + +
+ + Information on how raw data y was prepared (prep template) +Linked by y being raw_data_id from raw data table. + + + + + + + +
+ + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + Parameters used for processing sequence data. + + + + + +
+ + Parameters used for processing illumina sequence data. + + + + 3 + + + 0.75 + + + 0 + + + + +
+ + Parameters used for processing spectra data. + + + + + +
+ + + + + + + + + + + + +
+ + + + + + + + + + + + + +
+ + Parameters used for processing data using method uclust + + + + + + 0.97 + + + TRUE + + + TRUE + + + + + + + + + + +
+ + Holds all user information + + + 5 + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + +
+ + Holds the columns available for a given raw data prep + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + +
+ + + + + + +
+ + Required info for each sample. One row is one sample. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + data for samples in study x (sample template) +x is the study_id from study table + +MAKE SURE sample_id IS FK TO sample_id IN required_sample_info TABLE + + + + + + + + +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + EFO ontological link of experimental factors to studies + + + + + + + + + + + + +
+ + Contact information for the various people involved in a study + + + + + + + + +
+ + Links a study to all PMIDs for papers created from study + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + +
+ + links study to its raw data + + + + + + + + + + + + + + + +
+ + Holds information on which metadata columns are available for the study sample template + + + + + + + + + + + + + + +
+ + + + + + + +
+ + Links shared studies to users they are shared with + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + 'false' + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + Holds available user levels + + + + + + + + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + analysis tables + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/qiita_db/support_files/qiita-db.html b/qiita_db/support_files/qiita-db.html new file mode 100644 index 000000000..b335a021f --- /dev/null +++ b/qiita_db/support_files/qiita-db.html @@ -0,0 +1,4612 @@ + + + + +Database Documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Project qiita + Layout qiita + Tip: Move the mouse over columns to read the comments + Generated using DbSchema + + + + + +Group_analyses + + + + +Group_users + + + + +Group_study + + + + +Group_vocabularies + + + + +Group_ontology + + + + +Group_logging + + + + +Group_filepaths + + + + Foreign Key fk_term_synonym_term + term_synonym references term ( term_id ) + +term_id + Foreign Key fk_term_synonym_type_term + term_synonym references term ( synonym_id -> term_id ) + +synonym_id + Foreign Key fk_controlled_vocab_values + controlled_vocab_values references controlled_vocabularies ( controlled_vocab_id ) + +controlled_vocab_id + Foreign Key fk_investigation_study_person + investigation references study_person ( contact_person_id -> study_person_id ) + +contact_person_id + Foreign Key fk_investigation_study + investigation_study references investigation ( investigation_id ) + +investigation_id + Foreign Key fk_investigation_study_study + investigation_study references study ( study_id ) + +study_id + Foreign Key fk_study_experimental_factor + study_experimental_factor references study ( study_id ) + +study_id + Foreign Key fk_study_pmid_study + study_pmid references study ( study_id ) + +study_id + Foreign Key fk_analysis_user + analysis references qiita_user ( email ) + +email + Foreign Key fk_analysis_analysis_status + analysis references analysis_status ( analysis_status_id ) + +analysis_status_id + Foreign Key fk_analysis_filepath + 
analysis_filepath references analysis ( analysis_id ) + +analysis_id + Foreign Key fk_analysis_filepath_0 + analysis_filepath references filepath ( filepath_id ) + +filepath_id + Foreign Key fk_job_results_filepath + job_results_filepath references job ( job_id ) + +job_id + Foreign Key fk_job_results_filepath_0 + job_results_filepath references filepath ( filepath_id ) + +filepath_id + Foreign Key fk_job_function + job references command ( command_id ) + +command_id + Foreign Key fk_job_job_status_id + job references job_status ( job_status_id ) + +job_status_id + Foreign Key fk_job_data_type + job references data_type ( data_type_id ) + +data_type_id + Foreign Key fk_job + job references logging ( log_id -> logging_id ) + +log_id + Foreign Key fk_analysis_job_analysis + analysis_job references analysis ( analysis_id ) + +analysis_id + Foreign Key fk_analysis_job_job + analysis_job references job ( job_id ) + +job_id + Foreign Key fk_analysis_chain + analysis_chain references analysis ( parent_id -> analysis_id ) + +parent_id + Foreign Key fk_analysis_chain_0 + analysis_chain references analysis ( child_id -> analysis_id ) + +child_id + Foreign Key fk_analysis_sample_analysis + analysis_sample references analysis ( analysis_id ) + +analysis_id + Foreign Key fk_analysis_sample + analysis_sample references processed_data ( processed_data_id ) + +processed_data_id + Foreign Key fk_analysis_sample_0 + analysis_sample references required_sample_info ( sample_id ) + +sample_id + Foreign Key fk_column_controlled_vocabularies + column_controlled_vocabularies references mixs_field_description ( column_name ) + +column_name + Foreign Key fk_column_controlled_vocab2 + column_controlled_vocabularies references controlled_vocabularies ( controlled_vocab_id ) + +controlled_vocab_id + Foreign Key fk_column_ontology + column_ontology references mixs_field_description ( column_name ) + +column_name + Foreign Key fk_term_relationship_subj_term + term_relationship references term ( 
subject_term_id -> term_id ) + +subject_term_id + Foreign Key fk_term_relationship_pred_term + term_relationship references term ( predicate_term_id -> term_id ) + +predicate_term_id + Foreign Key fk_term_relationship_obj_term + term_relationship references term ( object_term_id -> term_id ) + +object_term_id + Foreign Key fk_term_relationship_ontology + term_relationship references ontology ( ontology_id ) + +ontology_id + Foreign Key fk_term_path_ontology + term_path references ontology ( ontology_id ) + +ontology_id + Foreign Key fk_term_path_relationship_type + term_path references relationship_type ( relationship_type_id ) + +relationship_type_id + Foreign Key fk_term_path_term_subject + term_path references term ( subject_term_id -> term_id ) + +subject_term_id + Foreign Key fk_term_path_term_predicate + term_path references term ( predicate_term_id -> term_id ) + +predicate_term_id + Foreign Key fk_term_path_term_object + term_path references term ( object_term_id -> term_id ) + +object_term_id + Foreign Key fk_term_ontology + term references ontology ( ontology_id ) + +ontology_id + Foreign Key fk_annotation_term + annotation references term ( term_id ) + +term_id + Foreign Key fk_dbxref_term + dbxref references term ( term_id ) + +term_id + Foreign Key fk_analysis_users_analysis + analysis_users references analysis ( analysis_id ) + +analysis_id + Foreign Key fk_analysis_users_user + analysis_users references qiita_user ( email ) + +email + Foreign Key fk_user_user_level + qiita_user references user_level ( user_level_id ) + +user_level_id + Foreign Key fk_filepath + filepath references filepath_type ( filepath_type_id ) + +filepath_type_id + Foreign Key fk_filepath_0 + filepath references checksum_algorithm ( checksum_algorithm_id ) + +checksum_algorithm_id + Foreign Key fk_study_preprocessed_data + study_preprocessed_data references study ( study_id ) + +study_id + Foreign Key fk_study_preprocessed_data_0 + study_preprocessed_data references 
preprocessed_data ( preprocessed_data_id ) + +preprocessed_data_id + Foreign Key fk_study_user + study references qiita_user ( email ) + +email + Foreign Key fk_study_study_status + study references study_status ( study_status_id ) + +study_status_id + Foreign Key fk_study_study_emp_person + study references study_person ( emp_person_id -> study_person_id ) + +emp_person_id + Foreign Key fk_study_study_lab_person + study references study_person ( lab_person_id -> study_person_id ) + +lab_person_id + Foreign Key fk_study_study_pi_person + study references study_person ( principal_investigator_id -> study_person_id ) + +principal_investigator_id + Foreign Key fk_study_timeseries_type + study references timeseries_type ( timeseries_type_id ) + +timeseries_type_id + Foreign Key fk_study + study references portal_type ( portal_type_id ) + +portal_type_id + Foreign Key fk_study_users_study + study_users references study ( study_id ) + +study_id + Foreign Key fk_study_users_user + study_users references qiita_user ( email ) + +email + Foreign Key fk_required_sample_info_study + required_sample_info references study ( study_id ) + +study_id + Foreign Key fk_required_sample_info + required_sample_info references required_sample_info_status ( required_sample_info_status_id ) + +required_sample_info_status_id + Foreign Key fk_study_mapping_columns_study + study_sample_columns references study ( study_id ) + +study_id + Foreign Key fk_required_prep_info_raw_data + common_prep_info references raw_data ( raw_data_id ) + +raw_data_id + Foreign Key fk_required_prep_info_emp_status + common_prep_info references emp_status ( emp_status_id ) + +emp_status_id + Foreign Key fk_required_prep_info + common_prep_info references required_sample_info ( sample_id ) + +sample_id + Foreign Key fk_required_prep_info_0 + common_prep_info references data_type ( data_type_id ) + +data_type_id + Foreign Key fk_prep_columns_raw_data + raw_data_prep_columns references raw_data ( raw_data_id ) + 
+raw_data_id + Foreign Key fk_raw_filepath + raw_filepath references filepath ( filepath_id ) + +filepath_id + Foreign Key fk_raw_filepath_0 + raw_filepath references raw_data ( raw_data_id ) + +raw_data_id + Foreign Key fk_preprocessed_processed_data + preprocessed_processed_data references preprocessed_data ( preprocessed_data_id ) + +preprocessed_data_id + Foreign Key fk_preprocessed_processed_data_0 + preprocessed_processed_data references processed_data ( processed_data_id ) + +processed_data_id + Foreign Key fk_study_raw_data_study + study_raw_data references study ( study_id ) + +study_id + Foreign Key fk_study_raw_data_raw_data + study_raw_data references raw_data ( raw_data_id ) + +raw_data_id + Foreign Key fk_raw_data_filetype + raw_data references filetype ( filetype_id ) + +filetype_id + Foreign Key fk_raw_preprocessed_data + raw_preprocessed_data references raw_data ( raw_data_id ) + +raw_data_id + Foreign Key fk_raw_preprocessed_data_0 + raw_preprocessed_data references preprocessed_data ( preprocessed_data_id ) + +preprocessed_data_id + Foreign Key fk_preprocessed_filepath + preprocessed_filepath references preprocessed_data ( preprocessed_data_id ) + +preprocessed_data_id + Foreign Key fk_preprocessed_filepath_0 + preprocessed_filepath references filepath ( filepath_id ) + +filepath_id + Foreign Key fk_processed_data_filepath + processed_filepath references processed_data ( processed_data_id ) + +processed_data_id + Foreign Key fk_processed_data_filepath_0 + processed_filepath references filepath ( filepath_id ) + +filepath_id + Foreign Key fk_logging_severity + logging references severity ( severity_id ) + +severity_id + Foreign Key fk_processed_params_uclust + processed_params_uclust references reference ( reference_id ) + +reference_id + Foreign Key fk_study_processed_data + study_processed_data references study ( study_id ) + +study_id + Foreign Key fk_study_processed_data_0 + study_processed_data references processed_data ( processed_data_id ) 
+ +processed_data_id + + +term_synonymTable qiita.term_synonym + Primary Key ( synonym_id ) +synonym_idsynonym_id bigserial not null +References term ( synonym_id -> term_id ) + Index ( term_id ) +term_idterm_id bigint not null +References term ( term_id ) + synonym_valuesynonym_value varchar not null + synonym_type_idsynonym_type_id bigint not null + + + + +controlled_vocab_valuesTable qiita.controlled_vocab_values + Primary Key ( vocab_value_id ) +vocab_value_idvocab_value_id bigserial not null + Index ( controlled_vocab_id ) +controlled_vocab_idcontrolled_vocab_id bigint not null +References controlled_vocabularies ( controlled_vocab_id ) + termterm varchar not null + order_byorder_by varchar not null + default_itemdefault_item varchar + + + + +controlled_vocabulariesTable qiita.controlled_vocabularies + Primary Key ( controlled_vocab_id ) +controlled_vocab_idcontrolled_vocab_id bigserial not null +Referred by column_controlled_vocabularies ( controlled_vocab_id ) +Referred by controlled_vocab_values ( controlled_vocab_id ) + vocab_namevocab_name varchar not null + + + + +severityTable qiita.severity + Primary Key ( severity_id ) +severity_idseverity_id serial not null +Referred by logging ( severity_id ) + severityseverity varchar not null + + + + +study_personTable qiita.study_person +Contact information for the various people involved in a study + Primary Key ( study_person_id ) +study_person_idstudy_person_id bigserial not null +Referred by investigation ( contact_person_id -> study_person_id ) +Referred by study ( emp_person_id -> study_person_id ) +Referred by study ( lab_person_id -> study_person_id ) +Referred by study ( principal_investigator_id -> study_person_id ) + namename varchar not null + emailemail varchar not null + addressaddress varchar(100) + phonephone varchar + + + + +investigationTable qiita.investigation +Overarching investigation information.
An investigation comprises one or more individual studies.
+ Primary Key ( investigation_id ) +investigation_idinvestigation_id bigserial not null +Referred by investigation_study ( investigation_id ) + namename varchar not null + descriptiondescription varchar not null +Describes the overarching goal of the investigation + Index ( contact_person_id ) +contact_person_idcontact_person_id bigint +References study_person ( contact_person_id -> study_person_id ) + + + + +investigation_studyTable qiita.investigation_study + Primary Key ( investigation_id, study_id ) Index ( investigation_id ) +investigation_idinvestigation_id bigint not null +References investigation ( investigation_id ) + Primary Key ( investigation_id, study_id ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + + + + +study_statusTable qiita.study_status + Primary Key ( study_status_id ) +study_status_idstudy_status_id bigserial not null +Referred by study ( study_status_id ) + statusstatus varchar not null + descriptiondescription varchar not null + + + + +study_experimental_factorTable qiita.study_experimental_factor +EFO ontological link of experimental factors to studies + Primary Key ( study_id, efo_id ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, efo_id ) +efo_idefo_id bigint not null + + + + +study_pmidTable qiita.study_pmid +Links a study to all PMIDs for papers created from study + Primary Key ( study_id, pmid ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, pmid ) +pmidpmid varchar not null + + + + +analysis_statusTable qiita.analysis_status + Primary Key ( analysis_status_id ) +analysis_status_idanalysis_status_id bigserial not null +Referred by analysis ( analysis_status_id ) + statusstatus varchar not null + + + + +analysisTable qiita.analysis +hHolds analysis information + Primary Key ( analysis_id ) +analysis_idanalysis_id bigserial not null +Unique identifier for analysis 
+Referred by analysis_chain ( parent_id -> analysis_id ) +Referred by analysis_chain ( child_id -> analysis_id ) +Referred by analysis_filepath ( analysis_id ) +Referred by analysis_job ( analysis_id ) +Referred by analysis_sample ( analysis_id ) +Referred by analysis_users ( analysis_id ) + Index ( email ) +emailemail varchar not null +Email for user who owns the analysis +References qiita_user ( email ) + namename varchar not null +Name of the analysis + descriptiondescription varchar not null + Index ( analysis_status_id ) +analysis_status_idanalysis_status_id bigint not null +References analysis_status ( analysis_status_id ) + pmidpmid varchar +PMID of paper from the analysis + + + + +analysis_filepathTable qiita.analysis_filepath +Stores link between analysis and the data file used for the analysis. + Index ( analysis_id ) Primary Key ( analysis_id, filepath_id ) +analysis_idanalysis_id bigint not null +References analysis ( analysis_id ) + Index ( filepath_id ) Primary Key ( analysis_id, filepath_id ) +filepath_idfilepath_id bigint not null +References filepath ( filepath_id ) + + + + +job_results_filepathTable qiita.job_results_filepath +Holds connection between jobs and the result filepaths + Primary Key ( job_id, filepath_id ) Index ( job_id ) +job_idjob_id bigint not null +References job ( job_id ) + Primary Key ( job_id, filepath_id ) Index ( filepath_id ) +filepath_idfilepath_id bigint not null +References filepath ( filepath_id ) + + + + +jobTable qiita.job + Primary Key ( job_id ) +job_idjob_id bigserial not null +Unique identifier for job +Referred by analysis_job ( job_id ) +Referred by job_results_filepath ( job_id ) + Index ( data_type_id ) +data_type_iddata_type_id bigint not null +What datatype (16s, metabolome, etc) job is run on. 
+References data_type ( data_type_id ) + Index ( job_status_id ) +job_status_idjob_status_id bigint not null +References job_status ( job_status_id ) + Index ( command_id ) +command_idcommand_id bigint not null +The Qiime or other function being run (alpha diversity, etc) +References command ( command_id ) + optionsoptions varchar +Holds all options set for the job as a json string + Index ( log_id ) +log_idlog_id bigint +Reference to error if status is error +References logging ( log_id -> logging_id ) + + + + +analysis_jobTable qiita.analysis_job +Holds information for a one-to-many relation of analysis to the jobs in it + Primary Key ( analysis_id, job_id ) Index ( analysis_id ) +analysis_idanalysis_id bigint not null +Id of the analysis +References analysis ( analysis_id ) + Primary Key ( analysis_id, job_id ) Index ( job_id ) +job_idjob_id bigint not null +Id for a job that is part of the analysis +References job ( job_id ) + + + + +analysis_chainTable qiita.analysis_chain +Keeps track of the chain of analysis edits. Tracks what previous analysis a given analysis came from.
If a given analysis is not in child_id, it is the root of the chain.
+ Index ( parent_id ) Primary Key ( parent_id, child_id ) +parent_idparent_id bigint not null +References analysis ( parent_id -> analysis_id ) + Index ( child_id ) Primary Key ( parent_id, child_id ) +child_idchild_id bigint not null +References analysis ( child_id -> analysis_id ) + + + + +job_statusTable qiita.job_status + Primary Key ( job_status_id ) +job_status_idjob_status_id bigserial not null +Referred by job ( job_status_id ) + statusstatus varchar not null + + + + +analysis_sampleTable qiita.analysis_sample + Index ( analysis_id ) +analysis_idanalysis_id bigint not null +References analysis ( analysis_id ) + Index ( processed_data_id ) +processed_data_idprocessed_data_id bigint not null +References processed_data ( processed_data_id ) + Index ( sample_id ) +sample_idsample_id varchar not null +References required_sample_info ( sample_id ) + + + + +column_controlled_vocabulariesTable qiita.column_controlled_vocabularies +Table relates a column with a controlled vocabulary. + Primary Key ( controlled_vocab_id, column_name ) Index ( controlled_vocab_id ) +controlled_vocab_idcontrolled_vocab_id bigserial not null +References controlled_vocabularies ( controlled_vocab_id ) + Primary Key ( controlled_vocab_id, column_name ) Index ( column_name ) +column_namecolumn_name varchar not null +References mixs_field_description ( column_name ) + + + + +mixs_field_descriptionTable qiita.mixs_field_description + Primary Key ( column_name ) +column_namecolumn_name varchar not null +Referred by column_controlled_vocabularies ( column_name ) +Referred by column_ontology ( column_name ) + data_typedata_type varchar not null + desc_or_valuedesc_or_value varchar not null + definitiondefinition varchar not null + min_lengthmin_length integer + activeactive integer not null + + + + +column_ontologyTable qiita.column_ontology +This table relates a column with an ontology. 
+ Primary Key ( column_name, ontology_short_name ) Index ( column_name ) +column_namecolumn_name varchar not null +References mixs_field_description ( column_name ) + Primary Key ( column_name, ontology_short_name ) +ontology_short_nameontology_short_name varchar not null + bioportal_idbioportal_id integer not null + ontology_branch_idontology_branch_id integer not null + + + + +term_relationshipTable qiita.term_relationship + Primary Key ( term_relationship_id ) +term_relationship_idterm_relationship_id bigserial not null + Index ( subject_term_id ) +subject_term_idsubject_term_id bigint not null +References term ( subject_term_id -> term_id ) + Index ( predicate_term_id ) +predicate_term_idpredicate_term_id bigint not null +References term ( predicate_term_id -> term_id ) + Index ( object_term_id ) +object_term_idobject_term_id bigint not null +References term ( object_term_id -> term_id ) + Index ( ontology_id ) +ontology_idontology_id bigint not null +References ontology ( ontology_id ) + + + + +term_pathTable qiita.term_path + Primary Key ( term_path_id ) +term_path_idterm_path_id bigserial not null + Index ( subject_term_id ) +subject_term_idsubject_term_id bigint not null +References term ( subject_term_id -> term_id ) + Index ( predicate_term_id ) +predicate_term_idpredicate_term_id bigint not null +References term ( predicate_term_id -> term_id ) + Index ( object_term_id ) +object_term_idobject_term_id bigint not null +References term ( object_term_id -> term_id ) + Index ( ontology_id ) +ontology_idontology_id bigint not null +References ontology ( ontology_id ) + Index ( relationship_type_id ) +relationship_type_idrelationship_type_id integer not null +References relationship_type ( relationship_type_id ) + distancedistance integer + + + + +ontologyTable qiita.ontology + Primary Key ( ontology_id ) +ontology_idontology_id bigserial not null +Referred by term ( ontology_id ) +Referred by term_path ( ontology_id ) +Referred by term_relationship ( 
ontology_id ) + shortnameshortname varchar not null + fully_loadedfully_loaded bool not null + fullnamefullname varchar + query_urlquery_url varchar + source_urlsource_url varchar + definitiondefinition text + load_dateload_date date not null + versionversion varchar + + + + +termTable qiita.term + Primary Key ( term_id ) +term_idterm_id bigserial not null +Referred by annotation ( term_id ) +Referred by dbxref ( term_id ) +Referred by term_path ( subject_term_id -> term_id ) +Referred by term_path ( predicate_term_id -> term_id ) +Referred by term_path ( object_term_id -> term_id ) +Referred by term_relationship ( subject_term_id -> term_id ) +Referred by term_relationship ( predicate_term_id -> term_id ) +Referred by term_relationship ( object_term_id -> term_id ) +Referred by term_synonym ( term_id ) +Referred by term_synonym ( synonym_id -> term_id ) + Unique Index ( ontology_id ) +ontology_idontology_id bigint not null +References ontology ( ontology_id ) + term_nameterm_name varchar not null + identifieridentifier varchar + definitiondefinition varchar + namespacenamespace varchar + is_obsoleteis_obsolete bool not null default 'false' + is_root_termis_root_term bool not null + is_leafis_leaf bool not null + + + + +annotationTable qiita.annotation + Primary Key ( annotation_id ) +annotation_idannotation_id bigserial not null + Index ( term_id ) +term_idterm_id bigint not null +References term ( term_id ) + annotation_nameannotation_name varchar not null + annotation_num_valueannotation_num_value bigint + annotation_str_valueannotation_str_value varchar + + + + +dbxrefTable qiita.dbxref + Primary Key ( dbxref_id ) +dbxref_iddbxref_id bigserial not null + Index ( term_id ) +term_idterm_id bigint not null +References term ( term_id ) + dbnamedbname varchar not null + accessionaccession varchar not null + descriptiondescription varchar not null + xref_typexref_type varchar not null + + + + +relationship_typeTable qiita.relationship_type + Primary Key ( 
relationship_type_id ) +relationship_type_idrelationship_type_id bigserial not null +Referred by term_path ( relationship_type_id ) + relationship_typerelationship_type varchar not null + + + + +analysis_usersTable qiita.analysis_users +Links analyses to the users they are shared with + Primary Key ( analysis_id, email ) Index ( analysis_id ) +analysis_idanalysis_id bigint not null +References analysis ( analysis_id ) + Primary Key ( analysis_id, email ) Index ( email ) +emailemail varchar not null +References qiita_user ( email ) + + + + +user_levelTable qiita.user_level +Holds available user levels + Primary Key ( user_level_id ) +user_level_iduser_level_id serial not null +Referred by qiita_user ( user_level_id ) + namename varchar not null +One of the user levels (admin, user, guest, etc) + descriptiondescription text not null + + + + +qiita_userTable qiita.qiita_user +Holds all user information + Primary Key ( email ) +emailemail varchar not null +Referred by analysis ( email ) +Referred by analysis_users ( email ) +Referred by study ( email ) +Referred by study_users ( email ) + Index ( user_level_id ) +user_level_iduser_level_id integer not null default 5 +user level +References user_level ( user_level_id ) + passwordpassword varchar not null + namename varchar + affiliationaffiliation varchar + addressaddress varchar + phonephone varchar + user_verify_codeuser_verify_code varchar +Code for initial user email verification + pass_reset_codepass_reset_code varchar +Randomly generated code for password reset + pass_reset_timestamppass_reset_timestamp timestamp +Time the reset code was generated + + + + +filepath_typeTable qiita.filepath_type + Primary Key ( filepath_type_id ) +filepath_type_idfilepath_type_id bigserial not null +Referred by filepath ( filepath_type_id ) + filepath_typefilepath_type varchar + + + + +checksum_algorithmTable qiita.checksum_algorithm + Primary Key ( checksum_algorithm_id ) +checksum_algorithm_idchecksum_algorithm_id bigserial not 
null +Referred by filepath ( checksum_algorithm_id ) + namename varchar not null + + + + +data_typeTable qiita.data_type + Primary Key ( data_type_id ) +data_type_iddata_type_id bigserial not null +Referred by common_prep_info ( data_type_id ) +Referred by job ( data_type_id ) + data_typedata_type varchar not null +Data type (16S, metabolome, etc) the job will use + + + + +filepathTable qiita.filepath + Primary Key ( filepath_id ) +filepath_idfilepath_id bigserial not null +Referred by analysis_filepath ( filepath_id ) +Referred by job_results_filepath ( filepath_id ) +Referred by preprocessed_filepath ( filepath_id ) +Referred by processed_filepath ( filepath_id ) +Referred by raw_filepath ( filepath_id ) + filepathfilepath varchar not null + Index ( filepath_type_id ) +filepath_type_idfilepath_type_id bigint not null +References filepath_type ( filepath_type_id ) + checksumchecksum varchar not null + checksum_algorithm_idchecksum_algorithm_id bigint not null +References checksum_algorithm ( checksum_algorithm_id ) + + + + +study_preprocessed_dataTable qiita.study_preprocessed_data + Primary Key ( study_id, preprocessed_data_id ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, preprocessed_data_id ) Index ( preprocessed_data_id ) +preprocessed_data_idpreprocessed_data_id bigint not null +References preprocessed_data ( preprocessed_data_id ) + + + + +studyTable qiita.study + Primary Key ( study_id ) +study_idstudy_id bigserial not null +Unique name for study +Referred by investigation_study ( study_id ) +Referred by required_sample_info ( study_id ) +Referred by study_experimental_factor ( study_id ) +Referred by study_pmid ( study_id ) +Referred by study_preprocessed_data ( study_id ) +Referred by study_processed_data ( study_id ) +Referred by study_raw_data ( study_id ) +Referred by study_sample_columns ( study_id ) +Referred by study_users ( study_id ) + Index ( email ) +emailemail varchar not null 
+Email of study owner +References qiita_user ( email ) + Index ( study_status_id ) +study_status_idstudy_status_id bigint not null +References study_status ( study_status_id ) + Index ( emp_person_id ) +emp_person_idemp_person_id bigint +References study_person ( emp_person_id -> study_person_id ) + first_contactfirst_contact varchar not null + fundingfunding varchar + Index ( timeseries_type_id ) +timeseries_type_idtimeseries_type_id bigint not null +What type of timeseries this study is (or is not)
Controlled Vocabulary
+References timeseries_type ( timeseries_type_id ) + Index ( lab_person_id ) +lab_person_idlab_person_id bigint +References study_person ( lab_person_id -> study_person_id ) + metadata_completemetadata_complete bool not null + mixs_compliantmixs_compliant bool not null + most_recent_contactmost_recent_contact varchar + number_samples_collectednumber_samples_collected integer not null + number_samples_promisednumber_samples_promised integer not null + Index ( portal_type_id ) +portal_type_idportal_type_id bigint not null +References portal_type ( portal_type_id ) + Index ( principal_investigator_id ) +principal_investigator_idprincipal_investigator_id bigint not null +References study_person ( principal_investigator_id -> study_person_id ) + reprocessreprocess bool not null + spatial_seriesspatial_series bool + study_titlestudy_title varchar not null + study_aliasstudy_alias varchar not null + study_descriptionstudy_description text not null + study_abstractstudy_abstract text not null + vamps_idvamps_id varchar + + + + +study_usersTable qiita.study_users +Links shared studies to users they are shared with + Primary Key ( study_id, email ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, email ) Index ( email ) +emailemail varchar not null +References qiita_user ( email ) + + + + +required_sample_infoTable qiita.required_sample_info +Required info for each sample. One row is one sample. 
+ Primary Key ( study_id, sample_id ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, sample_id ) Unique Index ( sample_id ) +sample_idsample_id varchar not null +Referred by analysis_sample ( sample_id ) +Referred by common_prep_info ( sample_id ) + physical_locationphysical_location varchar not null +Where the sample itself is stored + has_physical_specimenhas_physical_specimen bool not null +Whether we have the full speciment or just DNA + has_extracted_datahas_extracted_data bool not null + sample_typesample_type varchar not null +Controlled vocabulary of sample types + Index ( required_sample_info_status_id ) +required_sample_info_status_idrequired_sample_info_status_id bigint not null +What step of the pipeline the samples are in +References required_sample_info_status ( required_sample_info_status_id ) + collection_timestampcollection_timestamp timestamp not null + host_subject_idhost_subject_id varchar not null + descriptiondescription varchar not null + + + + +sample_xTable qiita.sample_x +data for samples in study x (sample template)
x is the study_id from study table

MAKE SURE sample_id IS FK TO sample_id IN required_sample_info TABLE
+ Primary Key ( sample_id ) +sample_idsample_id varchar not null + descriptiondescription varchar not null + other_mapping_columnsother_mapping_columns varchar +Represents whatever other columns go with this study + + + + +study_sample_columnsTable qiita.study_sample_columns +Holds information on which metadata columns are available for the study sample template + Primary Key ( study_id, column_name, column_type ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, column_name, column_type ) +column_namecolumn_name varchar(100) not null + Primary Key ( study_id, column_name, column_type ) +column_typecolumn_type varchar not null + + + + +required_sample_info_statusTable qiita.required_sample_info_status + Primary Key ( required_sample_info_status_id ) +required_sample_info_status_idrequired_sample_info_status_id bigserial not null +Referred by required_sample_info ( required_sample_info_status_id ) + statusstatus varchar + + + + +prep_yTable qiita.prep_y +Information on how raw data y was prepared (prep template)
Linked by y being raw_data_id from raw data table.
+ Primary Key ( sample_id ) +sample_idsample_id varchar not null + datadata bigint +STUFFFFF + + + + +common_prep_infoTable qiita.common_prep_info + Index ( raw_data_id ) Primary Key ( raw_data_id, sample_id ) +raw_data_idraw_data_id bigserial not null +References raw_data ( raw_data_id ) + Primary Key ( raw_data_id, sample_id ) Index ( sample_id ) +sample_idsample_id varchar not null +References required_sample_info ( sample_id ) + center_namecenter_name varchar + center_project_namecenter_project_name varchar + ebi_submission_accessionebi_submission_accession varchar + ebi_study_accessionebi_study_accession varchar + Index ( emp_status_id ) +emp_status_idemp_status_id bigint not null +References emp_status ( emp_status_id ) + Index ( data_type_id ) +data_type_iddata_type_id bigint not null +References data_type ( data_type_id ) + + + + +emp_statusTable qiita.emp_status +All possible statuses for projects relating to EMP. Whether they are part of, processed in accordance to, or not part of EMP. 
+ Primary Key ( emp_status_id ) +emp_status_idemp_status_id bigserial not null +Referred by common_prep_info ( emp_status_id ) + emp_statusemp_status varchar not null + + + + +raw_data_prep_columnsTable qiita.raw_data_prep_columns +Holds the columns available for a given raw data prep + Primary Key ( raw_data_id, column_name, column_type ) Index ( raw_data_id ) +raw_data_idraw_data_id bigint not null +References raw_data ( raw_data_id ) + Primary Key ( raw_data_id, column_name, column_type ) +column_namecolumn_name varchar not null + Primary Key ( raw_data_id, column_name, column_type ) +column_typecolumn_type varchar not null + + + + +filetypeTable qiita.filetype +Type of file (FASTA, FASTQ, SPECTRA, etc) + Primary Key ( filetype_id ) +filetype_idfiletype_id bigserial not null +Referred by raw_data ( filetype_id ) + typetype varchar not null + + + + +raw_filepathTable qiita.raw_filepath + Primary Key ( raw_data_id, filepath_id ) Index ( raw_data_id ) +raw_data_idraw_data_id bigint not null +References raw_data ( raw_data_id ) + Primary Key ( raw_data_id, filepath_id ) Index ( filepath_id ) +filepath_idfilepath_id bigint not null +References filepath ( filepath_id ) + + + + +preprocessed_processed_dataTable qiita.preprocessed_processed_data + Primary Key ( preprocessed_data_id, processed_data_id ) Index ( preprocessed_data_id ) +preprocessed_data_idpreprocessed_data_id bigint not null +References preprocessed_data ( preprocessed_data_id ) + Primary Key ( preprocessed_data_id, processed_data_id ) Index ( processed_data_id ) +processed_data_idprocessed_data_id bigint not null +References processed_data ( processed_data_id ) + + + + +study_raw_dataTable qiita.study_raw_data +links study to its raw data + Index ( study_id ) Primary Key ( study_id, raw_data_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, raw_data_id ) +raw_data_idraw_data_id bigint not null +References raw_data ( raw_data_id ) + + + + +timeseries_typeTable 
qiita.timeseries_type + Primary Key ( timeseries_type_id ) +timeseries_type_idtimeseries_type_id bigserial not null +Referred by study ( timeseries_type_id ) + timeseries_typetimeseries_type varchar not null + + + + +portal_typeTable qiita.portal_type +What portals are available to show a study in + Primary Key ( portal_type_id ) +portal_type_idportal_type_id bigserial not null +Referred by study ( portal_type_id ) + portalportal varchar not null + descriptiondescription varchar not null + + + + +raw_dataTable qiita.raw_data + Unique Index ( raw_data_id ) +raw_data_idraw_data_id bigserial not null +Referred by common_prep_info ( raw_data_id ) +Referred by raw_data_prep_columns ( raw_data_id ) +Referred by raw_filepath ( raw_data_id ) +Referred by raw_preprocessed_data ( raw_data_id ) +Referred by study_raw_data ( raw_data_id ) + Index ( filetype_id ) +filetype_idfiletype_id bigint not null +References filetype ( filetype_id ) + + + + +raw_preprocessed_dataTable qiita.raw_preprocessed_data + Primary Key ( raw_data_id, preprocessed_data_id ) Index ( raw_data_id ) +raw_data_idraw_data_id bigint not null +References raw_data ( raw_data_id ) + Primary Key ( raw_data_id, preprocessed_data_id ) Index ( preprocessed_data_id ) +preprocessed_data_idpreprocessed_data_id bigint not null +References preprocessed_data ( preprocessed_data_id ) + + + + +preprocessed_filepathTable qiita.preprocessed_filepath + Primary Key ( preprocessed_data_id, filepath_id ) Index ( preprocessed_data_id ) +preprocessed_data_idpreprocessed_data_id bigint not null +References preprocessed_data ( preprocessed_data_id ) + Primary Key ( preprocessed_data_id, filepath_id ) Index ( filepath_id ) +filepath_idfilepath_id bigint not null +References filepath ( filepath_id ) + + + + +preprocessed_dataTable qiita.preprocessed_data + Primary Key ( preprocessed_data_id ) +preprocessed_data_idpreprocessed_data_id bigserial not null +Referred by preprocessed_filepath ( preprocessed_data_id ) +Referred by 
preprocessed_processed_data ( preprocessed_data_id ) +Referred by raw_preprocessed_data ( preprocessed_data_id ) +Referred by study_preprocessed_data ( preprocessed_data_id ) + preprocessed_params_tablepreprocessed_params_table varchar not null +Name of table holding the params + preprocessed_params_idpreprocessed_params_id bigint not null + submitted_to_insdcsubmitted_to_insdc bool not null + + + + +processed_filepathTable qiita.processed_filepath + Primary Key ( processed_data_id, filepath_id ) +processed_data_idprocessed_data_id bigint not null +References processed_data ( processed_data_id ) + Primary Key ( processed_data_id, filepath_id ) +filepath_idfilepath_id bigint not null +References filepath ( filepath_id ) + + + + +commandTable qiita.command +Available commands for jobs + Primary Key ( command_id ) +command_idcommand_id bigserial not null +Unique identifier for function +Referred by job ( command_id ) + namename varchar not null + commandcommand varchar not null +What command to call to run this function + inputinput varchar not null +JSON of input options for the command + requiredrequired varchar not null +JSON of required options for the command + optionaloptional varchar not null +JSON of optional options for command + outputoutput varchar not null +JSON of output options for the command + + + + +loggingTable qiita.logging + Primary Key ( logging_id ) +logging_idlogging_id bigserial not null +Referred by job ( log_id -> logging_id ) + timetime timestamp not null +Time the error was thrown + Index ( severity_id ) +severity_idseverity_id integer not null +References severity ( severity_id ) + msgmsg varchar not null +Error message thrown + informationinformation varchar +Other applicable information (depending on error) + + + + +processed_params_uclustTable qiita.processed_params_uclust +Parameters used for processing data using method uclust + Primary Key ( processed_params_id ) +processed_params_idprocessed_params_id bigserial not null + Index ( 
reference_id ) +reference_idreference_id bigint not null +What version of reference or type of reference used +References reference ( reference_id ) + similaritysimilarity float8 not null default 0.97 + enable_rev_strand_matchenable_rev_strand_match bool not null default TRUE + suppress_new_clusterssuppress_new_clusters bool not null default TRUE + + + + +referenceTable qiita.reference + Primary Key ( reference_id ) +reference_idreference_id bigserial not null +Referred by processed_params_uclust ( reference_id ) + reference_namereference_name varchar not null + reference_versionreference_version varchar + sequence_filepathsequence_filepath varchar not null + taxonomy_filepathtaxonomy_filepath varchar + tree_filepathtree_filepath varchar + + + + +preprocessed_spectra_paramsTable qiita.preprocessed_spectra_params +Parameters used for processing spectra data. + Primary Key ( preprocessed_params_id ) +preprocessed_params_idpreprocessed_params_id bigserial not null + colcol varchar + + + + +preprocessed_sequence_454_paramsTable qiita.preprocessed_sequence_454_params +Parameters used for processing sequence data. + Primary Key ( preprocessed_params_id ) +preprocessed_params_idpreprocessed_params_id bigserial not null + trim_lengthtrim_length integer not null + + + + +preprocessed_sequence_illumina_paramsTable qiita.preprocessed_sequence_illumina_params +Parameters used for processing illumina sequence data. 
+ Primary Key ( preprocessed_params_id ) +preprocessed_params_idpreprocessed_params_id bigserial not null + trim_lengthtrim_length integer not null + max_bad_run_lengthmax_bad_run_length integer not null default 3 + min_per_read_length_fractionmin_per_read_length_fraction real not null default 0.75 + sequence_max_nsequence_max_n integer not null default 0 + + + + +processed_dataTable qiita.processed_data + Primary Key ( processed_data_id ) +processed_data_idprocessed_data_id bigserial not null +Referred by analysis_sample ( processed_data_id ) +Referred by preprocessed_processed_data ( processed_data_id ) +Referred by processed_filepath ( processed_data_id ) +Referred by study_processed_data ( processed_data_id ) + processed_params_tableprocessed_params_table varchar not null +Name of table holding processing params + processed_params_idprocessed_params_id bigint not null +Link to a table with the parameters used to generate processed data + processed_dateprocessed_date timestamp not null + + + + +study_processed_dataTable qiita.study_processed_data + Primary Key ( study_id, processed_data_id ) Index ( study_id ) +study_idstudy_id bigint not null +References study ( study_id ) + Primary Key ( study_id, processed_data_id ) Unique Index ( processed_data_id ) +processed_data_idprocessed_data_id bigint not null +References processed_data ( processed_data_id ) + +
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
term_synonym
synonym_id bigserial NOT NULL
term_id bigint NOT NULL
synonym_value varchar NOT NULL
synonym_type_id bigint NOT NULL
Indexes
pk_term_synonym primary key ON synonym_id
idx_term_synonym ON term_id
Foreign Keys
fk_term_synonym_term ( term_id ) ref term (term_id)
fk_term_synonym_type_term ( synonym_id ) ref term (term_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
controlled_vocab_values
vocab_value_id bigserial NOT NULL
controlled_vocab_id bigint NOT NULL
term varchar NOT NULL
order_by varchar NOT NULL
default_item varchar
Indexes
pk_controlled_vocab_values primary key ON vocab_value_id
idx_controlled_vocab_values ON controlled_vocab_id
Foreign Keys
fk_controlled_vocab_values ( controlled_vocab_id ) ref controlled_vocabularies (controlled_vocab_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
controlled_vocabularies
controlled_vocab_id bigserial NOT NULL
vocab_name varchar NOT NULL
Indexes
pk_controlled_vocabularies primary key ON controlled_vocab_id
+ +

+ + + + + + + + + + + + + + + + + + + + + +
severity
severity_id serial NOT NULL
severity varchar NOT NULL
Indexes
pk_severity primary key ON severity_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_person
Contact information for the various people involved in a study
study_person_id bigserial NOT NULL
name varchar NOT NULL
email varchar NOT NULL
address varchar( 100 )
phone varchar
Indexes
pk_study_person primary key ON study_person_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
investigation
Overarching investigation information. +An investigation comprises one or more individual studies.
investigation_id bigserial NOT NULL
name varchar NOT NULL
description varchar NOT NULL Describes the overarching goal of the investigation
contact_person_id bigint
Indexes
pk_investigation primary key ON investigation_id
idx_investigation ON contact_person_id
Foreign Keys
fk_investigation_study_person ( contact_person_id ) ref study_person (study_person_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
investigation_study
investigation_id bigint NOT NULL
study_id bigint NOT NULL
Indexes
idx_investigation_study primary key ON investigation_id, study_id
idx_investigation_study_investigation ON investigation_id
idx_investigation_study_study ON study_id
Foreign Keys
fk_investigation_study ( investigation_id ) ref investigation (investigation_id)
fk_investigation_study_study ( study_id ) ref study (study_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + +
study_status
study_status_id bigserial NOT NULL
status varchar NOT NULL
description varchar NOT NULL
Indexes
pk_study_status primary key ON study_status_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_experimental_factor
EFO ontological link of experimental factors to studies
study_id bigint NOT NULL
efo_id bigint NOT NULL
Indexes
idx_study_experimental_factor primary key ON study_id, efo_id
idx_study_experimental_factor_0 ON study_id
Foreign Keys
fk_study_experimental_factor ( study_id ) ref study (study_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_pmid
Links a study to all PMIDs for papers created from study
study_id bigint NOT NULL
pmid varchar NOT NULL
Indexes
idx_study_pmid primary key ON study_id, pmid
idx_study_pmid_0 ON study_id
Foreign Keys
fk_study_pmid_study ( study_id ) ref study (study_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
analysis_status
analysis_status_id bigserial NOT NULL
status varchar NOT NULL
Indexes
pk_analysis_status primary key ON analysis_status_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
analysis
Holds analysis information
analysis_id bigserial NOT NULL Unique identifier for analysis
email varchar NOT NULL Email for user who owns the analysis
name varchar NOT NULL Name of the analysis
description varchar NOT NULL
analysis_status_id bigint NOT NULL
pmid varchar PMID of paper from the analysis
Indexes
pk_analysis primary key ON analysis_id
idx_analysis_email ON email
idx_analysis_status_id ON analysis_status_id
Foreign Keys
fk_analysis_user ( email ) ref qiita_user (email)
fk_analysis_analysis_status ( analysis_status_id ) ref analysis_status (analysis_status_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
analysis_filepath
Stores link between analysis and the data file used for the analysis.
analysis_id bigint NOT NULL
filepath_id bigint NOT NULL
Indexes
idx_analysis_filepath ON analysis_id
idx_analysis_filepath_0 ON filepath_id
idx_analysis_filepath_1 primary key ON analysis_id, filepath_id
Foreign Keys
fk_analysis_filepath ( analysis_id ) ref analysis (analysis_id)
fk_analysis_filepath_0 ( filepath_id ) ref filepath (filepath_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
job_results_filepath
Holds connection between jobs and the result filepaths
job_id bigint NOT NULL
filepath_id bigint NOT NULL
Indexes
idx_job_results_filepath primary key ON job_id, filepath_id
idx_job_results_filepath_0 ON job_id
idx_job_results_filepath_1 ON filepath_id
Foreign Keys
fk_job_results_filepath ( job_id ) ref job (job_id)
fk_job_results_filepath_0 ( filepath_id ) ref filepath (filepath_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
job
job_id bigserial NOT NULL Unique identifier for job
data_type_id bigint NOT NULL What datatype (16s, metabolome, etc) job is run on.
job_status_id bigint NOT NULL
command_id bigint NOT NULL The Qiime or other function being run (alpha diversity, etc)
options varchar Holds all options set for the job as a json string
log_id bigint Reference to error if status is error
Indexes
pk_job primary key ON job_id
idx_job_command ON command_id
idx_job_status ON job_status_id
idx_job_type ON data_type_id
idx_job ON log_id
Foreign Keys
fk_job_function ( command_id ) ref command (command_id)
fk_job_job_status_id ( job_status_id ) ref job_status (job_status_id)
fk_job_data_type ( data_type_id ) ref data_type (data_type_id)
fk_job ( log_id ) ref logging (logging_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
analysis_job
Holds information for a one-to-many relation of analysis to the jobs in it
analysis_id bigint NOT NULL Id of the analysis
job_id bigint NOT NULL Id for a job that is part of the analysis
Indexes
idx_analysis_jobs primary key ON analysis_id, job_id
idx_analysis_job ON analysis_id
idx_analysis_job_0 ON job_id
Foreign Keys
fk_analysis_job_analysis ( analysis_id ) ref analysis (analysis_id)
fk_analysis_job_job ( job_id ) ref job (job_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
analysis_chain
Keeps track of the chain of analysis edits. Tracks what previous analysis a given analysis came from. +If a given analysis is not in child_id, it is the root of the chain.
parent_id bigint NOT NULL
child_id bigint NOT NULL
Indexes
idx_analysis_chain ON parent_id
idx_analysis_chain_0 ON child_id
idx_analysis_chain_1 primary key ON parent_id, child_id
Foreign Keys
fk_analysis_chain ( parent_id ) ref analysis (analysis_id)
fk_analysis_chain_0 ( child_id ) ref analysis (analysis_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
job_status
job_status_id bigserial NOT NULL
status varchar NOT NULL
Indexes
pk_job_status primary key ON job_status_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
analysis_sample
analysis_id bigint NOT NULL
processed_data_id bigint NOT NULL
sample_id varchar NOT NULL
Indexes
idx_analysis_sample ON analysis_id
idx_analysis_sample_0 ON processed_data_id
idx_analysis_sample_1 ON sample_id
Foreign Keys
fk_analysis_sample_analysis ( analysis_id ) ref analysis (analysis_id)
fk_analysis_sample ( processed_data_id ) ref processed_data (processed_data_id)
fk_analysis_sample_0 ( sample_id ) ref required_sample_info (sample_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
column_controlled_vocabularies
Table relates a column with a controlled vocabulary.
controlled_vocab_id bigserial NOT NULL
column_name varchar NOT NULL
Indexes
idx_column_controlled_vocabularies primary key ON controlled_vocab_id, column_name
idx_column_controlled_vocabularies_0 ON column_name
idx_column_controlled_vocabularies_1 ON controlled_vocab_id
Foreign Keys
fk_column_controlled_vocabularies ( column_name ) ref mixs_field_description (column_name)
fk_column_controlled_vocab2 ( controlled_vocab_id ) ref controlled_vocabularies (controlled_vocab_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
mixs_field_description
column_name varchar NOT NULL
data_type varchar NOT NULL
desc_or_value varchar NOT NULL
definition varchar NOT NULL
min_length integer
active integer NOT NULL
Indexes
pk_mixs_field_description primary key ON column_name
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
column_ontology
This table relates a column with an ontology.
column_name varchar NOT NULL
ontology_short_name varchar NOT NULL
bioportal_id integer NOT NULL
ontology_branch_id integer NOT NULL
Indexes
idx_column_ontology primary key ON column_name, ontology_short_name
idx_column_ontology_0 ON column_name
Foreign Keys
fk_column_ontology ( column_name ) ref mixs_field_description (column_name)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
term_relationship
term_relationship_id bigserial NOT NULL
subject_term_id bigint NOT NULL
predicate_term_id bigint NOT NULL
object_term_id bigint NOT NULL
ontology_id bigint NOT NULL
Indexes
pk_term_relationship primary key ON term_relationship_id
idx_term_relationship_subject ON subject_term_id
idx_term_relationship_predicate ON predicate_term_id
idx_term_relationship_object ON object_term_id
idx_term_relationship_ontology ON ontology_id
Foreign Keys
fk_term_relationship_subj_term ( subject_term_id ) ref term (term_id)
fk_term_relationship_pred_term ( predicate_term_id ) ref term (term_id)
fk_term_relationship_obj_term ( object_term_id ) ref term (term_id)
fk_term_relationship_ontology ( ontology_id ) ref ontology (ontology_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
term_path
term_path_id bigserial NOT NULL
subject_term_id bigint NOT NULL
predicate_term_id bigint NOT NULL
object_term_id bigint NOT NULL
ontology_id bigint NOT NULL
relationship_type_id integer NOT NULL
distance integer
Indexes
pk_term_path primary key ON term_path_id
idx_term_path ON ontology_id
idx_term_path_relatonship ON relationship_type_id
idx_term_path_subject ON subject_term_id
idx_term_path_predicate ON predicate_term_id
idx_term_path_object ON object_term_id
Foreign Keys
fk_term_path_ontology ( ontology_id ) ref ontology (ontology_id)
fk_term_path_relationship_type ( relationship_type_id ) ref relationship_type (relationship_type_id)
fk_term_path_term_subject ( subject_term_id ) ref term (term_id)
fk_term_path_term_predicate ( predicate_term_id ) ref term (term_id)
fk_term_path_term_object ( object_term_id ) ref term (term_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ontology
ontology_id bigserial NOT NULL
shortname varchar NOT NULL
fully_loaded bool NOT NULL
fullname varchar
query_url varchar
source_url varchar
definition text
load_date date NOT NULL
version varchar
Indexes
pk_ontology primary key ON ontology_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
term
term_id bigserial NOT NULL
ontology_id bigint NOT NULL
term_name varchar NOT NULL
identifier varchar
definition varchar
namespace varchar
is_obsolete bool NOT NULL DEFO 'false'
is_root_term bool NOT NULL
is_leaf bool NOT NULL
Indexes
pk_term primary key ON term_id
idx_term unique ON ontology_id
Foreign Keys
fk_term_ontology ( ontology_id ) ref ontology (ontology_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
annotation
annotation_id bigserial NOT NULL
term_id bigint NOT NULL
annotation_name varchar NOT NULL
annotation_num_value bigint
annotation_str_value varchar
Indexes
pk_annotation primary key ON annotation_id
idx_annotation ON term_id
Foreign Keys
fk_annotation_term ( term_id ) ref term (term_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
dbxref
dbxref_id bigserial NOT NULL
term_id bigint NOT NULL
dbname varchar NOT NULL
accession varchar NOT NULL
description varchar NOT NULL
xref_type varchar NOT NULL
Indexes
pk_dbxref primary key ON dbxref_id
idx_dbxref ON term_id
Foreign Keys
fk_dbxref_term ( term_id ) ref term (term_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
relationship_type
relationship_type_id bigserial NOT NULL
relationship_type varchar NOT NULL
Indexes
pk_relationship_type primary key ON relationship_type_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
analysis_users
Links analyses to the users they are shared with
analysis_id bigint NOT NULL
email varchar NOT NULL
Indexes
idx_analysis_users primary key ON analysis_id, email
idx_analysis_users_analysis ON analysis_id
idx_analysis_users_email ON email
Foreign Keys
fk_analysis_users_analysis ( analysis_id ) ref analysis (analysis_id)
fk_analysis_users_user ( email ) ref qiita_user (email)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
user_level
Holds available user levels
user_level_id serial NOT NULL
name varchar NOT NULL One of the user levels (admin, user, guest, etc)
description text NOT NULL
Indexes
pk_user_level primary key ON user_level_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
qiita_user
Holds all user information
email varchar NOT NULL
user_level_id integer NOT NULL DEFO 5 user level
password varchar NOT NULL
name varchar
affiliation varchar
address varchar
phone varchar
user_verify_code varchar Code for initial user email verification
pass_reset_code varchar Randomly generated code for password reset
pass_reset_timestamp timestamp Time the reset code was generated
Indexes
pk_user primary key ON email
idx_user ON user_level_id
Foreign Keys
fk_user_user_level ( user_level_id ) ref user_level (user_level_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
filepath_type
filepath_type_id bigserial NOT NULL
filepath_type varchar
Indexes
pk_filepath_type primary key ON filepath_type_id
+ +

+ + + + + + + + + + + + + + + + + + + + + +
checksum_algorithm
checksum_algorithm_id bigserial NOT NULL
name varchar NOT NULL
Indexes
pk_checksum_algorithm primary key ON checksum_algorithm_id
+ +

+ + + + + + + + + + + + + + + + + + + + + +
data_type
data_type_id bigserial NOT NULL
data_type varchar NOT NULL Data type (16S, metabolome, etc) the job will use
Indexes
pk_data_type primary key ON data_type_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
filepath
filepath_id bigserial NOT NULL
filepath varchar NOT NULL
filepath_type_id bigint NOT NULL
checksum varchar NOT NULL
checksum_algorithm_id bigint NOT NULL
Indexes
pk_filepath primary key ON filepath_id
idx_filepath ON filepath_type_id
Foreign Keys
fk_filepath ( filepath_type_id ) ref filepath_type (filepath_type_id)
fk_filepath_0 ( checksum_algorithm_id ) ref checksum_algorithm (checksum_algorithm_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_preprocessed_data
study_id bigint NOT NULL
preprocessed_data_id bigint NOT NULL
Indexes
idx_study_preprocessed_data primary key ON study_id, preprocessed_data_id
idx_study_preprocessed_data_0 ON study_id
idx_study_preprocessed_data_1 ON preprocessed_data_id
Foreign Keys
fk_study_preprocessed_data ( study_id ) ref study (study_id)
fk_study_preprocessed_data_0 ( preprocessed_data_id ) ref preprocessed_data (preprocessed_data_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study
study_id bigserial NOT NULL Unique name for study
email varchar NOT NULL Email of study owner
study_status_id bigint NOT NULL
emp_person_id bigint
first_contact varchar NOT NULL
funding varchar
timeseries_type_id bigint NOT NULL What type of timeseries this study is (or is not) +Controlled Vocabulary
lab_person_id bigint
metadata_complete bool NOT NULL
mixs_compliant bool NOT NULL
most_recent_contact varchar
number_samples_collected integer NOT NULL
number_samples_promised integer NOT NULL
portal_type_id bigint NOT NULL
principal_investigator_id bigint NOT NULL
reprocess bool NOT NULL
spatial_series bool
study_title varchar NOT NULL
study_alias varchar NOT NULL
study_description text NOT NULL
study_abstract text NOT NULL
vamps_id varchar
Indexes
pk_study primary key ON study_id
idx_study ON email
idx_study_0 ON study_status_id
idx_study_1 ON emp_person_id
idx_study_2 ON lab_person_id
idx_study_3 ON principal_investigator_id
idx_study_4 ON timeseries_type_id
idx_study_5 ON portal_type_id
Foreign Keys
fk_study_user ( email ) ref qiita_user (email)
fk_study_study_status ( study_status_id ) ref study_status (study_status_id)
fk_study_study_emp_person ( emp_person_id ) ref study_person (study_person_id)
fk_study_study_lab_person ( lab_person_id ) ref study_person (study_person_id)
fk_study_study_pi_person ( principal_investigator_id ) ref study_person (study_person_id)
fk_study_timeseries_type ( timeseries_type_id ) ref timeseries_type (timeseries_type_id)
fk_study ( portal_type_id ) ref portal_type (portal_type_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_users
Links shared studies to users they are shared with
study_id bigint NOT NULL
email varchar NOT NULL
Indexes
idx_study_users primary key ON study_id, email
idx_study_users_0 ON study_id
idx_study_users_1 ON email
Foreign Keys
fk_study_users_study ( study_id ) ref study (study_id)
fk_study_users_user ( email ) ref qiita_user (email)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
required_sample_info
Required info for each sample. One row is one sample.
study_id bigint NOT NULL
sample_id varchar NOT NULL
physical_location varchar NOT NULL Where the sample itself is stored
has_physical_specimen bool NOT NULL Whether we have the full specimen or just DNA
has_extracted_data bool NOT NULL
sample_type varchar NOT NULL Controlled vocabulary of sample types
required_sample_info_status_id bigint NOT NULL What step of the pipeline the samples are in
collection_timestamp timestamp NOT NULL
host_subject_id varchar NOT NULL
description varchar NOT NULL
Indexes
idx_common_sample_information primary key ON study_id, sample_id
idx_required_sample_info ON study_id
idx_required_sample_info_0 ON required_sample_info_status_id
pk_required_sample_info unique ON sample_id
Foreign Keys
fk_required_sample_info_study ( study_id ) ref study (study_id)
fk_required_sample_info ( required_sample_info_status_id ) ref required_sample_info_status (required_sample_info_status_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
sample_x
data for samples in study x (sample template) +x is the study_id from study table + +MAKE SURE sample_id IS FK TO sample_id IN required_sample_info TABLE
sample_id varchar NOT NULL
description varchar NOT NULL
other_mapping_columns varchar Represents whatever other columns go with this study
Indexes
pk_study_x_y primary key ON sample_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_sample_columns
Holds information on which metadata columns are available for the study sample template
study_id bigint NOT NULL
column_name varchar( 100 ) NOT NULL
column_type varchar NOT NULL
Indexes
idx_study_mapping_columns primary key ON study_id, column_name, column_type
idx_study_mapping_columns_study_id ON study_id
Foreign Keys
fk_study_mapping_columns_study ( study_id ) ref study (study_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
required_sample_info_status
required_sample_info_status_id bigserial NOT NULL
status varchar
Indexes
pk_sample_status primary key ON required_sample_info_status_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + +
prep_y
Information on how raw data y was prepared (prep template) +Linked by y being raw_data_id from raw data table.
sample_id varchar NOT NULL
data bigint Represents whatever prep columns go with this raw data
Indexes
pk_prep_y primary key ON sample_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
common_prep_info
raw_data_id bigserial NOT NULL
sample_id varchar NOT NULL
center_name varchar
center_project_name varchar
ebi_submission_accession varchar
ebi_study_accession varchar
emp_status_id bigint NOT NULL
data_type_id bigint NOT NULL
Indexes
idx_required_prep_info ON raw_data_id
idx_required_prep_info_0 ON emp_status_id
idx_required_prep_info_1 primary key ON raw_data_id, sample_id
idx_required_prep_info_2 ON sample_id
idx_required_prep_info_3 ON data_type_id
Foreign Keys
fk_required_prep_info_raw_data ( raw_data_id ) ref raw_data (raw_data_id)
fk_required_prep_info_emp_status ( emp_status_id ) ref emp_status (emp_status_id)
fk_required_prep_info ( sample_id ) ref required_sample_info (sample_id)
fk_required_prep_info_0 ( data_type_id ) ref data_type (data_type_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + +
emp_status
All possible statuses for projects relating to EMP. Whether they are part of, processed in accordance to, or not part of EMP.
emp_status_id bigserial NOT NULL
emp_status varchar NOT NULL
Indexes
pk_emp_status primary key ON emp_status_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
raw_data_prep_columns
Holds the columns available for a given raw data prep
raw_data_id bigint NOT NULL
column_name varchar NOT NULL
column_type varchar NOT NULL
Indexes
idx_raw_data_prep_columns primary key ON raw_data_id, column_name, column_type
idx_prep_columns ON raw_data_id
Foreign Keys
fk_prep_columns_raw_data ( raw_data_id ) ref raw_data (raw_data_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + +
filetype
Type of file (FASTA, FASTQ, SPECTRA, etc)
filetype_id bigserial NOT NULL
type varchar NOT NULL
Indexes
pk_filetype primary key ON filetype_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
raw_filepath
raw_data_id bigint NOT NULL
filepath_id bigint NOT NULL
Indexes
idx_raw_filepath primary key ON raw_data_id, filepath_id
idx_raw_filepath_0 ON filepath_id
idx_raw_filepath_1 ON raw_data_id
Foreign Keys
fk_raw_filepath ( filepath_id ) ref filepath (filepath_id)
fk_raw_filepath_0 ( raw_data_id ) ref raw_data (raw_data_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
preprocessed_processed_data
preprocessed_data_id bigint NOT NULL
processed_data_id bigint NOT NULL
Indexes
idx_preprocessed_processed_data primary key ON preprocessed_data_id, processed_data_id
idx_preprocessed_processed_data_0 ON preprocessed_data_id
idx_preprocessed_processed_data_1 ON processed_data_id
Foreign Keys
fk_preprocessed_processed_data ( preprocessed_data_id ) ref preprocessed_data (preprocessed_data_id)
fk_preprocessed_processed_data_0 ( processed_data_id ) ref processed_data (processed_data_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_raw_data
links study to its raw data
study_id bigint NOT NULL
raw_data_id bigint NOT NULL
Indexes
idx_study_raw_data ON study_id
idx_study_raw_data_0 primary key ON study_id, raw_data_id
Foreign Keys
fk_study_raw_data_study ( study_id ) ref study (study_id)
fk_study_raw_data_raw_data ( raw_data_id ) ref raw_data (raw_data_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + +
timeseries_type
timeseries_type_id bigserial NOT NULL
timeseries_type varchar NOT NULL
Indexes
pk_timeseries_type primary key ON timeseries_type_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
portal_type
What portals are available to show a study in
portal_type_id bigserial NOT NULL
portal varchar NOT NULL
description varchar NOT NULL
Indexes
pk_portal_type primary key ON portal_type_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
raw_data
raw_data_id bigserial NOT NULL
filetype_id bigint NOT NULL
Indexes
pk_raw_data unique ON raw_data_id
idx_raw_data ON filetype_id
Foreign Keys
fk_raw_data_filetype ( filetype_id ) ref filetype (filetype_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
raw_preprocessed_data
raw_data_id bigint NOT NULL
preprocessed_data_id bigint NOT NULL
Indexes
idx_raw_preprocessed_data primary key ON raw_data_id, preprocessed_data_id
idx_raw_preprocessed_data_0 ON raw_data_id
idx_raw_preprocessed_data_1 ON preprocessed_data_id
Foreign Keys
fk_raw_preprocessed_data ( raw_data_id ) ref raw_data (raw_data_id)
fk_raw_preprocessed_data_0 ( preprocessed_data_id ) ref preprocessed_data (preprocessed_data_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
preprocessed_filepath
preprocessed_data_id bigint NOT NULL
filepath_id bigint NOT NULL
Indexes
idx_preprocessed_filepath primary key ON preprocessed_data_id, filepath_id
idx_preprocessed_filepath_0 ON preprocessed_data_id
idx_preprocessed_filepath_1 ON filepath_id
Foreign Keys
fk_preprocessed_filepath ( preprocessed_data_id ) ref preprocessed_data (preprocessed_data_id)
fk_preprocessed_filepath_0 ( filepath_id ) ref filepath (filepath_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
preprocessed_data
preprocessed_data_id bigserial NOT NULL
preprocessed_params_table varchar NOT NULL Name of table holding the params
preprocessed_params_id bigint NOT NULL
submitted_to_insdc bool NOT NULL
Indexes
pk_preprocessed_data primary key ON preprocessed_data_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
processed_filepath
processed_data_id bigint NOT NULL
filepath_id bigint NOT NULL
Indexes
idx_processed_filepath primary key ON processed_data_id, filepath_id
Foreign Keys
fk_processed_data_filepath ( processed_data_id ) ref processed_data (processed_data_id)
fk_processed_data_filepath_0 ( filepath_id ) ref filepath (filepath_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
command
Available commands for jobs
command_id bigserial NOT NULL Unique identifier for function
name varchar NOT NULL
command varchar NOT NULL What command to call to run this function
input varchar NOT NULL JSON of input options for the command
required varchar NOT NULL JSON of required options for the command
optional varchar NOT NULL JSON of optional options for command
output varchar NOT NULL JSON of output options for the command
Indexes
pk_command primary key ON command_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
logging
logging_id bigserial NOT NULL
time timestamp NOT NULL Time the error was thrown
severity_id integer NOT NULL
msg varchar NOT NULL Error message thrown
information varchar Other applicable information (depending on error)
Indexes
idx_logging_0 ON severity_id
pk_logging primary key ON logging_id
Foreign Keys
fk_logging_severity ( severity_id ) ref severity (severity_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
processed_params_uclust
Parameters used for processing data using method uclust
processed_params_id bigserial NOT NULL
reference_id bigint NOT NULL What version of reference or type of reference used
similarity float8 NOT NULL DEFAULT 0.97
enable_rev_strand_match bool NOT NULL DEFAULT TRUE
suppress_new_clusters bool NOT NULL DEFAULT TRUE
Indexes
pk_processed_params_uclust primary key ON processed_params_id
idx_processed_params_uclust ON reference_id
Foreign Keys
fk_processed_params_uclust ( reference_id ) ref reference (reference_id)
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
reference
reference_id bigserial NOT NULL
reference_name varchar NOT NULL
reference_version varchar
sequence_filepath varchar NOT NULL
taxonomy_filepath varchar
tree_filepath varchar
Indexes
pk_reference primary key ON reference_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + +
preprocessed_spectra_params
Parameters used for processing spectra data.
preprocessed_params_id bigserial NOT NULL
col varchar
Indexes
pk_preprocessed_spectra_params primary key ON preprocessed_params_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + +
preprocessed_sequence_454_params
Parameters used for processing sequence data.
preprocessed_params_id bigserial NOT NULL
trim_length integer NOT NULL
Indexes
pk_preprocessed_sequence_454_params primary key ON preprocessed_params_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
preprocessed_sequence_illumina_params
Parameters used for processing illumina sequence data.
preprocessed_params_id bigserial NOT NULL
trim_length integer NOT NULL
max_bad_run_length integer NOT NULL DEFAULT 3
min_per_read_length_fraction real NOT NULL DEFAULT 0.75
sequence_max_n integer NOT NULL DEFAULT 0
Indexes
pk_preprocessed_sequence_illumina_params primary key ON preprocessed_params_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
processed_data
processed_data_id bigserial NOT NULL
processed_params_table varchar NOT NULL Name of table holding processing params
processed_params_id bigint NOT NULL Link to a table with the parameters used to generate processed data
processed_date timestamp NOT NULL
Indexes
pk_processed_data primary key ON processed_data_id
+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
study_processed_data
study_id bigint NOT NULL
processed_data_id bigint NOT NULL
Indexes
idx_study_processed_data primary key ON study_id, processed_data_id
idx_study_processed_data_0 ON study_id
pk_study_processed_data unique ON processed_data_id
Foreign Keys
fk_study_processed_data ( study_id ) ref study (study_id)
fk_study_processed_data_0 ( processed_data_id ) ref processed_data (processed_data_id)
+ + \ No newline at end of file diff --git a/qiita_db/support_files/qiita-db.sql b/qiita_db/support_files/qiita-db.sql new file mode 100644 index 000000000..0ce13a8bd --- /dev/null +++ b/qiita_db/support_files/qiita-db.sql @@ -0,0 +1,882 @@ +CREATE SCHEMA qiita; + +CREATE TABLE qiita.analysis_status ( + analysis_status_id bigserial NOT NULL, + status varchar NOT NULL, + CONSTRAINT pk_analysis_status PRIMARY KEY ( analysis_status_id ) + ); + +CREATE TABLE qiita.checksum_algorithm ( + checksum_algorithm_id bigserial NOT NULL, + name varchar NOT NULL, + CONSTRAINT pk_checksum_algorithm PRIMARY KEY ( checksum_algorithm_id ) + ); + +CREATE TABLE qiita.command ( + command_id bigserial NOT NULL, + name varchar NOT NULL, + command varchar NOT NULL, + input varchar NOT NULL, + required varchar NOT NULL, + optional varchar NOT NULL, + output varchar NOT NULL, + CONSTRAINT pk_command PRIMARY KEY ( command_id ) + ); + +COMMENT ON TABLE qiita.command IS 'Available commands for jobs'; + +COMMENT ON COLUMN qiita.command.command_id IS 'Unique identifier for function'; + +COMMENT ON COLUMN qiita.command.command IS 'What command to call to run this function'; + +COMMENT ON COLUMN qiita.command.input IS 'JSON of input options for the command'; + +COMMENT ON COLUMN qiita.command.required IS 'JSON of required options for the command'; + +COMMENT ON COLUMN qiita.command.optional IS 'JSON of optional options for command'; + +COMMENT ON COLUMN qiita.command.output IS 'JSON of output options for the command'; + +CREATE TABLE qiita.controlled_vocabularies ( + controlled_vocab_id bigserial NOT NULL, + vocab_name varchar NOT NULL, + CONSTRAINT pk_controlled_vocabularies PRIMARY KEY ( controlled_vocab_id ) + ); + +CREATE TABLE qiita.data_type ( + data_type_id bigserial NOT NULL, + data_type varchar NOT NULL, + CONSTRAINT pk_data_type PRIMARY KEY ( data_type_id ) + ); + +COMMENT ON COLUMN qiita.data_type.data_type IS 'Data type (16S, metabolome, etc) the job will use'; + +CREATE TABLE 
qiita.emp_status ( + emp_status_id bigserial NOT NULL, + emp_status varchar NOT NULL, + CONSTRAINT pk_emp_status PRIMARY KEY ( emp_status_id ) + ); + +COMMENT ON TABLE qiita.emp_status IS 'All possible statuses for projects relating to EMP. Whether they are part of, processed in accordance to, or not part of EMP.'; + +CREATE TABLE qiita.filepath_type ( + filepath_type_id bigserial NOT NULL, + filepath_type varchar , + CONSTRAINT pk_filepath_type PRIMARY KEY ( filepath_type_id ) + ); + +CREATE TABLE qiita.filetype ( + filetype_id bigserial NOT NULL, + type varchar NOT NULL, + CONSTRAINT pk_filetype PRIMARY KEY ( filetype_id ) + ); + +COMMENT ON TABLE qiita.filetype IS 'Type of file (FASTA, FASTQ, SPECTRA, etc)'; + +CREATE TABLE qiita.job_status ( + job_status_id bigserial NOT NULL, + status varchar NOT NULL, + CONSTRAINT pk_job_status PRIMARY KEY ( job_status_id ) + ); + +CREATE TABLE qiita.mixs_field_description ( + column_name varchar NOT NULL, + data_type varchar NOT NULL, + desc_or_value varchar NOT NULL, + definition varchar NOT NULL, + min_length integer , + active integer NOT NULL, + CONSTRAINT pk_mixs_field_description PRIMARY KEY ( column_name ) + ); + +CREATE TABLE qiita.ontology ( + ontology_id bigserial NOT NULL, + shortname varchar NOT NULL, + fully_loaded bool NOT NULL, + fullname varchar , + query_url varchar , + source_url varchar , + definition text , + load_date date NOT NULL, + version varchar , + CONSTRAINT pk_ontology PRIMARY KEY ( ontology_id ) + ); + +CREATE TABLE qiita.portal_type ( + portal_type_id bigserial NOT NULL, + portal varchar NOT NULL, + description varchar NOT NULL, + CONSTRAINT pk_portal_type PRIMARY KEY ( portal_type_id ) + ); + +COMMENT ON TABLE qiita.portal_type IS 'What portals are available to show a study in'; + +CREATE TABLE qiita.preprocessed_data ( + preprocessed_data_id bigserial NOT NULL, + preprocessed_params_table varchar NOT NULL, + preprocessed_params_id bigint NOT NULL, + submitted_to_insdc bool NOT NULL, + 
CONSTRAINT pk_preprocessed_data PRIMARY KEY ( preprocessed_data_id ) + ); + +COMMENT ON COLUMN qiita.preprocessed_data.preprocessed_params_table IS 'Name of table holding the params'; + +CREATE TABLE qiita.preprocessed_sequence_454_params ( + preprocessed_params_id bigserial NOT NULL, + trim_length integer NOT NULL, + CONSTRAINT pk_preprocessed_sequence_454_params PRIMARY KEY ( preprocessed_params_id ) + ); + +COMMENT ON TABLE qiita.preprocessed_sequence_454_params IS 'Parameters used for processing sequence data.'; + +CREATE TABLE qiita.preprocessed_sequence_illumina_params ( + preprocessed_params_id bigserial NOT NULL, + trim_length integer NOT NULL, + max_bad_run_length integer DEFAULT 3 NOT NULL, + min_per_read_length_fraction real DEFAULT 0.75 NOT NULL, + sequence_max_n integer DEFAULT 0 NOT NULL, + CONSTRAINT pk_preprocessed_sequence_illumina_params PRIMARY KEY ( preprocessed_params_id ) + ); + +COMMENT ON TABLE qiita.preprocessed_sequence_illumina_params IS 'Parameters used for processing illumina sequence data.'; + +CREATE TABLE qiita.preprocessed_spectra_params ( + preprocessed_params_id bigserial NOT NULL, + col varchar , + CONSTRAINT pk_preprocessed_spectra_params PRIMARY KEY ( preprocessed_params_id ) + ); + +COMMENT ON TABLE qiita.preprocessed_spectra_params IS 'Parameters used for processing spectra data.'; + +CREATE TABLE qiita.processed_data ( + processed_data_id bigserial NOT NULL, + processed_params_table varchar NOT NULL, + processed_params_id bigint NOT NULL, + processed_date timestamp NOT NULL, + CONSTRAINT pk_processed_data PRIMARY KEY ( processed_data_id ) + ); + +COMMENT ON COLUMN qiita.processed_data.processed_params_table IS 'Name of table holding processing params'; + +COMMENT ON COLUMN qiita.processed_data.processed_params_id IS 'Link to a table with the parameters used to generate processed data'; + +CREATE TABLE qiita.raw_data ( + raw_data_id bigserial NOT NULL, + filetype_id bigint NOT NULL, + CONSTRAINT pk_raw_data UNIQUE ( 
raw_data_id ) , + CONSTRAINT fk_raw_data_filetype FOREIGN KEY ( filetype_id ) REFERENCES qiita.filetype( filetype_id ) + ); + +CREATE INDEX idx_raw_data ON qiita.raw_data ( filetype_id ); + +CREATE TABLE qiita.raw_data_prep_columns ( + raw_data_id bigint NOT NULL, + column_name varchar NOT NULL, + column_type varchar NOT NULL, + CONSTRAINT idx_raw_data_prep_columns PRIMARY KEY ( raw_data_id, column_name, column_type ), + CONSTRAINT fk_prep_columns_raw_data FOREIGN KEY ( raw_data_id ) REFERENCES qiita.raw_data( raw_data_id ) + ); + +CREATE INDEX idx_prep_columns ON qiita.raw_data_prep_columns ( raw_data_id ); + +COMMENT ON TABLE qiita.raw_data_prep_columns IS 'Holds the columns available for a given raw data prep'; + +CREATE TABLE qiita.raw_preprocessed_data ( + raw_data_id bigint NOT NULL, + preprocessed_data_id bigint NOT NULL, + CONSTRAINT idx_raw_preprocessed_data PRIMARY KEY ( raw_data_id, preprocessed_data_id ), + CONSTRAINT fk_raw_preprocessed_data FOREIGN KEY ( raw_data_id ) REFERENCES qiita.raw_data( raw_data_id ) , + CONSTRAINT fk_raw_preprocessed_data_0 FOREIGN KEY ( preprocessed_data_id ) REFERENCES qiita.preprocessed_data( preprocessed_data_id ) + ); + +CREATE INDEX idx_raw_preprocessed_data_0 ON qiita.raw_preprocessed_data ( raw_data_id ); + +CREATE INDEX idx_raw_preprocessed_data_1 ON qiita.raw_preprocessed_data ( preprocessed_data_id ); + +CREATE TABLE qiita.reference ( + reference_id bigserial NOT NULL, + reference_name varchar NOT NULL, + reference_version varchar , + sequence_filepath varchar NOT NULL, + taxonomy_filepath varchar , + tree_filepath varchar , + CONSTRAINT pk_reference PRIMARY KEY ( reference_id ) + ); + +CREATE TABLE qiita.relationship_type ( + relationship_type_id bigserial NOT NULL, + relationship_type varchar NOT NULL, + CONSTRAINT pk_relationship_type PRIMARY KEY ( relationship_type_id ) + ); + +CREATE TABLE qiita.required_sample_info_status ( + required_sample_info_status_id bigserial NOT NULL, + status varchar , + CONSTRAINT 
pk_sample_status PRIMARY KEY ( required_sample_info_status_id ) + ); + +CREATE TABLE qiita.severity ( + severity_id serial NOT NULL, + severity varchar NOT NULL, + CONSTRAINT pk_severity PRIMARY KEY ( severity_id ) + ); + +CREATE TABLE qiita.study_person ( + study_person_id bigserial NOT NULL, + name varchar NOT NULL, + email varchar NOT NULL, + address varchar(100) , + phone varchar , + CONSTRAINT pk_study_person PRIMARY KEY ( study_person_id ) + ); + +COMMENT ON TABLE qiita.study_person IS 'Contact information for the various people involved in a study'; + +CREATE TABLE qiita.study_status ( + study_status_id bigserial NOT NULL, + status varchar NOT NULL, + description varchar NOT NULL, + CONSTRAINT pk_study_status PRIMARY KEY ( study_status_id ) + ); + +CREATE TABLE qiita.term ( + term_id bigserial NOT NULL, + ontology_id bigint NOT NULL, + term_name varchar NOT NULL, + identifier varchar , + definition varchar , + namespace varchar , + is_obsolete bool DEFAULT 'false' NOT NULL, + is_root_term bool NOT NULL, + is_leaf bool NOT NULL, + CONSTRAINT pk_term PRIMARY KEY ( term_id ), + CONSTRAINT idx_term UNIQUE ( ontology_id ) , + CONSTRAINT fk_term_ontology FOREIGN KEY ( ontology_id ) REFERENCES qiita.ontology( ontology_id ) + ); + +CREATE TABLE qiita.term_path ( + term_path_id bigserial NOT NULL, + subject_term_id bigint NOT NULL, + predicate_term_id bigint NOT NULL, + object_term_id bigint NOT NULL, + ontology_id bigint NOT NULL, + relationship_type_id integer NOT NULL, + distance integer , + CONSTRAINT pk_term_path PRIMARY KEY ( term_path_id ), + CONSTRAINT fk_term_path_ontology FOREIGN KEY ( ontology_id ) REFERENCES qiita.ontology( ontology_id ) , + CONSTRAINT fk_term_path_relationship_type FOREIGN KEY ( relationship_type_id ) REFERENCES qiita.relationship_type( relationship_type_id ) , + CONSTRAINT fk_term_path_term_subject FOREIGN KEY ( subject_term_id ) REFERENCES qiita.term( term_id ) , + CONSTRAINT fk_term_path_term_predicate FOREIGN KEY ( predicate_term_id 
) REFERENCES qiita.term( term_id ) , + CONSTRAINT fk_term_path_term_object FOREIGN KEY ( object_term_id ) REFERENCES qiita.term( term_id ) + ); + +CREATE INDEX idx_term_path ON qiita.term_path ( ontology_id ); + +CREATE INDEX idx_term_path_relatonship ON qiita.term_path ( relationship_type_id ); + +CREATE INDEX idx_term_path_subject ON qiita.term_path ( subject_term_id ); + +CREATE INDEX idx_term_path_predicate ON qiita.term_path ( predicate_term_id ); + +CREATE INDEX idx_term_path_object ON qiita.term_path ( object_term_id ); + +CREATE TABLE qiita.term_relationship ( + term_relationship_id bigserial NOT NULL, + subject_term_id bigint NOT NULL, + predicate_term_id bigint NOT NULL, + object_term_id bigint NOT NULL, + ontology_id bigint NOT NULL, + CONSTRAINT pk_term_relationship PRIMARY KEY ( term_relationship_id ), + CONSTRAINT fk_term_relationship_subj_term FOREIGN KEY ( subject_term_id ) REFERENCES qiita.term( term_id ) , + CONSTRAINT fk_term_relationship_pred_term FOREIGN KEY ( predicate_term_id ) REFERENCES qiita.term( term_id ) , + CONSTRAINT fk_term_relationship_obj_term FOREIGN KEY ( object_term_id ) REFERENCES qiita.term( term_id ) , + CONSTRAINT fk_term_relationship_ontology FOREIGN KEY ( ontology_id ) REFERENCES qiita.ontology( ontology_id ) + ); + +CREATE INDEX idx_term_relationship_subject ON qiita.term_relationship ( subject_term_id ); + +CREATE INDEX idx_term_relationship_predicate ON qiita.term_relationship ( predicate_term_id ); + +CREATE INDEX idx_term_relationship_object ON qiita.term_relationship ( object_term_id ); + +CREATE INDEX idx_term_relationship_ontology ON qiita.term_relationship ( ontology_id ); + +CREATE TABLE qiita.term_synonym ( + synonym_id bigserial NOT NULL, + term_id bigint NOT NULL, + synonym_value varchar NOT NULL, + synonym_type_id bigint NOT NULL, + CONSTRAINT pk_term_synonym PRIMARY KEY ( synonym_id ), + CONSTRAINT fk_term_synonym_term FOREIGN KEY ( term_id ) REFERENCES qiita.term( term_id ) , + CONSTRAINT 
fk_term_synonym_type_term FOREIGN KEY ( synonym_type_id ) REFERENCES qiita.term( term_id ) + ); + +CREATE INDEX idx_term_synonym ON qiita.term_synonym ( term_id ); + +CREATE TABLE qiita.timeseries_type ( + timeseries_type_id bigserial NOT NULL, + timeseries_type varchar NOT NULL, + CONSTRAINT pk_timeseries_type PRIMARY KEY ( timeseries_type_id ) + ); + +CREATE TABLE qiita.user_level ( + user_level_id serial NOT NULL, + name varchar NOT NULL, + description text NOT NULL, + CONSTRAINT pk_user_level PRIMARY KEY ( user_level_id ) + ); + +COMMENT ON TABLE qiita.user_level IS 'Holds available user levels'; + +COMMENT ON COLUMN qiita.user_level.name IS 'One of the user levels (admin, user, guest, etc)'; + +CREATE TABLE qiita.annotation ( + annotation_id bigserial NOT NULL, + term_id bigint NOT NULL, + annotation_name varchar NOT NULL, + annotation_num_value bigint , + annotation_str_value varchar , + CONSTRAINT pk_annotation PRIMARY KEY ( annotation_id ), + CONSTRAINT fk_annotation_term FOREIGN KEY ( term_id ) REFERENCES qiita.term( term_id ) + ); + +CREATE INDEX idx_annotation ON qiita.annotation ( term_id ); + +CREATE TABLE qiita.column_controlled_vocabularies ( + controlled_vocab_id bigserial NOT NULL, + column_name varchar NOT NULL, + CONSTRAINT idx_column_controlled_vocabularies PRIMARY KEY ( controlled_vocab_id, column_name ), + CONSTRAINT fk_column_controlled_vocabularies FOREIGN KEY ( column_name ) REFERENCES qiita.mixs_field_description( column_name ) , + CONSTRAINT fk_column_controlled_vocab2 FOREIGN KEY ( controlled_vocab_id ) REFERENCES qiita.controlled_vocabularies( controlled_vocab_id ) + ); + +CREATE INDEX idx_column_controlled_vocabularies_0 ON qiita.column_controlled_vocabularies ( column_name ); + +CREATE INDEX idx_column_controlled_vocabularies_1 ON qiita.column_controlled_vocabularies ( controlled_vocab_id ); + +COMMENT ON TABLE qiita.column_controlled_vocabularies IS 'Table relates a column with a controlled vocabulary.'; + +CREATE TABLE 
qiita.column_ontology ( + column_name varchar NOT NULL, + ontology_short_name varchar NOT NULL, + bioportal_id integer NOT NULL, + ontology_branch_id integer NOT NULL, + CONSTRAINT idx_column_ontology PRIMARY KEY ( column_name, ontology_short_name ), + CONSTRAINT fk_column_ontology FOREIGN KEY ( column_name ) REFERENCES qiita.mixs_field_description( column_name ) + ); + +CREATE INDEX idx_column_ontology_0 ON qiita.column_ontology ( column_name ); + +COMMENT ON TABLE qiita.column_ontology IS 'This table relates a column with an ontology.'; + +CREATE TABLE qiita.controlled_vocab_values ( + vocab_value_id bigserial NOT NULL, + controlled_vocab_id bigint NOT NULL, + term varchar NOT NULL, + order_by varchar NOT NULL, + default_item varchar , + CONSTRAINT pk_controlled_vocab_values PRIMARY KEY ( vocab_value_id ), + CONSTRAINT fk_controlled_vocab_values FOREIGN KEY ( controlled_vocab_id ) REFERENCES qiita.controlled_vocabularies( controlled_vocab_id ) ON DELETE CASCADE ON UPDATE CASCADE + ); + +CREATE INDEX idx_controlled_vocab_values ON qiita.controlled_vocab_values ( controlled_vocab_id ); + +CREATE TABLE qiita.dbxref ( + dbxref_id bigserial NOT NULL, + term_id bigint NOT NULL, + dbname varchar NOT NULL, + accession varchar NOT NULL, + description varchar NOT NULL, + xref_type varchar NOT NULL, + CONSTRAINT pk_dbxref PRIMARY KEY ( dbxref_id ), + CONSTRAINT fk_dbxref_term FOREIGN KEY ( term_id ) REFERENCES qiita.term( term_id ) + ); + +CREATE INDEX idx_dbxref ON qiita.dbxref ( term_id ); + +CREATE TABLE qiita.filepath ( + filepath_id bigserial NOT NULL, + filepath varchar NOT NULL, + filepath_type_id bigint NOT NULL, + checksum varchar NOT NULL, + checksum_algorithm_id bigint NOT NULL, + CONSTRAINT pk_filepath PRIMARY KEY ( filepath_id ), + CONSTRAINT fk_filepath FOREIGN KEY ( filepath_type_id ) REFERENCES qiita.filepath_type( filepath_type_id ) , + CONSTRAINT fk_filepath_0 FOREIGN KEY ( checksum_algorithm_id ) REFERENCES qiita.checksum_algorithm( checksum_algorithm_id 
) + ); + +CREATE INDEX idx_filepath ON qiita.filepath ( filepath_type_id ); + +CREATE TABLE qiita.investigation ( + investigation_id bigserial NOT NULL, + name varchar NOT NULL, + description varchar NOT NULL, + contact_person_id bigint , + CONSTRAINT pk_investigation PRIMARY KEY ( investigation_id ), + CONSTRAINT fk_investigation_study_person FOREIGN KEY ( contact_person_id ) REFERENCES qiita.study_person( study_person_id ) + ); + +CREATE INDEX idx_investigation ON qiita.investigation ( contact_person_id ); + +COMMENT ON TABLE qiita.investigation IS 'Overarching investigation information. +An investigation comprises one or more individual studies.'; + +COMMENT ON COLUMN qiita.investigation.description IS 'Describes the overarching goal of the investigation'; + +CREATE TABLE qiita.logging ( + logging_id bigserial NOT NULL, + time timestamp NOT NULL, + severity_id integer NOT NULL, + msg varchar NOT NULL, + information varchar , + CONSTRAINT pk_logging PRIMARY KEY ( logging_id ), + CONSTRAINT fk_logging_severity FOREIGN KEY ( severity_id ) REFERENCES qiita.severity( severity_id ) + ); + +CREATE INDEX idx_logging_0 ON qiita.logging ( severity_id ); + +COMMENT ON COLUMN qiita.logging.time IS 'Time the error was thrown'; + +COMMENT ON COLUMN qiita.logging.msg IS 'Error message thrown'; + +COMMENT ON COLUMN qiita.logging.information IS 'Other applicable information (depending on error)'; + +CREATE TABLE qiita.preprocessed_filepath ( + preprocessed_data_id bigint NOT NULL, + filepath_id bigint NOT NULL, + CONSTRAINT idx_preprocessed_filepath PRIMARY KEY ( preprocessed_data_id, filepath_id ), + CONSTRAINT fk_preprocessed_filepath FOREIGN KEY ( preprocessed_data_id ) REFERENCES qiita.preprocessed_data( preprocessed_data_id ) , + CONSTRAINT fk_preprocessed_filepath_0 FOREIGN KEY ( filepath_id ) REFERENCES qiita.filepath( filepath_id ) + ); + +CREATE INDEX idx_preprocessed_filepath_0 ON qiita.preprocessed_filepath ( preprocessed_data_id ); + +CREATE INDEX 
idx_preprocessed_filepath_1 ON qiita.preprocessed_filepath ( filepath_id ); + +CREATE TABLE qiita.preprocessed_processed_data ( + preprocessed_data_id bigint NOT NULL, + processed_data_id bigint NOT NULL, + CONSTRAINT idx_preprocessed_processed_data PRIMARY KEY ( preprocessed_data_id, processed_data_id ), + CONSTRAINT fk_preprocessed_processed_data FOREIGN KEY ( preprocessed_data_id ) REFERENCES qiita.preprocessed_data( preprocessed_data_id ) , + CONSTRAINT fk_preprocessed_processed_data_0 FOREIGN KEY ( processed_data_id ) REFERENCES qiita.processed_data( processed_data_id ) + ); + +CREATE INDEX idx_preprocessed_processed_data_0 ON qiita.preprocessed_processed_data ( preprocessed_data_id ); + +CREATE INDEX idx_preprocessed_processed_data_1 ON qiita.preprocessed_processed_data ( processed_data_id ); + +CREATE TABLE qiita.processed_filepath ( + processed_data_id bigint NOT NULL, + filepath_id bigint NOT NULL, + CONSTRAINT idx_processed_filepath PRIMARY KEY ( processed_data_id, filepath_id ), + CONSTRAINT fk_processed_data_filepath FOREIGN KEY ( processed_data_id ) REFERENCES qiita.processed_data( processed_data_id ) , + CONSTRAINT fk_processed_data_filepath_0 FOREIGN KEY ( filepath_id ) REFERENCES qiita.filepath( filepath_id ) + ); + +CREATE TABLE qiita.processed_params_uclust ( + processed_params_id bigserial NOT NULL, + reference_id bigint NOT NULL, + similarity float8 DEFAULT 0.97 NOT NULL, + enable_rev_strand_match bool DEFAULT TRUE NOT NULL, + suppress_new_clusters bool DEFAULT TRUE NOT NULL, + CONSTRAINT pk_processed_params_uclust PRIMARY KEY ( processed_params_id ), + CONSTRAINT fk_processed_params_uclust FOREIGN KEY ( reference_id ) REFERENCES qiita.reference( reference_id ) + ); + +CREATE INDEX idx_processed_params_uclust ON qiita.processed_params_uclust ( reference_id ); + +COMMENT ON TABLE qiita.processed_params_uclust IS 'Parameters used for processing data using method uclust'; + +COMMENT ON COLUMN qiita.processed_params_uclust.reference_id IS 'What 
version of reference or type of reference used'; + +CREATE TABLE qiita.qiita_user ( + email varchar NOT NULL, + user_level_id integer DEFAULT 5 NOT NULL, + password varchar NOT NULL, + name varchar , + affiliation varchar , + address varchar , + phone varchar , + user_verify_code varchar , + pass_reset_code varchar , + pass_reset_timestamp timestamp , + CONSTRAINT pk_user PRIMARY KEY ( email ), + CONSTRAINT fk_user_user_level FOREIGN KEY ( user_level_id ) REFERENCES qiita.user_level( user_level_id ) ON UPDATE RESTRICT + ); + +CREATE INDEX idx_user ON qiita.qiita_user ( user_level_id ); + +COMMENT ON TABLE qiita.qiita_user IS 'Holds all user information'; + +COMMENT ON COLUMN qiita.qiita_user.user_level_id IS 'user level'; + +COMMENT ON COLUMN qiita.qiita_user.user_verify_code IS 'Code for initial user email verification'; + +COMMENT ON COLUMN qiita.qiita_user.pass_reset_code IS 'Randomly generated code for password reset'; + +COMMENT ON COLUMN qiita.qiita_user.pass_reset_timestamp IS 'Time the reset code was generated'; + +CREATE TABLE qiita.raw_filepath ( + raw_data_id bigint NOT NULL, + filepath_id bigint NOT NULL, + CONSTRAINT idx_raw_filepath PRIMARY KEY ( raw_data_id, filepath_id ), + CONSTRAINT fk_raw_filepath FOREIGN KEY ( filepath_id ) REFERENCES qiita.filepath( filepath_id ) , + CONSTRAINT fk_raw_filepath_0 FOREIGN KEY ( raw_data_id ) REFERENCES qiita.raw_data( raw_data_id ) + ); + +CREATE INDEX idx_raw_filepath_0 ON qiita.raw_filepath ( filepath_id ); + +CREATE INDEX idx_raw_filepath_1 ON qiita.raw_filepath ( raw_data_id ); + +CREATE TABLE qiita.study ( + study_id bigserial NOT NULL, + email varchar NOT NULL, + study_status_id bigint NOT NULL, + emp_person_id bigint , + first_contact varchar NOT NULL, + funding varchar , + timeseries_type_id bigint NOT NULL, + lab_person_id bigint , + metadata_complete bool NOT NULL, + mixs_compliant bool NOT NULL, + most_recent_contact varchar , + number_samples_collected integer NOT NULL, + number_samples_promised 
integer NOT NULL, + portal_type_id bigint NOT NULL, + principal_investigator_id bigint NOT NULL, + reprocess bool NOT NULL, + spatial_series bool , + study_title varchar NOT NULL, + study_alias varchar NOT NULL, + study_description text NOT NULL, + study_abstract text NOT NULL, + vamps_id varchar , + CONSTRAINT pk_study PRIMARY KEY ( study_id ), + CONSTRAINT fk_study_user FOREIGN KEY ( email ) REFERENCES qiita.qiita_user( email ) , + CONSTRAINT fk_study_study_status FOREIGN KEY ( study_status_id ) REFERENCES qiita.study_status( study_status_id ) , + CONSTRAINT fk_study_study_emp_person FOREIGN KEY ( emp_person_id ) REFERENCES qiita.study_person( study_person_id ) , + CONSTRAINT fk_study_study_lab_person FOREIGN KEY ( lab_person_id ) REFERENCES qiita.study_person( study_person_id ) , + CONSTRAINT fk_study_study_pi_person FOREIGN KEY ( principal_investigator_id ) REFERENCES qiita.study_person( study_person_id ) , + CONSTRAINT fk_study_timeseries_type FOREIGN KEY ( timeseries_type_id ) REFERENCES qiita.timeseries_type( timeseries_type_id ) , + CONSTRAINT fk_study FOREIGN KEY ( portal_type_id ) REFERENCES qiita.portal_type( portal_type_id ) + ); + +CREATE INDEX idx_study ON qiita.study ( email ); + +CREATE INDEX idx_study_0 ON qiita.study ( study_status_id ); + +CREATE INDEX idx_study_1 ON qiita.study ( emp_person_id ); + +CREATE INDEX idx_study_2 ON qiita.study ( lab_person_id ); + +CREATE INDEX idx_study_3 ON qiita.study ( principal_investigator_id ); + +CREATE INDEX idx_study_4 ON qiita.study ( timeseries_type_id ); + +CREATE INDEX idx_study_5 ON qiita.study ( portal_type_id ); + +COMMENT ON COLUMN qiita.study.study_id IS 'Unique name for study'; + +COMMENT ON COLUMN qiita.study.email IS 'Email of study owner'; + +COMMENT ON COLUMN qiita.study.timeseries_type_id IS 'What type of timeseries this study is (or is not) +Controlled Vocabulary'; + +CREATE TABLE qiita.study_experimental_factor ( + study_id bigint NOT NULL, + efo_id bigint NOT NULL, + CONSTRAINT 
idx_study_experimental_factor PRIMARY KEY ( study_id, efo_id ), + CONSTRAINT fk_study_experimental_factor FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) + ); + +CREATE INDEX idx_study_experimental_factor_0 ON qiita.study_experimental_factor ( study_id ); + +COMMENT ON TABLE qiita.study_experimental_factor IS 'EFO ontological link of experimental factors to studies'; + +CREATE TABLE qiita.study_pmid ( + study_id bigint NOT NULL, + pmid varchar NOT NULL, + CONSTRAINT idx_study_pmid PRIMARY KEY ( study_id, pmid ), + CONSTRAINT fk_study_pmid_study FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) + ); + +CREATE INDEX idx_study_pmid_0 ON qiita.study_pmid ( study_id ); + +COMMENT ON TABLE qiita.study_pmid IS 'Links a study to all PMIDs for papers created from study'; + +CREATE TABLE qiita.study_preprocessed_data ( + study_id bigint NOT NULL, + preprocessed_data_id bigint NOT NULL, + CONSTRAINT idx_study_preprocessed_data PRIMARY KEY ( study_id, preprocessed_data_id ), + CONSTRAINT fk_study_preprocessed_data FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) , + CONSTRAINT fk_study_preprocessed_data_0 FOREIGN KEY ( preprocessed_data_id ) REFERENCES qiita.preprocessed_data( preprocessed_data_id ) + ); + +CREATE INDEX idx_study_preprocessed_data_0 ON qiita.study_preprocessed_data ( study_id ); + +CREATE INDEX idx_study_preprocessed_data_1 ON qiita.study_preprocessed_data ( preprocessed_data_id ); + +CREATE TABLE qiita.study_processed_data ( + study_id bigint NOT NULL, + processed_data_id bigint NOT NULL, + CONSTRAINT idx_study_processed_data PRIMARY KEY ( study_id, processed_data_id ), + CONSTRAINT pk_study_processed_data UNIQUE ( processed_data_id ) , + CONSTRAINT fk_study_processed_data FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) , + CONSTRAINT fk_study_processed_data_0 FOREIGN KEY ( processed_data_id ) REFERENCES qiita.processed_data( processed_data_id ) + ); + +CREATE INDEX idx_study_processed_data_0 ON 
qiita.study_processed_data ( study_id ); + +CREATE TABLE qiita.study_raw_data ( + study_id bigint NOT NULL, + raw_data_id bigint NOT NULL, + CONSTRAINT idx_study_raw_data_0 PRIMARY KEY ( study_id, raw_data_id ), + CONSTRAINT fk_study_raw_data_study FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) , + CONSTRAINT fk_study_raw_data_raw_data FOREIGN KEY ( raw_data_id ) REFERENCES qiita.raw_data( raw_data_id ) + ); + +CREATE INDEX idx_study_raw_data ON qiita.study_raw_data ( study_id ); + +COMMENT ON TABLE qiita.study_raw_data IS 'links study to its raw data'; + +CREATE TABLE qiita.study_sample_columns ( + study_id bigint NOT NULL, + column_name varchar(100) NOT NULL, + column_type varchar NOT NULL, + CONSTRAINT idx_study_mapping_columns PRIMARY KEY ( study_id, column_name, column_type ), + CONSTRAINT fk_study_mapping_columns_study FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) + ); + +CREATE INDEX idx_study_mapping_columns_study_id ON qiita.study_sample_columns ( study_id ); + +COMMENT ON TABLE qiita.study_sample_columns IS 'Holds information on which metadata columns are available for the study sample template'; + +CREATE TABLE qiita.study_users ( + study_id bigint NOT NULL, + email varchar NOT NULL, + CONSTRAINT idx_study_users PRIMARY KEY ( study_id, email ), + CONSTRAINT fk_study_users_study FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) , + CONSTRAINT fk_study_users_user FOREIGN KEY ( email ) REFERENCES qiita.qiita_user( email ) + ); + +CREATE INDEX idx_study_users_0 ON qiita.study_users ( study_id ); + +CREATE INDEX idx_study_users_1 ON qiita.study_users ( email ); + +COMMENT ON TABLE qiita.study_users IS 'Links shared studies to users they are shared with'; + +CREATE TABLE qiita.analysis ( + analysis_id bigserial NOT NULL, + email varchar NOT NULL, + name varchar NOT NULL, + description varchar NOT NULL, + analysis_status_id bigint NOT NULL, + pmid varchar , + CONSTRAINT pk_analysis PRIMARY KEY ( analysis_id ), + CONSTRAINT 
fk_analysis_user FOREIGN KEY ( email ) REFERENCES qiita.qiita_user( email ) , + CONSTRAINT fk_analysis_analysis_status FOREIGN KEY ( analysis_status_id ) REFERENCES qiita.analysis_status( analysis_status_id ) + ); + +CREATE INDEX idx_analysis_email ON qiita.analysis ( email ); + +CREATE INDEX idx_analysis_status_id ON qiita.analysis ( analysis_status_id ); + +COMMENT ON TABLE qiita.analysis IS 'Holds analysis information'; + +COMMENT ON COLUMN qiita.analysis.analysis_id IS 'Unique identifier for analysis'; + +COMMENT ON COLUMN qiita.analysis.email IS 'Email for user who owns the analysis'; + +COMMENT ON COLUMN qiita.analysis.name IS 'Name of the analysis'; + +COMMENT ON COLUMN qiita.analysis.pmid IS 'PMID of paper from the analysis'; + +CREATE TABLE qiita.analysis_chain ( + parent_id bigint NOT NULL, + child_id bigint NOT NULL, + CONSTRAINT idx_analysis_chain_1 PRIMARY KEY ( parent_id, child_id ), + CONSTRAINT fk_analysis_chain FOREIGN KEY ( parent_id ) REFERENCES qiita.analysis( analysis_id ) , + CONSTRAINT fk_analysis_chain_0 FOREIGN KEY ( child_id ) REFERENCES qiita.analysis( analysis_id ) + ); + +CREATE INDEX idx_analysis_chain ON qiita.analysis_chain ( parent_id ); + +CREATE INDEX idx_analysis_chain_0 ON qiita.analysis_chain ( child_id ); + +COMMENT ON TABLE qiita.analysis_chain IS 'Keeps track of the chain of analysis edits. Tracks what previous analysis a given analysis came from. +If a given analysis is not in child_id, it is the root of the chain. 
'; + +CREATE TABLE qiita.analysis_filepath ( + analysis_id bigint NOT NULL, + filepath_id bigint NOT NULL, + CONSTRAINT idx_analysis_filepath_1 PRIMARY KEY ( analysis_id, filepath_id ), + CONSTRAINT fk_analysis_filepath FOREIGN KEY ( analysis_id ) REFERENCES qiita.analysis( analysis_id ) , + CONSTRAINT fk_analysis_filepath_0 FOREIGN KEY ( filepath_id ) REFERENCES qiita.filepath( filepath_id ) + ); + +CREATE INDEX idx_analysis_filepath ON qiita.analysis_filepath ( analysis_id ); + +CREATE INDEX idx_analysis_filepath_0 ON qiita.analysis_filepath ( filepath_id ); + +COMMENT ON TABLE qiita.analysis_filepath IS 'Stores link between analysis and the data file used for the analysis.'; + +CREATE TABLE qiita.analysis_users ( + analysis_id bigint NOT NULL, + email varchar NOT NULL, + CONSTRAINT idx_analysis_users PRIMARY KEY ( analysis_id, email ), + CONSTRAINT fk_analysis_users_analysis FOREIGN KEY ( analysis_id ) REFERENCES qiita.analysis( analysis_id ) ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT fk_analysis_users_user FOREIGN KEY ( email ) REFERENCES qiita.qiita_user( email ) ON DELETE CASCADE ON UPDATE CASCADE + ); + +CREATE INDEX idx_analysis_users_analysis ON qiita.analysis_users ( analysis_id ); + +CREATE INDEX idx_analysis_users_email ON qiita.analysis_users ( email ); + +COMMENT ON TABLE qiita.analysis_users IS 'Links analyses to the users they are shared with'; + +CREATE TABLE qiita.investigation_study ( + investigation_id bigint NOT NULL, + study_id bigint NOT NULL, + CONSTRAINT idx_investigation_study PRIMARY KEY ( investigation_id, study_id ), + CONSTRAINT fk_investigation_study FOREIGN KEY ( investigation_id ) REFERENCES qiita.investigation( investigation_id ) , + CONSTRAINT fk_investigation_study_study FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) + ); + +CREATE INDEX idx_investigation_study_investigation ON qiita.investigation_study ( investigation_id ); + +CREATE INDEX idx_investigation_study_study ON qiita.investigation_study ( study_id 
); + +CREATE TABLE qiita.job ( + job_id bigserial NOT NULL, + data_type_id bigint NOT NULL, + job_status_id bigint NOT NULL, + command_id bigint NOT NULL, + options varchar , + log_id bigint , + CONSTRAINT pk_job PRIMARY KEY ( job_id ), + CONSTRAINT fk_job_function FOREIGN KEY ( command_id ) REFERENCES qiita.command( command_id ) , + CONSTRAINT fk_job_job_status_id FOREIGN KEY ( job_status_id ) REFERENCES qiita.job_status( job_status_id ) , + CONSTRAINT fk_job_data_type FOREIGN KEY ( data_type_id ) REFERENCES qiita.data_type( data_type_id ) , + CONSTRAINT fk_job FOREIGN KEY ( log_id ) REFERENCES qiita.logging( logging_id ) + ); + +CREATE INDEX idx_job_command ON qiita.job ( command_id ); + +CREATE INDEX idx_job_status ON qiita.job ( job_status_id ); + +CREATE INDEX idx_job_type ON qiita.job ( data_type_id ); + +CREATE INDEX idx_job ON qiita.job ( log_id ); + +COMMENT ON COLUMN qiita.job.job_id IS 'Unique identifier for job'; + +COMMENT ON COLUMN qiita.job.data_type_id IS 'What datatype (16s, metabolome, etc) job is run on.'; + +COMMENT ON COLUMN qiita.job.command_id IS 'The Qiime or other function being run (alpha diversity, etc)'; + +COMMENT ON COLUMN qiita.job.options IS 'Holds all options set for the job as a json string'; + +COMMENT ON COLUMN qiita.job.log_id IS 'Reference to error if status is error'; + +CREATE TABLE qiita.job_results_filepath ( + job_id bigint NOT NULL, + filepath_id bigint NOT NULL, + CONSTRAINT idx_job_results_filepath PRIMARY KEY ( job_id, filepath_id ), + CONSTRAINT fk_job_results_filepath FOREIGN KEY ( job_id ) REFERENCES qiita.job( job_id ) , + CONSTRAINT fk_job_results_filepath_0 FOREIGN KEY ( filepath_id ) REFERENCES qiita.filepath( filepath_id ) + ); + +CREATE INDEX idx_job_results_filepath_0 ON qiita.job_results_filepath ( job_id ); + +CREATE INDEX idx_job_results_filepath_1 ON qiita.job_results_filepath ( filepath_id ); + +COMMENT ON TABLE qiita.job_results_filepath IS 'Holds connection between jobs and the result filepaths'; + 
+CREATE TABLE qiita.required_sample_info ( + study_id bigint NOT NULL, + sample_id varchar NOT NULL, + physical_location varchar NOT NULL, + has_physical_specimen bool NOT NULL, + has_extracted_data bool NOT NULL, + sample_type varchar NOT NULL, + required_sample_info_status_id bigint NOT NULL, + collection_timestamp timestamp NOT NULL, + host_subject_id varchar NOT NULL, + description varchar NOT NULL, + CONSTRAINT idx_common_sample_information PRIMARY KEY ( study_id, sample_id ), + CONSTRAINT pk_required_sample_info UNIQUE ( sample_id ) , + CONSTRAINT fk_required_sample_info_study FOREIGN KEY ( study_id ) REFERENCES qiita.study( study_id ) , + CONSTRAINT fk_required_sample_info FOREIGN KEY ( required_sample_info_status_id ) REFERENCES qiita.required_sample_info_status( required_sample_info_status_id ) + ); + +CREATE INDEX idx_required_sample_info ON qiita.required_sample_info ( study_id ); + +CREATE INDEX idx_required_sample_info_0 ON qiita.required_sample_info ( required_sample_info_status_id ); + +COMMENT ON TABLE qiita.required_sample_info IS 'Required info for each sample. 
One row is one sample.'; + +COMMENT ON COLUMN qiita.required_sample_info.physical_location IS 'Where the sample itself is stored'; + +COMMENT ON COLUMN qiita.required_sample_info.has_physical_specimen IS 'Whether we have the full specimen or just DNA'; + +COMMENT ON COLUMN qiita.required_sample_info.sample_type IS 'Controlled vocabulary of sample types'; + +COMMENT ON COLUMN qiita.required_sample_info.required_sample_info_status_id IS 'What step of the pipeline the samples are in'; + +CREATE TABLE qiita.analysis_job ( + analysis_id bigint NOT NULL, + job_id bigint NOT NULL, + CONSTRAINT idx_analysis_jobs PRIMARY KEY ( analysis_id, job_id ), + CONSTRAINT fk_analysis_job_analysis FOREIGN KEY ( analysis_id ) REFERENCES qiita.analysis( analysis_id ) ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT fk_analysis_job_job FOREIGN KEY ( job_id ) REFERENCES qiita.job( job_id ) + ); + +CREATE INDEX idx_analysis_job ON qiita.analysis_job ( analysis_id ); + +CREATE INDEX idx_analysis_job_0 ON qiita.analysis_job ( job_id ); + +COMMENT ON TABLE qiita.analysis_job IS 'Holds information for a one-to-many relation of analysis to the jobs in it'; + +COMMENT ON COLUMN qiita.analysis_job.analysis_id IS 'Id of the analysis'; + +COMMENT ON COLUMN qiita.analysis_job.job_id IS 'Id for a job that is part of the analysis'; + +CREATE TABLE qiita.analysis_sample ( + analysis_id bigint NOT NULL, + processed_data_id bigint NOT NULL, + sample_id varchar NOT NULL, + CONSTRAINT fk_analysis_sample_analysis FOREIGN KEY ( analysis_id ) REFERENCES qiita.analysis( analysis_id ) , + CONSTRAINT fk_analysis_sample FOREIGN KEY ( processed_data_id ) REFERENCES qiita.processed_data( processed_data_id ) , + CONSTRAINT fk_analysis_sample_0 FOREIGN KEY ( sample_id ) REFERENCES qiita.required_sample_info( sample_id ) + ); + +CREATE INDEX idx_analysis_sample ON qiita.analysis_sample ( analysis_id ); + +CREATE INDEX idx_analysis_sample_0 ON qiita.analysis_sample ( processed_data_id ); + +CREATE INDEX 
idx_analysis_sample_1 ON qiita.analysis_sample ( sample_id ); + +CREATE TABLE qiita.common_prep_info ( + raw_data_id bigserial NOT NULL, + sample_id varchar NOT NULL, + center_name varchar , + center_project_name varchar , + ebi_submission_accession varchar , + ebi_study_accession varchar , + emp_status_id bigint NOT NULL, + data_type_id bigint NOT NULL, + CONSTRAINT idx_required_prep_info_1 PRIMARY KEY ( raw_data_id, sample_id ), + CONSTRAINT fk_required_prep_info_raw_data FOREIGN KEY ( raw_data_id ) REFERENCES qiita.raw_data( raw_data_id ) , + CONSTRAINT fk_required_prep_info_emp_status FOREIGN KEY ( emp_status_id ) REFERENCES qiita.emp_status( emp_status_id ) , + CONSTRAINT fk_required_prep_info FOREIGN KEY ( sample_id ) REFERENCES qiita.required_sample_info( sample_id ) , + CONSTRAINT fk_required_prep_info_0 FOREIGN KEY ( data_type_id ) REFERENCES qiita.data_type( data_type_id ) + ); + +CREATE INDEX idx_required_prep_info ON qiita.common_prep_info ( raw_data_id ); + +CREATE INDEX idx_required_prep_info_0 ON qiita.common_prep_info ( emp_status_id ); + +CREATE INDEX idx_required_prep_info_2 ON qiita.common_prep_info ( sample_id ); + +CREATE INDEX idx_required_prep_info_3 ON qiita.common_prep_info ( data_type_id ); + diff --git a/qiita_db/support_files/test_data/job/1_job_result.txt b/qiita_db/support_files/test_data/job/1_job_result.txt new file mode 100644 index 000000000..82ff6f913 --- /dev/null +++ b/qiita_db/support_files/test_data/job/1_job_result.txt @@ -0,0 +1 @@ +job1result.txt \ No newline at end of file diff --git a/qiita_db/support_files/test_data/job/2_test_folder/testfile.txt b/qiita_db/support_files/test_data/job/2_test_folder/testfile.txt new file mode 100644 index 000000000..418d6c385 --- /dev/null +++ b/qiita_db/support_files/test_data/job/2_test_folder/testfile.txt @@ -0,0 +1 @@ +DATA \ No newline at end of file diff --git a/qiita_db/support_files/test_data/preprocessed_data/seqs.fna b/qiita_db/support_files/test_data/preprocessed_data/seqs.fna 
new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/qiita_db/support_files/test_data/preprocessed_data/seqs.fna @@ -0,0 +1 @@ + diff --git a/qiita_db/support_files/test_data/preprocessed_data/seqs.qual b/qiita_db/support_files/test_data/preprocessed_data/seqs.qual new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/qiita_db/support_files/test_data/preprocessed_data/seqs.qual @@ -0,0 +1 @@ + diff --git a/qiita_db/support_files/test_data/processed_data/study_1001_closed_reference_otu_table.biom b/qiita_db/support_files/test_data/processed_data/study_1001_closed_reference_otu_table.biom new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/qiita_db/support_files/test_data/processed_data/study_1001_closed_reference_otu_table.biom @@ -0,0 +1 @@ + diff --git a/qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.fasta b/qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.fasta new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.fasta @@ -0,0 +1 @@ + diff --git a/qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.tre b/qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.tre new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/qiita_db/support_files/test_data/reference/gg_97_otus_4feb2011.tre @@ -0,0 +1 @@ + diff --git a/qiita_db/support_files/test_data/reference/greengenes_tax.txt b/qiita_db/support_files/test_data/reference/greengenes_tax.txt new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/qiita_db/support_files/test_data/reference/greengenes_tax.txt @@ -0,0 +1 @@ + diff --git a/qiita_db/support_files/test_data/reference/params_qiime.txt b/qiita_db/support_files/test_data/reference/params_qiime.txt new file mode 100644 index 000000000..412b80d12 --- /dev/null +++ b/qiita_db/support_files/test_data/reference/params_qiime.txt @@ -0,0 +1 @@ +beta_diversity:metrics 
bray_curtis,euclidean diff --git a/qiita_db/support_files/work_data/placeholder.txt b/qiita_db/support_files/work_data/placeholder.txt new file mode 100644 index 000000000..fd2cb23fd --- /dev/null +++ b/qiita_db/support_files/work_data/placeholder.txt @@ -0,0 +1 @@ +placeholder.txt \ No newline at end of file diff --git a/qiita_db/test/__init__.py b/qiita_db/test/__init__.py new file mode 100644 index 000000000..a659159bc --- /dev/null +++ b/qiita_db/test/__init__.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +__version__ = "0.0.1-dev" diff --git a/qiita_db/test/test_analysis.py b/qiita_db/test/test_analysis.py new file mode 100644 index 000000000..bda1911ee --- /dev/null +++ b/qiita_db/test/test_analysis.py @@ -0,0 +1,146 @@ +from unittest import TestCase, main + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from qiita_core.util import qiita_test_checker +from qiita_db.analysis import Analysis +from qiita_db.job import Job +from qiita_db.user import User +from qiita_db.data import ProcessedData +from qiita_db.exceptions import (QiitaDBDuplicateError, QiitaDBColumnError, + QiitaDBStatusError) +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + + +@qiita_test_checker() +class TestAnalysis(TestCase): + def setUp(self): + self.analysis = Analysis(1) + + def test_lock_check(self): + self.analysis.status = "public" + with self.assertRaises(QiitaDBStatusError): + self.analysis._lock_check(self.conn_handler) + + def test_lock_check_ok(self): + self.analysis.status = "queued" + self.analysis._lock_check(self.conn_handler) + + def test_create(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis") + self.assertEqual(new.id, 3) + sql = "SELECT * FROM qiita.analysis WHERE analysis_id = 3" + obs = self.conn_handler.execute_fetchall(sql) + self.assertEqual(obs, [[3, 'admin@foo.bar', 'newAnalysis', + 'A New Analysis', 1, None]]) + + def test_create_parent(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis", Analysis(1)) + self.assertEqual(new.id, 3) + sql = "SELECT * FROM qiita.analysis WHERE analysis_id = 3" + obs = self.conn_handler.execute_fetchall(sql) + self.assertEqual(obs, [[3, 'admin@foo.bar', 'newAnalysis', + 'A New Analysis', 1, None]]) + + sql = "SELECT * FROM qiita.analysis_chain WHERE child_id = 3" + obs = self.conn_handler.execute_fetchall(sql) + self.assertEqual(obs, [[1, 3]]) + + def test_retrieve_owner(self): + self.assertEqual(self.analysis.owner, "test@foo.bar") + + def test_retrieve_name(self): + self.assertEqual(self.analysis.name, "SomeAnalysis") + + def test_retrieve_description(self): + self.assertEqual(self.analysis.description, "A test analysis") + + def test_set_description(self): + self.analysis.description = "New description" + self.assertEqual(self.analysis.description, "New description") + + def test_retrieve_samples(self): + exp = {1: ['SKB8.640193', 'SKD8.640184', 'SKB7.640196', + 'SKM9.640192', 'SKM4.640180']} + self.assertEqual(self.analysis.samples, exp) + + def test_retrieve_shared_with(self): + 
self.assertEqual(self.analysis.shared_with, ["shared@foo.bar"]) + + def test_retrieve_biom_tables(self): + self.assertEqual(self.analysis.biom_tables, [7]) + + def test_retrieve_biom_tables_none(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis", Analysis(1)) + self.assertEqual(new.biom_tables, None) + + def test_retrieve_jobs(self): + self.assertEqual(self.analysis.jobs, [1, 2]) + + def test_retrieve_jobs_none(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis", Analysis(1)) + self.assertEqual(new.jobs, None) + + def test_retrieve_pmid(self): + self.assertEqual(self.analysis.pmid, "121112") + + def test_retrieve_pmid_none(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis", Analysis(1)) + self.assertEqual(new.pmid, None) + + def test_set_pmid(self): + self.analysis.pmid = "11211221212213" + self.assertEqual(self.analysis.pmid, "11211221212213") + + # def test_get_parent(self): + # raise NotImplementedError() + + # def test_get_children(self): + # raise NotImplementedError() + + def test_add_samples(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis") + new.add_samples([(1, 'SKB8.640193')]) + + def test_remove_samples(self): + self.analysis.remove_samples([(1, 'SKB8.640193'), (1, 'SKD8.640184')]) + + def test_add_biom_tables(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis") + new.add_biom_tables([ProcessedData(1)]) + self.assertEqual(new.biom_tables, [7]) + + def test_remove_biom_tables(self): + self.analysis.remove_biom_tables([ProcessedData(1)]) + self.assertEqual(self.analysis.biom_tables, None) + + def test_add_jobs(self): + new = Analysis.create(User("admin@foo.bar"), "newAnalysis", + "A New Analysis") + new.add_jobs([Job(1)]) + self.assertEqual(new.jobs, [1]) + + def test_share(self): + self.analysis.share(User("admin@foo.bar")) + self.assertEqual(self.analysis.shared_with, 
["shared@foo.bar", + "admin@foo.bar"]) + + def test_unshare(self): + self.analysis.unshare(User("shared@foo.bar")) + self.assertEqual(self.analysis.shared_with, []) + + +if __name__ == "__main__": + main() diff --git a/qiita_db/test/test_base.py b/qiita_db/test/test_base.py new file mode 100644 index 000000000..c1861227c --- /dev/null +++ b/qiita_db/test/test_base.py @@ -0,0 +1,161 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from unittest import TestCase, main + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from qiita_core.util import qiita_test_checker +from qiita_db.base import QiitaObject, QiitaStatusObject +from qiita_db.exceptions import QiitaDBUnknownIDError +from qiita_db.data import RawData +from qiita_db.study import Study + + +@qiita_test_checker() +class QiitaBaseTest(TestCase): + """Tests that the base class functions act correctly""" + + def setUp(self): + # We need an actual subclass in order to test the equality functions + self.tester = RawData(1) + + def test_init_base_error(self): + """Raises an error when instantiating a base class directly""" + with self.assertRaises(IncompetentQiitaDeveloperError): + QiitaObject(1) + + def test_init_error_inexistent(self): + """Raises an error when instantiating an object that does not exist""" + with self.assertRaises(QiitaDBUnknownIDError): + RawData(10) + + def test_check_subclass(self): + """Nothing happens if check_subclass called from a subclass""" + self.tester._check_subclass() + + def test_check_subclass_error(self): + """check_subclass raises an error if called from a base class""" + # Checked through the __init__ call + with 
self.assertRaises(IncompetentQiitaDeveloperError): + QiitaObject(1) + with self.assertRaises(IncompetentQiitaDeveloperError): + QiitaStatusObject(1) + + def test_check_id(self): + """Correctly checks if an id exists on the database""" + self.assertTrue(self.tester._check_id(1)) + self.assertFalse(self.tester._check_id(100)) + + def test_equal_self(self): + """Equality works with the same object""" + self.assertEqual(self.tester, self.tester) + + def test_equal(self): + """Equality works with two objects pointing to the same instance""" + new = RawData(1) + self.assertEqual(self.tester, new) + + def test_not_equal(self): + """Not equals works with object of the same type""" + new = RawData(2) + self.assertNotEqual(self.tester, new) + + def test_not_equal_type(self): + """Not equals works with object of different type""" + new = Study(1) + self.assertNotEqual(self.tester, new) + + +@qiita_test_checker() +class QiitaStatusObjectTest(TestCase): + """Tests that the QiitaStatusObject class functions act correctly""" + + def setUp(self): + # We need an actual subclass in order to test the equality functions + self.tester = Study(1) + + def test_status(self): + """Correctly returns the status of the object""" + self.assertEqual(self.tester.status, "public") + + def test_check_status_single(self): + """check_status works passing a single status""" + self.assertTrue(self.tester.check_status(["public"])) + self.assertTrue(self.tester.check_status(["public"], + conn_handler=self.conn_handler)) + self.assertFalse(self.tester.check_status(["private"])) + self.assertFalse(self.tester.check_status(["private"], + conn_handler=self.conn_handler)) + + def test_check_status_exclude_single(self): + """check_status works passing a single status and the exclude flag""" + self.assertTrue(self.tester.check_status(["private"], exclude=True)) + self.assertTrue(self.tester.check_status(["private"], exclude=True, + conn_handler=self.conn_handler)) + 
self.assertFalse(self.tester.check_status(["public"], exclude=True)) + self.assertFalse(self.tester.check_status(["public"], exclude=True, + conn_handler=self.conn_handler)) + + def test_check_status_list(self): + """check_status work passing a list of status""" + self.assertTrue(self.tester.check_status( + ["public", "waiting_approval"])) + self.assertTrue(self.tester.check_status( + ["public", "waiting_approval"], conn_handler=self.conn_handler)) + self.assertFalse(self.tester.check_status( + ["private", "waiting_approval"])) + self.assertFalse(self.tester.check_status( + ["private", "waiting_approval"], conn_handler=self.conn_handler)) + + def test_check_status_exclude_list(self): + """check_status work passing a list of status and the exclude flag""" + self.assertTrue(self.tester.check_status( + ["private", "waiting_approval"], exclude=True)) + self.assertTrue(self.tester.check_status( + ["private", "waiting_approval"], exclude=True, + conn_handler=self.conn_handler)) + self.assertFalse(self.tester.check_status( + ["public", "waiting_approval"], exclude=True)) + self.assertFalse(self.tester.check_status( + ["public", "waiting_approval"], exclude=True, + conn_handler=self.conn_handler)) + + def test_check_status_unknown_status(self): + """check_status raises an error if an invalid status is provided""" + with self.assertRaises(ValueError): + self.tester.check_status(["foo"]) + + with self.assertRaises(ValueError): + self.tester.check_status(["foo"], exclude=True) + + with self.assertRaises(ValueError): + self.tester.check_status(["foo"], conn_handler=self.conn_handler) + + with self.assertRaises(ValueError): + self.tester.check_status(["foo"], exclude=True, + conn_handler=self.conn_handler) + + def test_check_status_unknown_status_list(self): + """check_status raises an error if an invalid status list is provided + """ + with self.assertRaises(ValueError): + self.tester.check_status(["foo", "bar"]) + + with self.assertRaises(ValueError): + 
self.tester.check_status(["foo", "bar"], exclude=True) + + with self.assertRaises(ValueError): + self.tester.check_status(["foo", "bar"], + conn_handler=self.conn_handler) + + with self.assertRaises(ValueError): + self.tester.check_status(["foo", "bar"], exclude=True, + conn_handler=self.conn_handler) + +if __name__ == '__main__': + main() diff --git a/qiita_db/test/test_commands.py b/qiita_db/test/test_commands.py new file mode 100644 index 000000000..b80389270 --- /dev/null +++ b/qiita_db/test/test_commands.py @@ -0,0 +1,351 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from os import remove, close +from os.path import exists, join, basename +from tempfile import mkstemp, mkdtemp +from shutil import rmtree +from unittest import TestCase, main +from future.utils.six import StringIO +from future import standard_library +from functools import partial +with standard_library.hooks(): + import configparser + +from qiita_db.commands import (load_study_from_cmd, load_raw_data_cmd, + load_sample_template_from_cmd, + load_prep_template_from_cmd, + load_processed_data_cmd, + load_preprocessed_data_from_cmd) +from qiita_db.study import Study, StudyPerson +from qiita_db.user import User +from qiita_db.data import RawData +from qiita_db.util import get_count, check_count, get_db_files_base_dir +from qiita_core.util import qiita_test_checker + + +@qiita_test_checker() +class TestMakeStudyFromCmd(TestCase): + def setUp(self): + StudyPerson.create('SomeDude', 'somedude@foo.bar', + '111 fake street', '111-121-1313') + User.create('test@test.com', 'password') + self.config1 = CONFIG_1 + self.config2 = CONFIG_2 + + def test_make_study_from_cmd(self): + fh = 
StringIO(self.config1) + load_study_from_cmd('test@test.com', 'newstudy', fh) + sql = ("select study_id from qiita.study where email = %s and " + "study_title = %s") + study_id = self.conn_handler.execute_fetchone(sql, ('test@test.com', + 'newstudy')) + self.assertTrue(study_id is not None) + + fh2 = StringIO(self.config2) + with self.assertRaises(configparser.NoOptionError): + load_study_from_cmd('test@test.com', 'newstudy2', fh2) + + +@qiita_test_checker() +class TestImportPreprocessedData(TestCase): + def setUp(self): + self.tmpdir = mkdtemp() + fd, self.file1 = mkstemp(dir=self.tmpdir) + close(fd) + fd, self.file2 = mkstemp(dir=self.tmpdir) + close(fd) + with open(self.file1, "w") as f: + f.write("\n") + with open(self.file2, "w") as f: + f.write("\n") + + self.files_to_remove = [self.file1, self.file2] + self.dirs_to_remove = [self.tmpdir] + + self.db_test_ppd_dir = join(get_db_files_base_dir(), + 'preprocessed_data') + + def tearDown(self): + for fp in self.files_to_remove: + if exists(fp): + remove(fp) + for dp in self.dirs_to_remove: + if exists(dp): + rmtree(dp) + + def test_import_preprocessed_data(self): + initial_ppd_count = get_count('qiita.preprocessed_data') + initial_fp_count = get_count('qiita.filepath') + ppd = load_preprocessed_data_from_cmd( + 1, 'preprocessed_sequence_illumina_params', + self.tmpdir, 'preprocessed_sequences', 1, False, 1) + self.files_to_remove.append( + join(self.db_test_ppd_dir, + '%d_%s' % (ppd.id, basename(self.file1)))) + self.files_to_remove.append( + join(self.db_test_ppd_dir, + '%d_%s' % (ppd.id, basename(self.file2)))) + self.assertEqual(ppd.id, 3) + self.assertTrue(check_count('qiita.preprocessed_data', + initial_ppd_count + 1)) + self.assertTrue(check_count('qiita.filepath', initial_fp_count+2)) + + +@qiita_test_checker() +class TestLoadSampleTemplateFromCmd(TestCase): + def setUp(self): + # Create a sample template file + self.st_contents = SAMPLE_TEMPLATE + + # create a new study to attach the sample template + 
info = { + "timeseries_type_id": 1, + "metadata_complete": True, + "mixs_compliant": True, + "number_samples_collected": 4, + "number_samples_promised": 4, + "portal_type_id": 3, + "study_alias": "TestStudy", + "study_description": "Description of a test study", + "study_abstract": "No abstract right now...", + "emp_person_id": StudyPerson(2), + "principal_investigator_id": StudyPerson(3), + "lab_person_id": StudyPerson(1) + } + self.study = Study.create(User('test@foo.bar'), + "Test study", [1], info) + + def test_load_sample_template_from_cmd(self): + """Correctly adds a sample template to the DB""" + fh = StringIO(self.st_contents) + st = load_sample_template_from_cmd(fh, self.study.id) + self.assertEqual(st.id, self.study.id) + + +@qiita_test_checker() +class TestLoadPrepTemplateFromCmd(TestCase): + def setUp(self): + # Create a sample template file + fd, seqs_fp = mkstemp(suffix='_seqs.fastq') + close(fd) + fd, barcodes_fp = mkstemp(suffix='_barcodes.fastq') + close(fd) + + with open(seqs_fp, "w") as f: + f.write("\n") + with open(barcodes_fp, "w") as f: + f.write("\n") + + self.pt_contents = PREP_TEMPLATE + + self.raw_data = RawData.create( + 2, [(seqs_fp, 1), (barcodes_fp, 2)], [Study(1)]) + + join_f = partial(join, join(get_db_files_base_dir(), 'raw_data')) + self.files_to_remove = [ + join_f("%s_%s" % (self.raw_data.id, basename(seqs_fp))), + join_f("%s_%s" % (self.raw_data.id, basename(barcodes_fp)))] + + def tearDown(self): + for fp in self.files_to_remove: + if exists(fp): + remove(fp) + + def test_load_prep_template_from_cmd(self): + """Correctly adds a sample template to the DB""" + fh = StringIO(self.pt_contents) + st = load_prep_template_from_cmd(fh, self.raw_data.id) + self.assertEqual(st.id, self.raw_data.id) + + +@qiita_test_checker() +class TestLoadRawDataFromCmd(TestCase): + def setUp(self): + fd, self.forward_fp = mkstemp(suffix='_forward.fastq.gz') + close(fd) + fd, self.reverse_fp = mkstemp(suffix='_reverse.fastq.gz') + close(fd) + fd, 
self.barcodes_fp = mkstemp(suffix='_barcodes.fastq.gz') + close(fd) + + with open(self.forward_fp, "w") as f: + f.write("\n") + with open(self.reverse_fp, "w") as f: + f.write("\n") + with open(self.barcodes_fp, "w") as f: + f.write("\n") + + self.files_to_remove = [] + self.files_to_remove.append(self.forward_fp) + self.files_to_remove.append(self.reverse_fp) + self.files_to_remove.append(self.barcodes_fp) + + self.db_test_raw_dir = join(get_db_files_base_dir(), 'raw_data') + + def tearDown(self): + for fp in self.files_to_remove: + if exists(fp): + remove(fp) + + def test_load_data_from_cmd(self): + filepaths = [self.forward_fp, self.reverse_fp, self.barcodes_fp] + filepath_types = ['raw_sequences', 'raw_sequences', 'raw_barcodes'] + + filetype = 'FASTQ' + study_ids = [1] + + initial_raw_count = get_count('qiita.raw_data') + initial_fp_count = get_count('qiita.filepath') + initial_raw_fp_count = get_count('qiita.raw_filepath') + + new = load_raw_data_cmd(filepaths, filepath_types, filetype, + study_ids) + raw_data_id = new.id + self.files_to_remove.append( + join(self.db_test_raw_dir, + '%d_%s' % (raw_data_id, basename(self.forward_fp)))) + self.files_to_remove.append( + join(self.db_test_raw_dir, + '%d_%s' % (raw_data_id, basename(self.reverse_fp)))) + self.files_to_remove.append( + join(self.db_test_raw_dir, + '%d_%s' % (raw_data_id, basename(self.barcodes_fp)))) + + self.assertTrue(check_count('qiita.raw_data', initial_raw_count + 1)) + self.assertTrue(check_count('qiita.filepath', + initial_fp_count + 3)) + self.assertTrue(check_count('qiita.raw_filepath', + initial_raw_fp_count + 3)) + self.assertTrue(check_count('qiita.study_raw_data', + initial_raw_count + 1)) + + # Ensure that the ValueError is raised when a filepath_type is not + # provided for each and every filepath + with self.assertRaises(ValueError): + load_raw_data_cmd(filepaths, filepath_types[:-1], filetype, + study_ids) + + +@qiita_test_checker() +class TestLoadProcessedDataFromCmd(TestCase): + 
def setUp(self): + fd, self.otu_table_fp = mkstemp(suffix='_otu_table.biom') + close(fd) + fd, self.otu_table_2_fp = mkstemp(suffix='_otu_table2.biom') + close(fd) + + with open(self.otu_table_fp, "w") as f: + f.write("\n") + with open(self.otu_table_2_fp, "w") as f: + f.write("\n") + + self.files_to_remove = [] + self.files_to_remove.append(self.otu_table_fp) + self.files_to_remove.append(self.otu_table_2_fp) + + self.db_test_processed_data_dir = join(get_db_files_base_dir(), + 'processed_data') + + def tearDown(self): + for fp in self.files_to_remove: + if exists(fp): + remove(fp) + + def test_load_processed_data_from_cmd(self): + filepaths = [self.otu_table_fp, self.otu_table_2_fp] + filepath_types = ['biom', 'biom'] + + initial_processed_data_count = get_count('qiita.processed_data') + initial_processed_fp_count = get_count('qiita.processed_filepath') + initial_fp_count = get_count('qiita.filepath') + + new = load_processed_data_cmd(filepaths, filepath_types, + 'processed_params_uclust', 1, 1, None) + processed_data_id = new.id + self.files_to_remove.append( + join(self.db_test_processed_data_dir, + '%d_%s' % (processed_data_id, basename(self.otu_table_fp)))) + self.files_to_remove.append( + join(self.db_test_processed_data_dir, + '%d_%s' % (processed_data_id, + basename(self.otu_table_2_fp)))) + + self.assertTrue(check_count('qiita.processed_data', + initial_processed_data_count + 1)) + self.assertTrue(check_count('qiita.processed_filepath', + initial_processed_fp_count + 2)) + self.assertTrue(check_count('qiita.filepath', + initial_fp_count + 2)) + + # Ensure that the ValueError is raised when a filepath_type is not + # provided for each and every filepath + with self.assertRaises(ValueError): + load_processed_data_cmd(filepaths, filepath_types[:-1], + 'processed_params_uclust', 1, 1, None) + + +CONFIG_1 = """[required] +timeseries_type_id = 1 +metadata_complete = True +mixs_compliant = True +number_samples_collected = 50 +number_samples_promised = 25 
+portal_type_id = 3 +principal_investigator = SomeDude, somedude@foo.bar +reprocess = False +study_alias = 'test study' +study_description = 'test study description' +study_abstract = 'study abstract' +efo_ids = 1,2,3,4 +[optional] +lab_person = SomeDude, somedude@foo.bar +funding = 'funding source' +vamps_id = vamps_id +""" + +CONFIG_2 = """[required] +timeseries_type_id = 1 +metadata_complete = True +number_samples_collected = 50 +number_samples_promised = 25 +portal_type_id = 3 +principal_investigator = SomeDude, somedude@foo.bar +reprocess = False +study_alias = 'test study' +study_description = 'test study description' +study_abstract = 'study abstract' +efo_ids = 1,2,3,4 +[optional] +lab_person = SomeDude, somedude@foo.bar +funding = 'funding source' +vamps_id = vamps_id +""" + +SAMPLE_TEMPLATE = ( + "#SampleID\trequired_sample_info_status_id\tcollection_timestamp\t" + "sample_type\thas_physical_specimen\tphysical_location\thas_extracted_data" + "\thost_subject_id\tTreatment\tDOB\tDescription\n" + "PC.354\t1\t2014-06-18 16:44\ttype_1\tTrue\tLocation_1\tTrue\tHS_ID_PC.354" + "\tControl\t20061218\tControl_mouse_I.D._354\n" + "PC.593\t1\t2014-06-18 16:44\ttype_1\tTrue\tLocation_1\tTrue\tHS_ID_PC.593" + "\tControl\t20071210\tControl_mouse_I.D._593\n" + "PC.607\t1\t2014-06-18 16:44\ttype_1\tTrue\tLocation_1\tTrue\tHS_ID_PC.607" + "\tFast\t20071112\tFasting_mouse_I.D._607\n" + "PC.636\t1\t2014-06-18 16:44\ttype_1\tTrue\tLocation_1\tTrue\tHS_ID_PC.636" + "\tFast\t20080116\tFasting_mouse_I.D._636") + +PREP_TEMPLATE = ( + "#SampleID\tcenter_name\tcusom_col\temp_status_id\tdata_type_id\n" + "SKB8.640193\tANL\tPC.354\t1\t1\n" + "SKD8.640184\tANL\tPC.593\t1\t1\n" + "SKB7.640196\tANL\tPC.607\t1\t1\n" + "SKM9.640192\tANL\tPC.636\t1\t1\n") + +if __name__ == "__main__": + main() diff --git a/qiita_db/test/test_data.py b/qiita_db/test/test_data.py new file mode 100644 index 000000000..11ebbab14 --- /dev/null +++ b/qiita_db/test/test_data.py @@ -0,0 +1,422 @@ +# 
----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from unittest import TestCase, main +from datetime import datetime +from os import close, remove +from os.path import join, basename, exists +from tempfile import mkstemp + +from qiita_core.util import qiita_test_checker +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from qiita_db.study import Study +from qiita_db.util import get_db_files_base_dir +from qiita_db.data import BaseData, RawData, PreprocessedData, ProcessedData + + +@qiita_test_checker() +class BaseDataTests(TestCase): + """Tests the BaseData class""" + + def test_init(self): + """Raises an error if trying to instantiate the base data""" + with self.assertRaises(IncompetentQiitaDeveloperError): + BaseData(1) + + +@qiita_test_checker() +class RawDataTests(TestCase): + """Tests the RawData class""" + + def setUp(self): + fd, self.seqs_fp = mkstemp(suffix='_seqs.fastq') + close(fd) + fd, self.barcodes_fp = mkstemp(suffix='_barcodes.fastq') + close(fd) + self.filetype = 2 + self.filepaths = [(self.seqs_fp, 1), (self.barcodes_fp, 2)] + self.studies = [Study(1)] + self.db_test_raw_dir = join(get_db_files_base_dir(), 'raw_data') + + with open(self.seqs_fp, "w") as f: + f.write("\n") + with open(self.barcodes_fp, "w") as f: + f.write("\n") + self._clean_up_files = [] + + def tearDown(self): + for f in self._clean_up_files: + remove(f) + + def test_create(self): + """Correctly creates all the rows in the DB for the raw data""" + # Check that the returned object has the correct id + obs = RawData.create(self.filetype, self.filepaths, self.studies) + self.assertEqual(obs.id, 3) + + # Check that the raw data have been correctly added to the DB 
+ obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.raw_data WHERE raw_data_id=3") + # raw_data_id, filetype, submitted_to_insdc + self.assertEqual(obs, [[3, 2]]) + + # Check that the raw data have been correctly linked with the study + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study_raw_data WHERE raw_data_id=3") + # study_id , raw_data_id + self.assertEqual(obs, [[1, 3]]) + + # Check that the files have been copied to right location + exp_seqs_fp = join(self.db_test_raw_dir, + "3_%s" % basename(self.seqs_fp)) + self.assertTrue(exists(exp_seqs_fp)) + self._clean_up_files.append(exp_seqs_fp) + + exp_bc_fp = join(self.db_test_raw_dir, + "3_%s" % basename(self.barcodes_fp)) + self.assertTrue(exists(exp_bc_fp)) + self._clean_up_files.append(exp_bc_fp) + + # Check that the filepaths have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.filepath WHERE filepath_id=10 or " + "filepath_id=11") + # filepath_id, path, filepath_type_id + exp = [[10, exp_seqs_fp, 1, '852952723', 1], + [11, exp_bc_fp, 2, '852952723', 1]] + self.assertEqual(obs, exp) + + # Check that the raw data have been correctly linked with the filepaths + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.raw_filepath WHERE raw_data_id=3") + # raw_data_id, filepath_id + self.assertEqual(obs, [[3, 10], [3, 11]]) + + def test_get_filepaths(self): + """Correctly returns the filepaths to the raw files""" + rd = RawData(1) + obs = rd.get_filepaths() + exp = [ + (join(self.db_test_raw_dir, '1_s_G1_L001_sequences.fastq.gz'), 1), + (join(self.db_test_raw_dir, + '1_s_G1_L001_sequences_barcodes.fastq.gz'), 2)] + self.assertEqual(obs, exp) + + def test_studies(self): + """Correctly returns the study ids""" + rd = RawData(1) + self.assertEqual(rd.studies, [1]) + + +@qiita_test_checker() +class PreprocessedDataTests(TestCase): + """Tests the PreprocessedData class""" + def setUp(self): + self.raw_data = RawData(1) + 
self.study = Study(1) + self.params_table = "preprocessed_sequence_illumina_params" + self.params_id = 1 + fd, self.fna_fp = mkstemp(suffix='_seqs.fna') + close(fd) + fd, self.qual_fp = mkstemp(suffix='_seqs.qual') + close(fd) + self.filepaths = [(self.fna_fp, 4), (self.qual_fp, 5)] + self.db_test_ppd_dir = join(get_db_files_base_dir(), + 'preprocessed_data') + + with open(self.fna_fp, "w") as f: + f.write("\n") + with open(self.qual_fp, "w") as f: + f.write("\n") + self._clean_up_files = [] + + def tearDown(self): + for f in self._clean_up_files: + remove(f) + + def test_create(self): + """Correctly creates all the rows in the DB for preprocessed data""" + # Check that the returned object has the correct id + obs = PreprocessedData.create(self.study, self.params_table, + self.params_id, self.filepaths, + raw_data=self.raw_data) + self.assertEqual(obs.id, 3) + + # Check that the preprocessed data have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.preprocessed_data WHERE " + "preprocessed_data_id=3") + # preprocessed_data_id, raw_data_id, preprocessed_params_tables, + # preprocessed_params_id + exp = [[3, "preprocessed_sequence_illumina_params", 1, False]] + self.assertEqual(obs, exp) + + # Check that the preprocessed data has been linked with its study + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study_preprocessed_data WHERE " + "preprocessed_data_id=3") + exp = [[1, 3]] + self.assertEqual(obs, exp) + + # Check that the files have been copied to right location + exp_fna_fp = join(self.db_test_ppd_dir, + "3_%s" % basename(self.fna_fp)) + self.assertTrue(exists(exp_fna_fp)) + self._clean_up_files.append(exp_fna_fp) + + exp_qual_fp = join(self.db_test_ppd_dir, + "3_%s" % basename(self.qual_fp)) + self.assertTrue(exists(exp_qual_fp)) + self._clean_up_files.append(exp_qual_fp) + + # Check that the filepaths have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT 
* FROM qiita.filepath WHERE filepath_id=10 or " + "filepath_id=11") + # filepath_id, path, filepath_type_id + exp = [[10, exp_fna_fp, 4, '852952723', 1], + [11, exp_qual_fp, 5, '852952723', 1]] + self.assertEqual(obs, exp) + + # Check that the preprocessed data have been correctly + # linked with the filepaths + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.preprocessed_filepath WHERE " + "preprocessed_data_id=3") + # preprocessed_data_id, filepath_id + self.assertEqual(obs, [[3, 10], [3, 11]]) + + def test_create_error(self): + """Raises an error if the preprocessed_params_table does not exist""" + with self.assertRaises(IncompetentQiitaDeveloperError): + PreprocessedData.create(self.study, "foo", self.params_id, + self.filepaths) + with self.assertRaises(IncompetentQiitaDeveloperError): + PreprocessedData.create(self.study, "preprocessed_foo", + self.params_id, self.filepaths) + with self.assertRaises(IncompetentQiitaDeveloperError): + PreprocessedData.create(self.study, "foo_params", self.params_id, + self.filepaths) + with self.assertRaises(IncompetentQiitaDeveloperError): + PreprocessedData.create(self.study, "preprocessed_foo_params", + self.params_id, self.filepaths) + + def test_get_filepaths(self): + """Correctly returns the filepaths to the preprocessed files""" + ppd = PreprocessedData(1) + obs = ppd.get_filepaths() + exp = [(join(self.db_test_ppd_dir, '1_seqs.fna'), 4), + (join(self.db_test_ppd_dir, '1_seqs.qual'), 5)] + self.assertEqual(obs, exp) + + def test_raw_data(self): + """Correctly returns the raw data""" + ppd = PreprocessedData(1) + self.assertEqual(ppd.raw_data, 1) + + def test_study(self): + """Correctly returns the study""" + ppd = PreprocessedData(1) + self.assertEqual(ppd.study, 1) + + def test_is_submitted_to_insdc(self): + """is_submitted_to_insdc works correctly""" + # False case + pd = PreprocessedData(1) + self.assertTrue(pd.is_submitted_to_insdc()) + # True case + pd = PreprocessedData(2) + 
self.assertFalse(pd.is_submitted_to_insdc()) + + +@qiita_test_checker() +class ProcessedDataTests(TestCase): + """Tests the ProcessedData class""" + def setUp(self): + self.preprocessed_data = PreprocessedData(1) + self.params_table = "processed_params_uclust" + self.params_id = 1 + fd, self.biom_fp = mkstemp(suffix='_table.biom') + close(fd) + self.filepaths = [(self.biom_fp, 6)] + self.date = datetime(2014, 5, 29, 12, 24, 51) + self.db_test_pd_dir = join(get_db_files_base_dir(), 'processed_data') + + with open(self.biom_fp, "w") as f: + f.write("\n") + self._clean_up_files = [] + + def tearDown(self): + for f in self._clean_up_files: + remove(f) + + def test_create(self): + """Correctly creates all the rows in the DB for the processed data""" + # Check that the returned object has the correct id + obs = ProcessedData.create(self.params_table, self.params_id, + self.filepaths, + preprocessed_data=self.preprocessed_data, + processed_date=self.date) + self.assertEqual(obs.id, 2) + + # Check that the processed data have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.processed_data WHERE processed_data_id=2") + # processed_data_id, preprocessed_data_id, processed_params_table, + # processed_params_id, processed_date + exp = [[2, "processed_params_uclust", 1, self.date]] + self.assertEqual(obs, exp) + + # Check that the files have been copied to right location + exp_biom_fp = join(self.db_test_pd_dir, + "2_%s" % basename(self.biom_fp)) + self.assertTrue(exists(exp_biom_fp)) + self._clean_up_files.append(exp_biom_fp) + + # Check that the filepaths have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.filepath WHERE filepath_id=10") + # Filepath_id, path, filepath_type_id + exp = [[10, exp_biom_fp, 6, '852952723', 1]] + self.assertEqual(obs, exp) + + # Check that the processed data have been correctly linked + # with the fileapths + obs = 
self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.processed_filepath WHERE processed_data_id=2") + # processed_data_id, filepath_id + self.assertEqual(obs, [[2, 10]]) + + # Check that the processed data have been correctly linked with the + # study + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study_processed_data WHERE " + "processed_data_id=2") + # study_id, processed_data + self.assertEqual(obs, [[1, 2]]) + + # Check that the processed data have been correctly linked with the + # preprocessed data + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.preprocessed_processed_data WHERE " + "processed_data_id=2") + # preprocessed_data_id, processed_Data_id + self.assertEqual(obs, [[1, 2]]) + + def test_create_no_date(self): + """Correctly adds a processed data with no date on it""" + # All the other settings have been already tested on test_create + # here we will only check that the code added a good date + before = datetime.now() + ProcessedData.create(self.params_table, self.params_id, self.filepaths, + preprocessed_data=self.preprocessed_data) + after = datetime.now() + obs = self.conn_handler.execute_fetchone( + "SELECT processed_date FROM qiita.processed_data WHERE " + "processed_data_id=2")[0] + + # Make sure that we clean up the environment + exp_biom_fp = join(self.db_test_pd_dir, + "2_%s" % basename(self.biom_fp)) + self._clean_up_files.append(exp_biom_fp) + + self.assertTrue(before <= obs <= after) + + def test_create_w_study(self): + """Correctly adds a processed data passing a study""" + obs = ProcessedData.create(self.params_table, self.params_id, + self.filepaths, study=Study(1), + processed_date=self.date) + + # Check that the processed data have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.processed_data WHERE processed_data_id=2") + # processed_data_id, preprocessed_data_id, processed_params_table, + # processed_params_id, processed_date + exp = [[2, 
"processed_params_uclust", 1, self.date]] + self.assertEqual(obs, exp) + + # Check that the files have been copied to right location + exp_biom_fp = join(self.db_test_pd_dir, + "2_%s" % basename(self.biom_fp)) + self.assertTrue(exists(exp_biom_fp)) + self._clean_up_files.append(exp_biom_fp) + + # Check that the filepaths have been correctly added to the DB + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.filepath WHERE filepath_id=10") + # Filepath_id, path, filepath_type_id + exp = [[10, exp_biom_fp, 6, '852952723', 1]] + self.assertEqual(obs, exp) + + # Check that the processed data have been correctly linked + # with the fileapths + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.processed_filepath WHERE processed_data_id=2") + # processed_data_id, filepath_id + self.assertTrue(obs, [[2, 10]]) + + # Check that the processed data have been correctly linked with the + # study + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study_processed_data WHERE " + "processed_data_id=2") + # study_id, processed_data + self.assertEqual(obs, [[1, 2]]) + + def test_create_params_table_error(self): + """Raises an error if the processed_params_table does not exist""" + with self.assertRaises(IncompetentQiitaDeveloperError): + ProcessedData.create("foo", self.params_id, self.filepaths, + preprocessed_data=self.preprocessed_data) + with self.assertRaises(IncompetentQiitaDeveloperError): + ProcessedData.create("processed_params_foo", self.params_id, + self.filepaths, + preprocessed_data=self.preprocessed_data) + with self.assertRaises(IncompetentQiitaDeveloperError): + ProcessedData.create("processed_params_", self.params_id, + self.filepaths, + preprocessed_data=self.preprocessed_data) + + def test_create_no_preprocessed_no_study_error(self): + with self.assertRaises(IncompetentQiitaDeveloperError): + ProcessedData.create(self.params_table, self.params_id, + self.filepaths) + + def test_create_preprocessed_and_study_error(self): 
+ with self.assertRaises(IncompetentQiitaDeveloperError): + ProcessedData.create(self.params_table, self.params_id, + self.filepaths, + preprocessed_data=self.preprocessed_data, + study=Study(1)) + + def test_get_filepath(self): + """Correctly returns the filepaths to the processed files""" + # check the test data + pd = ProcessedData(1) + obs = pd.get_filepaths() + exp = [(join(self.db_test_pd_dir, + '1_study_1001_closed_reference_otu_table.biom'), 6)] + self.assertEqual(obs, exp) + + def test_get_filepath_ids(self): + pd = ProcessedData(1) + self.assertEqual(pd.get_filepath_ids(), [7]) + + def test_preprocessed_data(self): + """Correctly returns the preprocessed_data""" + pd = ProcessedData(1) + self.assertEqual(pd.preprocessed_data, 1) + + def test_data_type(self): + pd = ProcessedData(1) + self.assertEqual(pd.data_type, "18S") + + +if __name__ == '__main__': + main() diff --git a/qiita_db/test/test_data/sample_template.txt b/qiita_db/test/test_data/sample_template.txt new file mode 100644 index 000000000..208b2ecd3 --- /dev/null +++ b/qiita_db/test/test_data/sample_template.txt @@ -0,0 +1,5 @@ +#SampleID required_sample_info_status_id collection_timestamp sample_type has_physical_specimen physical_location has_extracted_data host_subject_id Treatment DOB Description +PC.354 1 2014-06-18 16:44 type_1 True Location_1 True HS_ID_PC.354 Control 20061218 Control_mouse_I.D._354 +PC.593 1 2014-06-18 16:44 type_1 True Location_1 True HS_ID_PC.593 Control 20071210 Control_mouse_I.D._593 +PC.607 1 2014-06-18 16:44 type_1 True Location_1 True HS_ID_PC.607 Fast 20071112 Fasting_mouse_I.D._607 +PC.636 1 2014-06-18 16:44 type_1 True Location_1 True HS_ID_PC.636 Fast 20080116 Fasting_mouse_I.D._636 \ No newline at end of file diff --git a/qiita_db/test/test_job.py b/qiita_db/test/test_job.py new file mode 100644 index 000000000..78e01feba --- /dev/null +++ b/qiita_db/test/test_job.py @@ -0,0 +1,196 @@ +# 
----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from unittest import TestCase, main +from os import remove +from os.path import join +from shutil import rmtree +from datetime import datetime + +from qiita_core.util import qiita_test_checker +from qiita_db.job import Job, Command +from qiita_db.util import get_db_files_base_dir +from qiita_db.analysis import Analysis +from qiita_db.exceptions import QiitaDBDuplicateError, QiitaDBStatusError +from qiita_db.logger import LogEntry + + +@qiita_test_checker() +class JobTest(TestCase): + """Tests that the job object works as expected""" + + def setUp(self): + self.job = Job(1) + self.options = {"option1": False, "option2": 25, "option3": "NEW"} + self._delete_path = [] + self._delete_dir = [] + + def tearDown(self): + # needs to be this way because map does not play well with remove and + # rmtree for python3 + for item in self._delete_path: + remove(item) + for item in self._delete_dir: + rmtree(item) + + # EXISTS IGNORED FOR DEMO, ISSUE #83 + # def test_exists(self): + # """tests that existing job returns true""" + # self.assertTrue(Job.exists("16S", "Summarize Taxa", + # {'option1': True, 'option2': 12, + # 'option3': 'FCM'})) + + # def test_exists_not_there(self): + # """tests that non-existant job returns false""" + # self.assertFalse(Job.exists("Metabolomic", + # "Summarize Taxa", + # {'option1': "Nope", 'option2': 10, + # 'option3': 'FCM'})) + + def test_get_commands(self): + exp = [ + Command('Summarize Taxa', 'summarize_taxa_through_plots.py', + '{"--otu_table_fp":null}', '{}', + '{"--mapping_category":null, "--mapping_fp":null,' + '"--sort":null}', '{"--output_dir":null}'), + Command('Beta Diversity', 
'beta_diversity_through_plots.py', + '{"--otu_table_fp":null,"--mapping_fp":null}', '{}', + '{"--tree_fp":null,"--color_by_all_fields":null,' + '"--seqs_per_sample":null}', '{"--output_dir":null}'), + Command('Alpha Rarefaction', 'alpha_rarefaction.py', + '{"--otu_table_fp":null,"--mapping_fp":null}', '{}', + '{"--tree_fp":null,"--num_steps":null,''"--min_rare_depth"' + ':null,"--max_rare_depth":null,' + '"--retain_intermediate_files":false}', + '{"--output_dir":null}') + ] + self.assertEqual(Job.get_commands(), exp) + + def test_create(self): + """Makes sure creation works as expected""" + # make first job + new = Job.create("18S", "Alpha Rarefaction", + self.options, Analysis(1)) + self.assertEqual(new.id, 4) + # make sure job inserted correctly + obs = self.conn_handler.execute_fetchall("SELECT * FROM qiita.job " + "WHERE job_id = 4") + exp = [[4, 2, 1, 3, '{"option1":false,"option2":25,"option3":"NEW"}', + None]] + self.assertEqual(obs, exp) + # make sure job added to analysis correctly + obs = self.conn_handler.execute_fetchall("SELECT * FROM " + "qiita.analysis_job WHERE " + "job_id = 4") + exp = [[1, 4]] + self.assertEqual(obs, exp) + + # make second job with diff datatype and command to test column insert + new = Job.create("16S", "Beta Diversity", + self.options, Analysis(1)) + self.assertEqual(new.id, 5) + # make sure job inserted correctly + obs = self.conn_handler.execute_fetchall("SELECT * FROM qiita.job " + "WHERE job_id = 5") + exp = [[5, 1, 1, 2, '{"option1":false,"option2":25,"option3":"NEW"}', + None]] + self.assertEqual(obs, exp) + # make sure job added to analysis correctly + obs = self.conn_handler.execute_fetchall("SELECT * FROM " + "qiita.analysis_job WHERE " + "job_id = 5") + exp = [[1, 5]] + self.assertEqual(obs, exp) + + # def test_create_exists(self): + # """Makes sure creation doesn't duplicate a job""" + # with self.assertRaises(QiitaDBDuplicateError): + # Job.create("16S", "Summarize Taxa", + # {'option1': True, 'option2': 12, 
'option3': 'FCM'}, + # Analysis(1)) + + def test_retrieve_datatype(self): + """Makes sure datatype retriveal is correct""" + self.assertEqual(self.job.datatype, '16S') + + def test_retrieve_command(self): + """Makes sure command retriveal is correct""" + self.assertEqual(self.job.command, ['Summarize Taxa', + 'summarize_taxa_through_plots.py']) + + def test_retrieve_options(self): + self.assertEqual(self.job.options, { + '--otu_table_fp': 1, + '--output_dir': join(get_db_files_base_dir(), 'job/' + '1_summarize_taxa_through_plots.py' + '_output_dir')}) + + def test_retrieve_results(self): + self.assertEqual(self.job.results, [join("job", "1_job_result.txt")]) + + def test_retrieve_results_empty(self): + new = Job.create("18S", "Beta Diversity", self.options, Analysis(1)) + self.assertEqual(new.results, []) + + def test_retrieve_results_dir(self): + self.assertEqual(Job(2).results, [join("job", "2_test_folder")]) + + def test_set_error(self): + before = datetime.now() + self.job.set_error("TESTERROR", 1) + after = datetime.now() + self.assertEqual(self.job.status, "error") + + error = self.job.error + + self.assertEqual(error.severity, 1) + self.assertEqual(error.msg, 'TESTERROR') + self.assertTrue(before < error.time < after) + + def test_retrieve_error_blank(self): + self.assertEqual(self.job.error, None) + + def test_set_error_completed(self): + self.job.status = "error" + with self.assertRaises(QiitaDBStatusError): + self.job.set_error("TESTERROR", 1) + + def test_retrieve_error_exists(self): + self.job.set_error("TESTERROR", 1) + self.assertEqual(self.job.error.msg, "TESTERROR") + + def test_add_results(self): + self.job.add_results([(join(get_db_files_base_dir(), "job", + "1_job_result.txt"), "plain_text")]) + + # make sure files attached to job properly + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.job_results_filepath WHERE job_id = 1") + + self.assertEqual(obs, [[1, 8], [1, 10]]) + + def test_add_results_dir(self): + # Create a test 
directory + test_dir = join(get_db_files_base_dir(), "job", "2_test_folder") + + # add folder to job + self.job.add_results([(test_dir, "directory")]) + + # make sure files attached to job properly + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.job_results_filepath WHERE job_id = 1") + self.assertEqual(obs, [[1, 8], [1, 10]]) + + def test_add_results_completed(self): + self.job.status = "completed" + with self.assertRaises(QiitaDBStatusError): + self.job.add_results([("/fake/dir/", "directory")]) + + +if __name__ == "__main__": + main() diff --git a/qiita_db/test/test_logger.py b/qiita_db/test/test_logger.py new file mode 100644 index 000000000..2a2d1fec9 --- /dev/null +++ b/qiita_db/test/test_logger.py @@ -0,0 +1,68 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +from unittest import TestCase, main +from datetime import datetime + +from qiita_core.util import qiita_test_checker +from qiita_db.logger import LogEntry +from qiita_db.exceptions import QiitaDBExecutionError + + +@qiita_test_checker() +class LoggerTests(TestCase): + def test_create_log_entry(self): + """""" + log_entry = LogEntry.create(2, 'runtime message') + log_entry = LogEntry.create(3, 'fatal message', info={1: 2}) + log_entry = LogEntry.create(1, 'warning message', info={9: 0}) + with self.assertRaises(QiitaDBExecutionError): + # This severity level does not exist in the test schema + log_entry = LogEntry.create(4, 'warning message', info={9: 0}) + + def test_severity_property(self): + """""" + log_entry = LogEntry.create(1, 'warning test', info=None) + self.assertEqual(log_entry.severity, 1) + + def test_time_property(self): + """""" + before = datetime.now() + log_entry = LogEntry.create(1, 'warning test', info=None) + after = datetime.now() + self.assertTrue(before < log_entry.time < after) + + def test_info_property(self): + """""" + log_entry = LogEntry.create(1, 'warning test', + info={1: 2, 'test': 'yeah'}) + self.assertEqual(log_entry.info, [{'1': 2, 'test': 'yeah'}]) + + def test_message_property(self): + """""" + log_entry = LogEntry.create(1, 'warning test', info=None) + self.assertEqual(log_entry.msg, 'warning test') + + def test_add_info(self): + """""" + log_entry = LogEntry.create(1, 'warning test', + info={1: 2, 'test': 'yeah'}) + log_entry.add_info({'another': 'set', 'of': 'entries', 'test': 3}) + self.assertEqual(log_entry.info, [{'1': 2, 'test': 'yeah'}, + {'another': 'set', 'of': 'entries', + 'test': 3}]) + + def test_clear_info(self): + """""" + log_entry = LogEntry.create(1, 'warning test', + info={1: 2, 'test': 'yeah'}) + log_entry.clear_info() + self.assertEqual(log_entry.info, []) + +if __name__ == '__main__': + main() diff --git 
a/qiita_db/test/test_metadata_template.py b/qiita_db/test/test_metadata_template.py new file mode 100644 index 000000000..cc2a2eca6 --- /dev/null +++ b/qiita_db/test/test_metadata_template.py @@ -0,0 +1,1058 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from future.builtins import zip +from unittest import TestCase, main +from datetime import datetime +from tempfile import mkstemp +from os import close, remove +from os.path import join, basename +from collections import Iterable + +import pandas as pd + +from qiita_core.util import qiita_test_checker +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from qiita_db.exceptions import (QiitaDBDuplicateError, QiitaDBUnknownIDError, + QiitaDBNotImplementedError, + QiitaDBDuplicateHeaderError) +from qiita_db.study import Study, StudyPerson +from qiita_db.user import User +from qiita_db.data import RawData +from qiita_db.util import exists_table, get_db_files_base_dir +from qiita_db.metadata_template import (_get_datatypes, _as_python_types, + MetadataTemplate, SampleTemplate, + PrepTemplate, BaseSample, PrepSample, + Sample) + + +class TestUtilMetadataMap(TestCase): + """Tests some utility functions on the metadata_template module""" + def setUp(self): + metadata_dict = { + 'Sample1': {'int_col': 1, 'float_col': 2.1, 'str_col': 'str1'}, + 'Sample2': {'int_col': 2, 'float_col': 3.1, 'str_col': '200'}, + 'Sample3': {'int_col': 3, 'float_col': 3, 'str_col': 'string30'}, + } + self.metadata_map = pd.DataFrame.from_dict(metadata_dict, + orient='index') + self.headers = ['float_col', 'str_col', 'int_col'] + + def test_get_datatypes(self): + """Correctly returns the data types of each column""" + obs 
= _get_datatypes(self.metadata_map.ix[:, self.headers]) + exp = ['float8', 'varchar', 'integer'] + self.assertEqual(obs, exp) + + def test_as_python_types(self): + """Correctly returns the columns as python types""" + obs = _as_python_types(self.metadata_map, self.headers) + exp = [[2.1, 3.1, 3], + ['str1', '200', 'string30'], + [1, 2, 3]] + self.assertEqual(obs, exp) + + +@qiita_test_checker() +class TestBaseSample(TestCase): + """Tests the BaseSample class""" + + def test_init(self): + """BaseSample init should raise an error (it's a base class)""" + with self.assertRaises(IncompetentQiitaDeveloperError): + BaseSample('SKM7.640188', SampleTemplate(1)) + + def test_exists(self): + """exists should raise an error if called from the base class""" + with self.assertRaises(IncompetentQiitaDeveloperError): + BaseSample.exists('SKM7.640188', SampleTemplate(1)) + + +@qiita_test_checker() +class TestSample(TestCase): + """Tests the Sample class""" + + def setUp(self): + self.sample_template = SampleTemplate(1) + self.sample_id = 'SKB8.640193' + self.tester = Sample(self.sample_id, self.sample_template) + self.exp_categories = {'physical_location', 'has_physical_specimen', + 'has_extracted_data', 'sample_type', + 'required_sample_info_status_id', + 'collection_timestamp', 'host_subject_id', + 'description', 'season_environment', + 'assigned_from_geo', 'texture', 'taxon_id', + 'depth', 'host_taxid', 'common_name', + 'water_content_soil', 'elevation', 'temp', + 'tot_nitro', 'samp_salinity', 'altitude', + 'env_biome', 'country', 'ph', 'anonymized_name', + 'tot_org_carb', 'longitude', + 'description_duplicate', 'env_feature', + 'latitude'} + + def test_init_unknown_error(self): + """Init raises an error if the sample id is not found in the template + """ + with self.assertRaises(QiitaDBUnknownIDError): + Sample('Not_a_Sample', self.sample_template) + + def test_init_wrong_template(self): + """Raises an error if using a PrepTemplate instead of SampleTemplate""" + with 
self.assertRaises(IncompetentQiitaDeveloperError): + Sample('SKB8.640193', PrepTemplate(1)) + + def test_init(self): + """Init correctly initializes the sample object""" + sample = Sample(self.sample_id, self.sample_template) + # Check that the internal id have been correctly set + self.assertEqual(sample._id, 'SKB8.640193') + # Check that the internal template have been correctly set + self.assertEqual(sample._md_template, self.sample_template) + # Check that the internal dynamic table name have been correctly set + self.assertEqual(sample._dynamic_table, "sample_1") + + def test_eq_true(self): + """Equality correctly returns true""" + other = Sample(self.sample_id, self.sample_template) + self.assertTrue(self.tester == other) + + def test_eq_false_type(self): + """Equality returns false if types are not equal""" + other = PrepSample(self.sample_id, PrepTemplate(1)) + self.assertFalse(self.tester == other) + + def test_eq_false_id(self): + """Equality returns false if ids are different""" + other = Sample('SKD8.640184', self.sample_template) + self.assertFalse(self.tester == other) + + def test_exists_true(self): + """Exists returns true if the sample exists""" + self.assertTrue(Sample.exists(self.sample_id, self.sample_template)) + + def test_exists_false(self): + """Exists returns false if the sample does not exists""" + self.assertFalse(Sample.exists('Not_a_Sample', self.sample_template)) + + def test_get_categories(self): + """Correctly returns the set of category headers""" + obs = self.tester._get_categories(self.conn_handler) + self.assertEqual(obs, self.exp_categories) + + def test_len(self): + """Len returns the correct number of categories""" + self.assertEqual(len(self.tester), 30) + + def test_getitem_required(self): + """Get item returns the correct metadata value from the required table + """ + self.assertEqual(self.tester['physical_location'], 'ANL') + self.assertEqual(self.tester['collection_timestamp'], + datetime(2011, 11, 11, 13, 00, 00)) + 
self.assertTrue(self.tester['has_physical_specimen']) + + def test_getitem_dynamic(self): + """Get item returns the correct metadata value from the dynamic table + """ + self.assertEqual(self.tester['SEASON_ENVIRONMENT'], 'winter') + self.assertEqual(self.tester['depth'], 0.15) + + def test_getitem_error(self): + """Get item raises an error if category does not exists""" + with self.assertRaises(KeyError): + self.tester['Not_a_Category'] + + def test_setitem(self): + """setitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + self.tester['DEPTH'] = 0.30 + + def test_delitem(self): + """delitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + del self.tester['DEPTH'] + + def test_iter(self): + """iter returns an iterator over the category headers""" + obs = self.tester.__iter__() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_categories) + + def test_contains_true(self): + """contains returns true if the category header exists""" + self.assertTrue('DEPTH' in self.tester) + self.assertTrue('depth' in self.tester) + + def test_contains_false(self): + """contains returns false if the category header does not exists""" + self.assertFalse('Not_a_Category' in self.tester) + + def test_keys(self): + """keys returns an iterator over the metadata headers""" + obs = self.tester.keys() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_categories) + + def test_values(self): + """values returns an iterator over the values""" + obs = self.tester.values() + self.assertTrue(isinstance(obs, Iterable)) + exp = {'ANL', True, True, 'ENVO:soil', 4, + datetime(2011, 11, 11, 13, 00, 00), '1001:M7', + 'Cannabis Soil Microbiome', 'winter', 'n', + '64.6 sand, 17.6 silt, 17.8 clay', '1118232', 0.15, '3483', + 'root metagenome', 0.164, 114, 15, 1.41, 7.15, 0, + 'ENVO:Temperate grasslands, savannas, and shrubland biome', + 
'GAZ:United States of America', 6.94, 'SKB8', 5, -117.241111, + 'Burmese root', 'ENVO:plant-associated habitat', 33.193611} + self.assertEqual(set(obs), exp) + + def test_items(self): + """items returns an iterator over the (key, value) tuples""" + obs = self.tester.items() + self.assertTrue(isinstance(obs, Iterable)) + exp = {('physical_location', 'ANL'), ('has_physical_specimen', True), + ('has_extracted_data', True), ('sample_type', 'ENVO:soil'), + ('required_sample_info_status_id', 4), + ('collection_timestamp', datetime(2011, 11, 11, 13, 00, 00)), + ('host_subject_id', '1001:M7'), + ('description', 'Cannabis Soil Microbiome'), + ('season_environment', 'winter'), ('assigned_from_geo', 'n'), + ('texture', '64.6 sand, 17.6 silt, 17.8 clay'), + ('taxon_id', '1118232'), ('depth', 0.15), + ('host_taxid', '3483'), ('common_name', 'root metagenome'), + ('water_content_soil', 0.164), ('elevation', 114), ('temp', 15), + ('tot_nitro', 1.41), ('samp_salinity', 7.15), ('altitude', 0), + ('env_biome', + 'ENVO:Temperate grasslands, savannas, and shrubland biome'), + ('country', 'GAZ:United States of America'), ('ph', 6.94), + ('anonymized_name', 'SKB8'), ('tot_org_carb', 5), + ('longitude', -117.241111), + ('description_duplicate', 'Burmese root'), + ('env_feature', 'ENVO:plant-associated habitat'), + ('latitude', 33.193611)} + self.assertEqual(set(obs), exp) + + def test_get(self): + """get returns the correct sample object""" + self.assertEqual(self.tester.get('SEASON_ENVIRONMENT'), 'winter') + self.assertEqual(self.tester.get('depth'), 0.15) + + def test_get_none(self): + """get returns none if the sample id is not present""" + self.assertTrue(self.tester.get('Not_a_Category') is None) + + +@qiita_test_checker() +class TestPrepSample(TestCase): + """Tests the PrepSample class""" + + def setUp(self): + self.prep_template = PrepTemplate(1) + self.sample_id = 'SKB8.640193' + self.tester = PrepSample(self.sample_id, self.prep_template) + self.exp_categories = {'center_name', 
'center_project_name', + 'ebi_submission_accession', + 'ebi_study_accession', 'emp_status_id', + 'data_type_id', 'barcodesequence', + 'library_construction_protocol', + 'linkerprimersequence', 'target_subfragment', + 'target_gene', 'run_center', 'run_prefix', + 'run_date', 'experiment_center', + 'experiment_design_description', + 'experiment_title', 'platform', 'samp_size', + 'sequencing_meth', 'illumina_technology', + 'sample_center', 'pcr_primers', 'study_center'} + + def test_init_unknown_error(self): + """Init errors if the PrepSample id is not found in the template""" + with self.assertRaises(QiitaDBUnknownIDError): + PrepSample('Not_a_Sample', self.prep_template) + + def test_init_wrong_template(self): + """Raises an error if using a SampleTemplate instead of PrepTemplate""" + with self.assertRaises(IncompetentQiitaDeveloperError): + PrepSample('SKB8.640193', SampleTemplate(1)) + + def test_init(self): + """Init correctly initializes the PrepSample object""" + sample = PrepSample(self.sample_id, self.prep_template) + # Check that the internal id have been correctly set + self.assertEqual(sample._id, 'SKB8.640193') + # Check that the internal template have been correctly set + self.assertEqual(sample._md_template, self.prep_template) + # Check that the internal dynamic table name have been correctly set + self.assertEqual(sample._dynamic_table, "prep_1") + + def test_eq_true(self): + """Equality correctly returns true""" + other = PrepSample(self.sample_id, self.prep_template) + self.assertTrue(self.tester == other) + + def test_eq_false_type(self): + """Equality returns false if types are not equal""" + other = Sample(self.sample_id, SampleTemplate(1)) + self.assertFalse(self.tester == other) + + def test_eq_false_id(self): + """Equality returns false if ids are different""" + other = PrepSample('SKD8.640184', self.prep_template) + self.assertFalse(self.tester == other) + + def test_exists_true(self): + """Exists returns true if the PrepSample exists""" + 
self.assertTrue(PrepSample.exists(self.sample_id, self.prep_template)) + + def test_exists_false(self): + """Exists returns false if the PrepSample does not exists""" + self.assertFalse(PrepSample.exists('Not_a_Sample', self.prep_template)) + + def test_get_categories(self): + """Correctly returns the set of category headers""" + obs = self.tester._get_categories(self.conn_handler) + self.assertEqual(obs, self.exp_categories) + + def test_len(self): + """Len returns the correct number of categories""" + self.assertEqual(len(self.tester), 24) + + def test_getitem_required(self): + """Get item returns the correct metadata value from the required table + """ + self.assertEqual(self.tester['center_name'], 'ANL') + self.assertEqual(self.tester['emp_status_id'], 1) + self.assertTrue(self.tester['center_project_name'] is None) + + def test_getitem_dynamic(self): + """Get item returns the correct metadata value from the dynamic table + """ + self.assertEqual(self.tester['pcr_primers'], + 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT') + self.assertEqual(self.tester['barcodesequence'], 'AGCGCTCACATC') + + def test_getitem_error(self): + """Get item raises an error if category does not exists""" + with self.assertRaises(KeyError): + self.tester['Not_a_Category'] + + def test_setitem(self): + """setitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + self.tester['barcodesequence'] = 'GTCCGCAAGTTA' + + def test_delitem(self): + """delitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + del self.tester['pcr_primers'] + + def test_iter(self): + """iter returns an iterator over the category headers""" + obs = self.tester.__iter__() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_categories) + + def test_contains_true(self): + """contains returns true if the category header exists""" + self.assertTrue('BarcodeSequence' in self.tester) + 
self.assertTrue('barcodesequence' in self.tester) + + def test_contains_false(self): + """contains returns false if the category header does not exists""" + self.assertFalse('Not_a_Category' in self.tester) + + def test_keys(self): + """keys returns an iterator over the metadata headers""" + obs = self.tester.keys() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_categories) + + def test_values(self): + """values returns an iterator over the values""" + obs = self.tester.values() + self.assertTrue(isinstance(obs, Iterable)) + exp = {'ANL', None, None, None, 1, 2, 'AGCGCTCACATC', + 'This analysis was done as in Caporaso et al 2011 Genome ' + 'research. The PCR primers (F515/R806) were developed against ' + 'the V4 region of the 16S rRNA (both bacteria and archaea), ' + 'which we determined would yield optimal community clustering ' + 'with reads of this length using a procedure similar to that of' + ' ref. 15. [For reference, this primer pair amplifies the ' + 'region 533_786 in the Escherichia coli strain 83972 sequence ' + '(greengenes accession no. prokMSA_id:470367).] 
The reverse PCR' + ' primer is barcoded with a 12-base error-correcting Golay code' + ' to facilitate multiplexing of up to 1,500 samples per lane, ' + 'and both PCR primers contain sequencer adapter regions.', + 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL', + 's_G1_L001_sequences', '8/1/12', 'ANL', + 'micro biome of soil and rhizosphere of cannabis plants from ' + 'CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g', + 'Sequencing by synthesis', 'MiSeq', 'ANL', + 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'} + self.assertEqual(set(obs), exp) + + def test_items(self): + """items returns an iterator over the (key, value) tuples""" + obs = self.tester.items() + self.assertTrue(isinstance(obs, Iterable)) + exp = {('center_name', 'ANL'), ('center_project_name', None), + ('ebi_submission_accession', None), + ('ebi_study_accession', None), ('emp_status_id', 1), + ('data_type_id', 2), ('barcodesequence', 'AGCGCTCACATC'), + ('library_construction_protocol', + 'This analysis was done as in Caporaso et al 2011 Genome ' + 'research. The PCR primers (F515/R806) were developed against ' + 'the V4 region of the 16S rRNA (both bacteria and archaea), ' + 'which we determined would yield optimal community clustering ' + 'with reads of this length using a procedure similar to that ' + 'of ref. 15. [For reference, this primer pair amplifies the ' + 'region 533_786 in the Escherichia coli strain 83972 sequence ' + '(greengenes accession no. prokMSA_id:470367).] 
The reverse ' + 'PCR primer is barcoded with a 12-base error-correcting Golay ' + 'code to facilitate multiplexing of up to 1,500 samples per ' + 'lane, and both PCR primers contain sequencer adapter ' + 'regions.'), ('linkerprimersequence', 'GTGCCAGCMGCCGCGGTAA'), + ('target_subfragment', 'V4'), ('target_gene', '16S rRNA'), + ('run_center', 'ANL'), ('run_prefix', 's_G1_L001_sequences'), + ('run_date', '8/1/12'), ('experiment_center', 'ANL'), + ('experiment_design_description', + 'micro biome of soil and rhizosphere of cannabis plants ' + 'from CA'), ('experiment_title', 'Cannabis Soil Microbiome'), + ('platform', 'Illumina'), ('samp_size', '.25,g'), + ('sequencing_meth', 'Sequencing by synthesis'), + ('illumina_technology', 'MiSeq'), ('sample_center', 'ANL'), + ('pcr_primers', + 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT'), + ('study_center', 'CCME')} + self.assertEqual(set(obs), exp) + + def test_get(self): + """get returns the correct sample object""" + self.assertEqual(self.tester.get('barcodesequence'), 'AGCGCTCACATC') + + def test_get_none(self): + """get returns none if the sample id is not present""" + self.assertTrue(self.tester.get('Not_a_Category') is None) + + +@qiita_test_checker() +class TestMetadataTemplate(TestCase): + """Tests the MetadataTemplate base class""" + def setUp(self): + self.study = Study(1) + self.metadata = pd.DataFrame.from_dict({}) + + def test_init(self): + """Init raises an error because it's not called from a subclass""" + with self.assertRaises(IncompetentQiitaDeveloperError): + MetadataTemplate(1) + + def test_create(self): + """Create raises an error because it's not called from a subclass""" + with self.assertRaises(IncompetentQiitaDeveloperError): + MetadataTemplate.create(self.metadata, self.study) + + def test_exist(self): + """Exists raises an error because it's not called from a subclass""" + with self.assertRaises(IncompetentQiitaDeveloperError): + MetadataTemplate.exists(self.study) + + def 
test_table_name(self): + """table name raises an error because it's not called from a subclass + """ + with self.assertRaises(IncompetentQiitaDeveloperError): + MetadataTemplate._table_name(self.study) + + +@qiita_test_checker() +class TestSampleTemplate(TestCase): + """Tests the SampleTemplate class""" + + def setUp(self): + metadata_dict = { + 'Sample1': {'physical_location': 'location1', + 'has_physical_specimen': True, + 'has_extracted_data': True, + 'sample_type': 'type1', + 'required_sample_info_status_id': 1, + 'collection_timestamp': + datetime(2014, 5, 29, 12, 24, 51), + 'host_subject_id': 'NotIdentified', + 'Description': 'Test Sample 1', + 'str_column': 'Value for sample 1'}, + 'Sample2': {'physical_location': 'location1', + 'has_physical_specimen': True, + 'has_extracted_data': True, + 'sample_type': 'type1', + 'required_sample_info_status_id': 1, + 'collection_timestamp': + datetime(2014, 5, 29, 12, 24, 51), + 'host_subject_id': 'NotIdentified', + 'Description': 'Test Sample 2', + 'str_column': 'Value for sample 2'}, + 'Sample3': {'physical_location': 'location1', + 'has_physical_specimen': True, + 'has_extracted_data': True, + 'sample_type': 'type1', + 'required_sample_info_status_id': 1, + 'collection_timestamp': + datetime(2014, 5, 29, 12, 24, 51), + 'host_subject_id': 'NotIdentified', + 'Description': 'Test Sample 3', + 'str_column': 'Value for sample 3'} + } + self.metadata = pd.DataFrame.from_dict(metadata_dict, orient='index') + + self.test_study = Study(1) + info = { + "timeseries_type_id": 1, + "metadata_complete": True, + "mixs_compliant": True, + "number_samples_collected": 25, + "number_samples_promised": 28, + "portal_type_id": 3, + "study_alias": "FCM", + "study_description": "Microbiome of people who eat nothing but " + "fried chicken", + "study_abstract": "Exploring how a high fat diet changes the " + "gut microbiome", + "emp_person_id": StudyPerson(2), + "principal_investigator_id": StudyPerson(3), + "lab_person_id": StudyPerson(1) + } 
+ self.new_study = Study.create(User('test@foo.bar'), + "Fried Chicken Microbiome", [1], info) + self.tester = SampleTemplate(1) + self.exp_sample_ids = {'SKB1.640202', 'SKB2.640194', 'SKB3.640195', + 'SKB4.640189', 'SKB5.640181', 'SKB6.640176', + 'SKB7.640196', 'SKB8.640193', 'SKB9.640200', + 'SKD1.640179', 'SKD2.640178', 'SKD3.640198', + 'SKD4.640185', 'SKD5.640186', 'SKD6.640190', + 'SKD7.640191', 'SKD8.640184', 'SKD9.640182', + 'SKM1.640183', 'SKM2.640199', 'SKM3.640197', + 'SKM4.640180', 'SKM5.640177', 'SKM6.640187', + 'SKM7.640188', 'SKM8.640201', 'SKM9.640192'} + self._clean_up_files = [] + + def tearDown(self): + for f in self._clean_up_files: + remove(f) + + def test_init_unknown_error(self): + """Init raises an error if the id is not known""" + with self.assertRaises(QiitaDBUnknownIDError): + SampleTemplate(2) + + def test_init(self): + """Init successfully instantiates the object""" + st = SampleTemplate(1) + self.assertTrue(st.id, 1) + + def test_table_name(self): + """Table name return the correct string""" + obs = SampleTemplate._table_name(self.test_study) + self.assertEqual(obs, "sample_1") + + def test_create_duplicate(self): + """Create raises an error when creating a duplicated SampleTemplate""" + with self.assertRaises(QiitaDBDuplicateError): + SampleTemplate.create(self.metadata, self.test_study) + + def test_create_duplicate_header(self): + """Create raises an error when duplicate headers are present""" + self.metadata['STR_COLUMN'] = pd.Series(['', '', ''], + index=self.metadata.index) + with self.assertRaises(QiitaDBDuplicateHeaderError): + SampleTemplate.create(self.metadata, self.new_study) + + def test_create(self): + """Creates a new SampleTemplate""" + st = SampleTemplate.create(self.metadata, self.new_study) + # The returned object has the correct id + self.assertEqual(st.id, 2) + + # The relevant rows to required_sample_info have been added. 
+ obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.required_sample_info WHERE study_id=2") + # study_id sample_id physical_location has_physical_specimen + # has_extracted_data sample_type required_sample_info_status_id + # collection_timestamp host_subject_id description + exp = [[2, "Sample1", "location1", True, True, "type1", 1, + datetime(2014, 5, 29, 12, 24, 51), "NotIdentified", + "Test Sample 1"], + [2, "Sample2", "location1", True, True, "type1", 1, + datetime(2014, 5, 29, 12, 24, 51), "NotIdentified", + "Test Sample 2"], + [2, "Sample3", "location1", True, True, "type1", 1, + datetime(2014, 5, 29, 12, 24, 51), "NotIdentified", + "Test Sample 3"]] + self.assertEqual(obs, exp) + + # The relevant rows have been added to the study_sample_columns + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study_sample_columns WHERE study_id=2") + # study_id, column_name, column_type + exp = [[2, "str_column", "varchar"]] + self.assertEqual(obs, exp) + + # The new table exists + self.assertTrue(exists_table("sample_2", self.conn_handler)) + + # The new table hosts the correct values + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.sample_2") + # sample_id, str_column + exp = [['Sample1', "Value for sample 1"], + ['Sample2', "Value for sample 2"], + ['Sample3', "Value for sample 3"]] + self.assertEqual(obs, exp) + + def test_exists_true(self): + """Exists returns true when the SampleTemplate already exists""" + self.assertTrue(SampleTemplate.exists(self.test_study)) + + def test_exists_false(self): + """Exists returns false when the SampleTemplate does not exists""" + self.assertFalse(SampleTemplate.exists(self.new_study)) + + def test_get_sample_ids(self): + """get_sample_ids returns the correct set of sample ids""" + obs = self.tester._get_sample_ids(self.conn_handler) + self.assertEqual(obs, self.exp_sample_ids) + + def test_len(self): + """Len returns the correct number of sample ids""" + 
self.assertEqual(len(self.tester), 27) + + def test_getitem(self): + """Get item returns the correct sample object""" + obs = self.tester['SKM7.640188'] + exp = Sample('SKM7.640188', self.tester) + self.assertEqual(obs, exp) + + def test_getitem_error(self): + """Get item raises an error if key does not exists""" + with self.assertRaises(KeyError): + self.tester['Not_a_Sample'] + + def test_setitem(self): + """setitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + self.tester['SKM7.640188'] = Sample('SKM7.640188', self.tester) + + def test_delitem(self): + """delitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + del self.tester['SKM7.640188'] + + def test_iter(self): + """iter returns an iterator over the sample ids""" + obs = self.tester.__iter__() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_sample_ids) + + def test_contains_true(self): + """contains returns true if the sample id exists""" + self.assertTrue('SKM7.640188' in self.tester) + + def test_contains_false(self): + """contains returns false if the sample id does not exists""" + self.assertFalse('Not_a_Sample' in self.tester) + + def test_keys(self): + """keys returns an iterator over the sample ids""" + obs = self.tester.keys() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_sample_ids) + + def test_values(self): + """values returns an iterator over the values""" + obs = self.tester.values() + self.assertTrue(isinstance(obs, Iterable)) + exp = {Sample('SKB1.640202', self.tester), + Sample('SKB2.640194', self.tester), + Sample('SKB3.640195', self.tester), + Sample('SKB4.640189', self.tester), + Sample('SKB5.640181', self.tester), + Sample('SKB6.640176', self.tester), + Sample('SKB7.640196', self.tester), + Sample('SKB8.640193', self.tester), + Sample('SKB9.640200', self.tester), + Sample('SKD1.640179', self.tester), + 
Sample('SKD2.640178', self.tester), + Sample('SKD3.640198', self.tester), + Sample('SKD4.640185', self.tester), + Sample('SKD5.640186', self.tester), + Sample('SKD6.640190', self.tester), + Sample('SKD7.640191', self.tester), + Sample('SKD8.640184', self.tester), + Sample('SKD9.640182', self.tester), + Sample('SKM1.640183', self.tester), + Sample('SKM2.640199', self.tester), + Sample('SKM3.640197', self.tester), + Sample('SKM4.640180', self.tester), + Sample('SKM5.640177', self.tester), + Sample('SKM6.640187', self.tester), + Sample('SKM7.640188', self.tester), + Sample('SKM8.640201', self.tester), + Sample('SKM9.640192', self.tester)} + # Creating a list and looping over it since unittest does not call + # the __eq__ function on the objects + for o, e in zip(sorted(list(obs), key=lambda x: x.id), + sorted(exp, key=lambda x: x.id)): + self.assertEqual(o, e) + + def test_items(self): + """items returns an iterator over the (key, value) tuples""" + obs = self.tester.items() + self.assertTrue(isinstance(obs, Iterable)) + exp = [('SKB1.640202', Sample('SKB1.640202', self.tester)), + ('SKB2.640194', Sample('SKB2.640194', self.tester)), + ('SKB3.640195', Sample('SKB3.640195', self.tester)), + ('SKB4.640189', Sample('SKB4.640189', self.tester)), + ('SKB5.640181', Sample('SKB5.640181', self.tester)), + ('SKB6.640176', Sample('SKB6.640176', self.tester)), + ('SKB7.640196', Sample('SKB7.640196', self.tester)), + ('SKB8.640193', Sample('SKB8.640193', self.tester)), + ('SKB9.640200', Sample('SKB9.640200', self.tester)), + ('SKD1.640179', Sample('SKD1.640179', self.tester)), + ('SKD2.640178', Sample('SKD2.640178', self.tester)), + ('SKD3.640198', Sample('SKD3.640198', self.tester)), + ('SKD4.640185', Sample('SKD4.640185', self.tester)), + ('SKD5.640186', Sample('SKD5.640186', self.tester)), + ('SKD6.640190', Sample('SKD6.640190', self.tester)), + ('SKD7.640191', Sample('SKD7.640191', self.tester)), + ('SKD8.640184', Sample('SKD8.640184', self.tester)), + ('SKD9.640182', 
Sample('SKD9.640182', self.tester)), + ('SKM1.640183', Sample('SKM1.640183', self.tester)), + ('SKM2.640199', Sample('SKM2.640199', self.tester)), + ('SKM3.640197', Sample('SKM3.640197', self.tester)), + ('SKM4.640180', Sample('SKM4.640180', self.tester)), + ('SKM5.640177', Sample('SKM5.640177', self.tester)), + ('SKM6.640187', Sample('SKM6.640187', self.tester)), + ('SKM7.640188', Sample('SKM7.640188', self.tester)), + ('SKM8.640201', Sample('SKM8.640201', self.tester)), + ('SKM9.640192', Sample('SKM9.640192', self.tester))] + # Creating a list and looping over it since unittest does not call + # the __eq__ function on the objects + for o, e in zip(sorted(list(obs)), sorted(exp)): + self.assertEqual(o, e) + + def test_get(self): + """get returns the correct sample object""" + obs = self.tester.get('SKM7.640188') + exp = Sample('SKM7.640188', self.tester) + self.assertEqual(obs, exp) + + def test_get_none(self): + """get returns none if the sample id is not present""" + self.assertTrue(self.tester.get('Not_a_Sample') is None) + + def test_to_file(self): + """to file writes a tab delimited file with all the metadata""" + fd, fp = mkstemp() + close(fd) + st = SampleTemplate.create(self.metadata, self.new_study) + st.to_file(fp) + self._clean_up_files.append(fp) + with open(fp, 'U') as f: + obs = f.read() + self.assertEqual(obs, EXP_SAMPLE_TEMPLATE) + + +@qiita_test_checker() +class TestPrepTemplate(TestCase): + """Tests the PrepTemplate class""" + + def setUp(self): + metadata_dict = { + 'SKB8.640193': {'center_name': 'ANL', + 'center_project_name': 'Test Project', + 'ebi_submission_accession': None, + 'EMP_status_id': 1, + 'data_type_id': 2, + 'str_column': 'Value for sample 1'}, + 'SKD8.640184': {'center_name': 'ANL', + 'center_project_name': 'Test Project', + 'ebi_submission_accession': None, + 'EMP_status_id': 1, + 'data_type_id': 2, + 'str_column': 'Value for sample 2'}, + 'SKB7.640196': {'center_name': 'ANL', + 'center_project_name': 'Test Project', + 
'ebi_submission_accession': None, + 'EMP_status_id': 1, + 'data_type_id': 2, + 'str_column': 'Value for sample 3'} + } + self.metadata = pd.DataFrame.from_dict(metadata_dict, orient='index') + self.test_raw_data = RawData(1) + + fd, seqs_fp = mkstemp(suffix='_seqs.fastq') + close(fd) + fd, barcodes_fp = mkstemp(suffix='_barcodes.fastq') + close(fd) + filepaths = [(seqs_fp, 1), (barcodes_fp, 2)] + with open(seqs_fp, "w") as f: + f.write("\n") + with open(barcodes_fp, "w") as f: + f.write("\n") + self.new_raw_data = RawData.create(2, filepaths, [Study(1)]) + db_test_raw_dir = join(get_db_files_base_dir(), 'raw_data') + db_seqs_fp = join(db_test_raw_dir, "3_%s" % basename(seqs_fp)) + db_barcodes_fp = join(db_test_raw_dir, "3_%s" % basename(barcodes_fp)) + self._clean_up_files = [db_seqs_fp, db_barcodes_fp] + + self.tester = PrepTemplate(1) + self.exp_sample_ids = {'SKB1.640202', 'SKB2.640194', 'SKB3.640195', + 'SKB4.640189', 'SKB5.640181', 'SKB6.640176', + 'SKB7.640196', 'SKB8.640193', 'SKB9.640200', + 'SKD1.640179', 'SKD2.640178', 'SKD3.640198', + 'SKD4.640185', 'SKD5.640186', 'SKD6.640190', + 'SKD7.640191', 'SKD8.640184', 'SKD9.640182', + 'SKM1.640183', 'SKM2.640199', 'SKM3.640197', + 'SKM4.640180', 'SKM5.640177', 'SKM6.640187', + 'SKM7.640188', 'SKM8.640201', 'SKM9.640192'} + + def tearDown(self): + for f in self._clean_up_files: + remove(f) + + def test_init_unknown_error(self): + """Init raises an error if the id is not known""" + with self.assertRaises(QiitaDBUnknownIDError): + PrepTemplate(2) + + def test_init(self): + """Init successfully instantiates the object""" + st = PrepTemplate(1) + self.assertTrue(st.id, 1) + + def test_table_name(self): + """Table name return the correct string""" + obs = PrepTemplate._table_name(self.test_raw_data) + self.assertEqual(obs, "prep_1") + + def test_create_duplicate(self): + """Create raises an error when creating a duplicated PrepTemplate""" + with self.assertRaises(QiitaDBDuplicateError): + 
PrepTemplate.create(self.metadata, self.test_raw_data) + + def test_create_duplicate_header(self): + """Create raises an error when duplicate headers are present""" + self.metadata['STR_COLUMN'] = pd.Series(['', '', ''], + index=self.metadata.index) + with self.assertRaises(QiitaDBDuplicateHeaderError): + PrepTemplate.create(self.metadata, self.new_raw_data) + + def test_create(self): + """Creates a new PrepTemplate""" + pt = PrepTemplate.create(self.metadata, self.new_raw_data) + # The returned object has the correct id + self.assertEqual(pt.id, 3) + + # The relevant rows to common_prep_info have been added. + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.common_prep_info WHERE raw_data_id=3") + # raw_data_id, sample_id, center_name, center_project_name, + # ebi_submission_accession, ebi_study_accession, emp_status_id, + # data_type_id + exp = [[3, 'SKB8.640193', 'ANL', 'Test Project', None, None, 1, 2], + [3, 'SKD8.640184', 'ANL', 'Test Project', None, None, 1, 2], + [3, 'SKB7.640196', 'ANL', 'Test Project', None, None, 1, 2]] + self.assertEqual(sorted(obs), sorted(exp)) + + # The relevant rows have been added to the raw_data_prep_columns + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.raw_data_prep_columns WHERE raw_data_id=3") + # raw_data_id, column_name, column_type + exp = [[3, "str_column", "varchar"]] + self.assertEqual(obs, exp) + + # The new table exists + self.assertTrue(exists_table("prep_3", self.conn_handler)) + + # The new table hosts the correct values + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.prep_3") + # sample_id, str_column + exp = [['SKB8.640193', "Value for sample 1"], + ['SKD8.640184', "Value for sample 2"], + ['SKB7.640196', "Value for sample 3"]] + self.assertEqual(sorted(obs), sorted(exp)) + + def test_exists_true(self): + """Exists returns true when the PrepTemplate already exists""" + self.assertTrue(PrepTemplate.exists(self.test_raw_data)) + + def test_exists_false(self): + 
"""Exists returns false when the PrepTemplate does not exists""" + self.assertFalse(PrepTemplate.exists(self.new_raw_data)) + + def test_get_sample_ids(self): + """get_sample_ids returns the correct set of sample ids""" + obs = self.tester._get_sample_ids(self.conn_handler) + self.assertEqual(obs, self.exp_sample_ids) + + def test_len(self): + """Len returns the correct number of sample ids""" + self.assertEqual(len(self.tester), 27) + + def test_getitem(self): + """Get item returns the correct sample object""" + obs = self.tester['SKM7.640188'] + exp = PrepSample('SKM7.640188', self.tester) + self.assertEqual(obs, exp) + + def test_getitem_error(self): + """Get item raises an error if key does not exists""" + with self.assertRaises(KeyError): + self.tester['Not_a_Sample'] + + def test_setitem(self): + """setitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + self.tester['SKM7.640188'] = PrepSample('SKM7.640188', self.tester) + + def test_delitem(self): + """delitem raises an error (currently not allowed)""" + with self.assertRaises(QiitaDBNotImplementedError): + del self.tester['SKM7.640188'] + + def test_iter(self): + """iter returns an iterator over the sample ids""" + obs = self.tester.__iter__() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_sample_ids) + + def test_contains_true(self): + """contains returns true if the sample id exists""" + self.assertTrue('SKM7.640188' in self.tester) + + def test_contains_false(self): + """contains returns false if the sample id does not exists""" + self.assertFalse('Not_a_Sample' in self.tester) + + def test_keys(self): + """keys returns an iterator over the sample ids""" + obs = self.tester.keys() + self.assertTrue(isinstance(obs, Iterable)) + self.assertEqual(set(obs), self.exp_sample_ids) + + def test_values(self): + """values returns an iterator over the values""" + obs = self.tester.values() + self.assertTrue(isinstance(obs, 
Iterable)) + exp = {PrepSample('SKB1.640202', self.tester), + PrepSample('SKB2.640194', self.tester), + PrepSample('SKB3.640195', self.tester), + PrepSample('SKB4.640189', self.tester), + PrepSample('SKB5.640181', self.tester), + PrepSample('SKB6.640176', self.tester), + PrepSample('SKB7.640196', self.tester), + PrepSample('SKB8.640193', self.tester), + PrepSample('SKB9.640200', self.tester), + PrepSample('SKD1.640179', self.tester), + PrepSample('SKD2.640178', self.tester), + PrepSample('SKD3.640198', self.tester), + PrepSample('SKD4.640185', self.tester), + PrepSample('SKD5.640186', self.tester), + PrepSample('SKD6.640190', self.tester), + PrepSample('SKD7.640191', self.tester), + PrepSample('SKD8.640184', self.tester), + PrepSample('SKD9.640182', self.tester), + PrepSample('SKM1.640183', self.tester), + PrepSample('SKM2.640199', self.tester), + PrepSample('SKM3.640197', self.tester), + PrepSample('SKM4.640180', self.tester), + PrepSample('SKM5.640177', self.tester), + PrepSample('SKM6.640187', self.tester), + PrepSample('SKM7.640188', self.tester), + PrepSample('SKM8.640201', self.tester), + PrepSample('SKM9.640192', self.tester)} + # Creating a list and looping over it since unittest does not call + # the __eq__ function on the objects + for o, e in zip(sorted(list(obs), key=lambda x: x.id), + sorted(exp, key=lambda x: x.id)): + self.assertEqual(o, e) + + def test_items(self): + """items returns an iterator over the (key, value) tuples""" + obs = self.tester.items() + self.assertTrue(isinstance(obs, Iterable)) + exp = [('SKB1.640202', PrepSample('SKB1.640202', self.tester)), + ('SKB2.640194', PrepSample('SKB2.640194', self.tester)), + ('SKB3.640195', PrepSample('SKB3.640195', self.tester)), + ('SKB4.640189', PrepSample('SKB4.640189', self.tester)), + ('SKB5.640181', PrepSample('SKB5.640181', self.tester)), + ('SKB6.640176', PrepSample('SKB6.640176', self.tester)), + ('SKB7.640196', PrepSample('SKB7.640196', self.tester)), + ('SKB8.640193', 
PrepSample('SKB8.640193', self.tester)), + ('SKB9.640200', PrepSample('SKB9.640200', self.tester)), + ('SKD1.640179', PrepSample('SKD1.640179', self.tester)), + ('SKD2.640178', PrepSample('SKD2.640178', self.tester)), + ('SKD3.640198', PrepSample('SKD3.640198', self.tester)), + ('SKD4.640185', PrepSample('SKD4.640185', self.tester)), + ('SKD5.640186', PrepSample('SKD5.640186', self.tester)), + ('SKD6.640190', PrepSample('SKD6.640190', self.tester)), + ('SKD7.640191', PrepSample('SKD7.640191', self.tester)), + ('SKD8.640184', PrepSample('SKD8.640184', self.tester)), + ('SKD9.640182', PrepSample('SKD9.640182', self.tester)), + ('SKM1.640183', PrepSample('SKM1.640183', self.tester)), + ('SKM2.640199', PrepSample('SKM2.640199', self.tester)), + ('SKM3.640197', PrepSample('SKM3.640197', self.tester)), + ('SKM4.640180', PrepSample('SKM4.640180', self.tester)), + ('SKM5.640177', PrepSample('SKM5.640177', self.tester)), + ('SKM6.640187', PrepSample('SKM6.640187', self.tester)), + ('SKM7.640188', PrepSample('SKM7.640188', self.tester)), + ('SKM8.640201', PrepSample('SKM8.640201', self.tester)), + ('SKM9.640192', PrepSample('SKM9.640192', self.tester))] + # Creating a list and looping over it since unittest does not call + # the __eq__ function on the objects + for o, e in zip(sorted(list(obs)), sorted(exp)): + self.assertEqual(o, e) + + def test_get(self): + """get returns the correct PrepSample object""" + obs = self.tester.get('SKM7.640188') + exp = PrepSample('SKM7.640188', self.tester) + self.assertEqual(obs, exp) + + def test_get_none(self): + """get returns none if the sample id is not present""" + self.assertTrue(self.tester.get('Not_a_Sample') is None) + + def test_to_file(self): + """to file writes a tab delimited file with all the metadata""" + fd, fp = mkstemp() + close(fd) + pt = PrepTemplate.create(self.metadata, self.new_raw_data) + pt.to_file(fp) + self._clean_up_files.append(fp) + with open(fp, 'U') as f: + obs = f.read() + self.assertEqual(obs, 
EXP_PREP_TEMPLATE) + +EXP_SAMPLE_TEMPLATE = ( + "#SampleID\tcollection_timestamp\tdescription\thas_extracted_data\t" + "has_physical_specimen\thost_subject_id\tphysical_location\t" + "required_sample_info_status_id\tsample_type\tstr_column\nSample1\t" + "2014-05-29 12:24:51\tTest Sample 1\tTrue\tTrue\tNotIdentified\tlocation1" + "\t1\ttype1\tValue for sample 1\nSample2\t2014-05-29 12:24:51\t" + "Test Sample 2\tTrue\tTrue\tNotIdentified\tlocation1\t1\ttype1\t" + "Value for sample 2\nSample3\t2014-05-29 12:24:51\tTest Sample 3\tTrue\t" + "True\tNotIdentified\tlocation1\t1\ttype1\tValue for sample 3\n") + +EXP_PREP_TEMPLATE = ( + "#SampleID\tcenter_name\tcenter_project_name\tdata_type_id\t" + "ebi_study_accession\tebi_submission_accession\temp_status_id\tstr_column" + "\nSKB7.640196\tANL\tTest Project\t2\tNone\tNone\t1\tValue for sample 3\n" + "SKB8.640193\tANL\tTest Project\t2\tNone\tNone\t1\tValue for sample 1\n" + "SKD8.640184\tANL\tTest Project\t2\tNone\tNone\t1\tValue for sample 2\n") + +if __name__ == '__main__': + main() diff --git a/qiita_db/test/test_setup.py b/qiita_db/test/test_setup.py new file mode 100644 index 000000000..5558be84a --- /dev/null +++ b/qiita_db/test/test_setup.py @@ -0,0 +1,125 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +from unittest import TestCase, main + +from qiita_core.util import qiita_test_checker +from qiita_db.util import check_count + + +@qiita_test_checker() +class SetupTest(TestCase): + """Tests that the test database have been successfully populated""" + + def test_qitta_user(self): + check_count("qiita.qiita_user", 4) + + def test_study_person(self): + check_count("qiita.study_person", 3) + + def test_study(self): + check_count("qiita.study", 1) + + def test_study_users(self): + check_count("qiita.study_users", 2) + + def test_investigation(self): + check_count("qiita.investigation", 1) + + def test_investigation_study(self): + check_count("qiita.investigation_study", 1) + + def test_study_experimental_factor(self): + check_count("qiita.study_experimental_factor", 1) + + def test_filepath(self): + check_count("qiita.filepath", 9) + + def test_filepath_type(self): + check_count("qiita.filepath_type", 8) + + def test_raw_data(self): + check_count("qiita.raw_data", 2) + + def test_raw_filepath(self): + check_count("qiita.raw_filepath", 4) + + def test_study_raw_data(self): + check_count("qiita.study_raw_data", 2) + + def test_required_sample_info(self): + check_count("qiita.required_sample_info", 27) + + def test_study_sample_columns(self): + check_count("qiita.study_sample_columns", 23) + + def test_sample_1(self): + check_count("qiita.sample_1", 27) + + def test_common_prep_info(self): + check_count("qiita.common_prep_info", 27) + + def test_raw_data_prep_columns(self): + check_count("qiita.raw_data_prep_columns", 19) + + def test_prep_1(self): + check_count("qiita.prep_1", 27) + + def test_preprocessed_data(self): + check_count("qiita.preprocessed_data", 2) + + def test_raw_preprocessed_data(self): + check_count("qiita.raw_preprocessed_data", 2) + + def test_study_preprocessed_data(self): + check_count("qiita.study_preprocessed_data", 2) + + def test_preprocessed_filepath(self): + 
check_count("qiita.preprocessed_filepath", 2) + + def test_preprocessed_sequence_illumina_params(self): + check_count("qiita.preprocessed_sequence_illumina_params", 2) + + def test_processed_data(self): + check_count("qiita.processed_data", 1) + + def test_preprocessed_processed_data(self): + check_count("qiita.preprocessed_processed_data", 1) + + def test_reference(self): + check_count("qiita.reference", 1) + + def test_processed_params_uclust(self): + check_count("qiita.processed_params_uclust", 1) + + def test_processed_filepath(self): + check_count("qiita.processed_filepath", 1) + + def test_job(self): + check_count("qiita.job", 2) + + def test_analysis(self): + check_count("qiita.analysis", 1) + + def test_analysis_job(self): + check_count("qiita.analysis_job", 3) + + def test_analysis_filepath(self): + check_count("qiita.analysis_filepath", 1) + + def test_analysis_sample(self): + check_count("qiita.analysis_sample", 8) + + def test_analysis_users(self): + check_count("qiita.analysis_users", 1) + + def test_job_results_filepath(self): + check_count("qiita.job_results_filepath", 2) + +if __name__ == '__main__': + main() diff --git a/qiita_db/test/test_study.py b/qiita_db/test/test_study.py new file mode 100644 index 000000000..0f5a3f4aa --- /dev/null +++ b/qiita_db/test/test_study.py @@ -0,0 +1,420 @@ +from unittest import TestCase, main +from datetime import date + +from future.utils import viewitems + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from qiita_core.util import qiita_test_checker +from qiita_db.base import QiitaObject +from qiita_db.study import Study, StudyPerson +from qiita_db.investigation import Investigation +from qiita_db.user import User +from qiita_db.exceptions import QiitaDBColumnError, QiitaDBStatusError + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. 
+# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + + +@qiita_test_checker() +class TestStudyPerson(TestCase): + def setUp(self): + self.studyperson = StudyPerson(1) + + def test_create_studyperson(self): + new = StudyPerson.create('SomeDude', 'somedude@foo.bar', + '111 fake street', '111-121-1313') + self.assertEqual(new.id, 4) + obs = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study_person WHERE study_person_id = 4") + self.assertEqual(obs, [[4, 'SomeDude', 'somedude@foo.bar', + '111 fake street', '111-121-1313']]) + + def test_create_studyperson_already_exists(self): + obs = StudyPerson.create('LabDude', 'lab_dude@foo.bar') + self.assertEqual(obs.name, 'LabDude') + self.assertEqual(obs.email, 'lab_dude@foo.bar') + + def test_retrieve_name(self): + self.assertEqual(self.studyperson.name, 'LabDude') + + def test_set_name_fail(self): + with self.assertRaises(AttributeError): + self.studyperson.name = 'Fail Dude' + + def test_retrieve_email(self): + self.assertEqual(self.studyperson.email, 'lab_dude@foo.bar') + + def test_set_email_fail(self): + with self.assertRaises(AttributeError): + self.studyperson.email = 'faildude@foo.bar' + + def test_retrieve_address(self): + self.assertEqual(self.studyperson.address, '123 lab street') + + def test_retrieve_address_null(self): + person = StudyPerson(2) + self.assertEqual(person.address, None) + + def test_set_address(self): + self.studyperson.address = '123 nonsense road' + self.assertEqual(self.studyperson.address, '123 nonsense road') + + def test_retrieve_phone(self): + self.assertEqual(self.studyperson.phone, '121-222-3333') + + def test_retrieve_phone_null(self): + person = StudyPerson(3) + self.assertEqual(person.phone, None) + + def test_set_phone(self): + self.studyperson.phone = '111111111111111111121' + self.assertEqual(self.studyperson.phone, '111111111111111111121') + + 
+@qiita_test_checker() +class TestStudy(TestCase): + def setUp(self): + self.study = Study(1) + + self.info = { + "timeseries_type_id": 1, + "metadata_complete": True, + "mixs_compliant": True, + "number_samples_collected": 25, + "number_samples_promised": 28, + "portal_type_id": 3, + "study_alias": "FCM", + "study_description": "Microbiome of people who eat nothing but " + "fried chicken", + "study_abstract": "Exploring how a high fat diet changes the " + "gut microbiome", + "emp_person_id": StudyPerson(2), + "principal_investigator_id": StudyPerson(3), + "lab_person_id": StudyPerson(1) + } + + self.infoexp = { + "timeseries_type_id": 1, + "metadata_complete": True, + "mixs_compliant": True, + "number_samples_collected": 25, + "number_samples_promised": 28, + "portal_type_id": 3, + "study_alias": "FCM", + "study_description": "Microbiome of people who eat nothing but " + "fried chicken", + "study_abstract": "Exploring how a high fat diet changes the " + "gut microbiome", + "emp_person_id": 2, + "principal_investigator_id": 3, + "lab_person_id": 1 + } + + self.existingexp = { + 'mixs_compliant': True, + 'metadata_complete': True, + 'reprocess': False, + 'number_samples_promised': 27, + 'emp_person_id': StudyPerson(2), + 'funding': None, + 'vamps_id': None, + 'first_contact': '2014-05-19 16:10', + 'principal_investigator_id': StudyPerson(3), + 'timeseries_type_id': 1, + 'study_abstract': + "This is a preliminary study to examine the " + "microbiota associated with the Cannabis plant. Soils samples " + "from the bulk soil, soil associated with the roots, and the " + "rhizosphere were extracted and the DNA sequenced. Roots " + "from three independent plants of different strains were " + "examined. These roots were obtained November 11, 2011 from " + "plants that had been harvested in the summer. 
Future " + "studies will attempt to analyze the soils and rhizospheres " + "from the same location at different time points in the plant " + "lifecycle.", + 'spatial_series': False, + 'study_description': 'Analysis of the Cannabis Plant Microbiome', + 'portal_type_id': 2, + 'study_alias': 'Cannabis Soils', + 'most_recent_contact': '2014-05-19 16:11', + 'lab_person_id': StudyPerson(1), + 'number_samples_collected': 27} + + def test_get_public(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + obs = Study.get_public() + self.assertEqual(obs, [Study(1)]) + + def test_create_study_min_data(self): + """Insert a study into the database""" + obs = Study.create(User('test@foo.bar'), "Fried chicken microbiome", + [1], self.info) + self.assertEqual(obs.id, 2) + exp = {'mixs_compliant': True, 'metadata_complete': True, + 'reprocess': False, 'study_status_id': 1, + 'number_samples_promised': 28, 'emp_person_id': 2, + 'funding': None, 'vamps_id': None, + 'first_contact': date.today().isoformat(), + 'principal_investigator_id': 3, + 'timeseries_type_id': 1, + 'study_abstract': 'Exploring how a high fat diet changes the ' + 'gut microbiome', + 'email': 'test@foo.bar', 'spatial_series': None, + 'study_description': 'Microbiome of people who eat nothing but' + ' fried chicken', + 'portal_type_id': 3, 'study_alias': 'FCM', 'study_id': 2, + 'most_recent_contact': None, 'lab_person_id': 1, + 'study_title': 'Fried chicken microbiome', + 'number_samples_collected': 25} + + obsins = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study WHERE study_id = 2") + self.assertEqual(len(obsins), 1) + obsins = dict(obsins[0]) + self.assertEqual(obsins, exp) + + # make sure EFO went in to table correctly + efo = self.conn_handler.execute_fetchall( + "SELECT efo_id FROM qiita.study_experimental_factor " + "WHERE study_id = 2") + self.assertEqual(efo, [[1]]) + + def test_create_study_with_investigation(self): + 
"""Insert a study into the database with an investigation""" + obs = Study.create(User('test@foo.bar'), "Fried chicken microbiome", + [1], self.info, Investigation(1)) + self.assertEqual(obs.id, 2) + # check the investigation was assigned + obs = self.conn_handler.execute_fetchall( + "SELECT * from qiita.investigation_study WHERE study_id = 2") + self.assertEqual(obs, [[1, 2]]) + + def test_create_study_all_data(self): + """Insert a study into the database with every info field""" + self.info.update({ + 'vamps_id': 'MBE_1111111', + 'funding': 'FundAgency', + 'spatial_series': True, + 'metadata_complete': False, + 'reprocess': True, + 'first_contact': "Today" + }) + obs = Study.create(User('test@foo.bar'), "Fried chicken microbiome", + [1], self.info) + self.assertEqual(obs.id, 2) + exp = {'mixs_compliant': True, 'metadata_complete': False, + 'reprocess': True, 'study_status_id': 1, + 'number_samples_promised': 28, 'emp_person_id': 2, + 'funding': 'FundAgency', 'vamps_id': 'MBE_1111111', + 'first_contact': "Today", + 'principal_investigator_id': 3, 'timeseries_type_id': 1, + 'study_abstract': 'Exploring how a high fat diet changes the ' + 'gut microbiome', + 'email': 'test@foo.bar', 'spatial_series': True, + 'study_description': 'Microbiome of people who eat nothing ' + 'but fried chicken', + 'portal_type_id': 3, 'study_alias': 'FCM', 'study_id': 2, + 'most_recent_contact': None, 'lab_person_id': 1, + 'study_title': 'Fried chicken microbiome', + 'number_samples_collected': 25} + obsins = self.conn_handler.execute_fetchall( + "SELECT * FROM qiita.study WHERE study_id = 2") + self.assertEqual(len(obsins), 1) + obsins = dict(obsins[0]) + self.assertEqual(obsins, exp) + + # make sure EFO went in to table correctly + obsefo = self.conn_handler.execute_fetchall( + "SELECT efo_id FROM qiita.study_experimental_factor " + "WHERE study_id = 2") + self.assertEqual(obsefo, [[1]]) + + def test_create_missing_required(self): + """ Insert a study that is missing a required info 
key""" + self.info.pop("study_alias") + with self.assertRaises(QiitaDBColumnError): + Study.create(User('test@foo.bar'), "Fried Chicken Microbiome", + [1], self.info) + + def test_create_empty_efo(self): + """ Insert a study that is missing a required info key""" + with self.assertRaises(IncompetentQiitaDeveloperError): + Study.create(User('test@foo.bar'), "Fried Chicken Microbiome", + [], self.info) + + def test_create_study_with_not_allowed_key(self): + """Insert a study with key from _non_info present""" + self.info.update({"study_id": 1}) + with self.assertRaises(QiitaDBColumnError): + Study.create(User('test@foo.bar'), "Fried Chicken Microbiome", + [1], self.info) + + def test_create_unknown_db_col(self): + """ Insert a study with an info key not in the database""" + self.info["SHOULDNOTBEHERE"] = "BWAHAHAHAHAHA" + with self.assertRaises(QiitaDBColumnError): + Study.create(User('test@foo.bar'), "Fried Chicken Microbiome", + [1], self.info) + + def test_retrieve_title(self): + self.assertEqual(self.study.title, 'Identification of the Microbiomes' + ' for Cannabis Soils') + + def test_set_title(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + new.title = "Cannabis soils" + self.assertEqual(new.title, "Cannabis soils") + + def test_set_title_public(self): + """Tests for fail if editing title of a public study""" + with self.assertRaises(QiitaDBStatusError): + self.study.title = "FAILBOAT" + + def test_get_efo(self): + self.assertEqual(self.study.efo, [1]) + + def test_set_efo(self): + """Set efo with list efo_id""" + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + new.efo = [3, 4] + self.assertEqual(new.efo, [3, 4]) + + def test_set_efo_empty(self): + """Set efo with list efo_id""" + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + with 
self.assertRaises(IncompetentQiitaDeveloperError): + new.efo = [] + + def test_set_efo_public(self): + """Set efo on a public study""" + with self.assertRaises(QiitaDBStatusError): + self.study.efo = 6 + + def test_retrieve_info(self): + for key, val in viewitems(self.existingexp): + if isinstance(val, QiitaObject): + self.existingexp[key] = val.id + self.assertEqual(self.study.info, self.existingexp) + + def test_set_info(self): + """Set info in a study""" + newinfo = { + "timeseries_type_id": 2, + "metadata_complete": False, + "number_samples_collected": 28, + "lab_person_id": StudyPerson(2), + "vamps_id": 'MBE_111222', + "first_contact": "June 11, 2014" + } + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.infoexp.update(newinfo) + new.info = newinfo + # add missing table cols + self.infoexp["funding"] = None + self.infoexp["spatial_series"] = None + self.infoexp["most_recent_contact"] = None + self.infoexp["reprocess"] = False + self.infoexp["lab_person_id"] = 2 + self.assertEqual(new.info, self.infoexp) + + def test_set_info_public(self): + """Tests for fail if editing info of a public study""" + with self.assertRaises(QiitaDBStatusError): + self.study.info = {"vamps_id": "12321312"} + + def test_set_info_disallowed_keys(self): + """Tests for fail if sending non-info keys in info dict""" + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + with self.assertRaises(QiitaDBColumnError): + new.info = {"email": "fail@fail.com"} + + def test_info_empty(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + with self.assertRaises(IncompetentQiitaDeveloperError): + new.info = {} + + def test_retrieve_status(self): + self.assertEqual(self.study.status, "public") + + def test_set_status(self): + new = Study.create(User('test@foo.bar'), 'Identification of 
the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + new.status = "private" + self.assertEqual(new.status, "private") + + def test_retrieve_shared_with(self): + self.assertEqual(self.study.shared_with, ['shared@foo.bar', + 'demo@microbio.me']) + + def test_retrieve_pmids(self): + exp = ['123456', '7891011'] + self.assertEqual(self.study.pmids, exp) + + def test_retrieve_pmids_empty(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.assertEqual(new.pmids, []) + + def test_retrieve_investigation(self): + self.assertEqual(self.study.investigation, 1) + + def test_retrieve_investigation_empty(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.assertEqual(new.investigation, None) + + def test_retrieve_sample_template(self): + self.assertEqual(self.study.sample_template, 1) + + def test_retrieve_data_types(self): + self.assertEqual(self.study.data_types, ['18S']) + + def test_retrieve_data_types_none(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.assertEqual(new.data_types, []) + + def test_retrieve_raw_data(self): + self.assertEqual(self.study.raw_data, [1, 2]) + + def test_retrieve_raw_data_none(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.assertEqual(new.raw_data, []) + + def test_retrieve_preprocessed_data(self): + self.assertEqual(self.study.preprocessed_data, [1, 2]) + + def test_retrieve_preprocessed_data_none(self): + new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.assertEqual(new.preprocessed_data, []) + + def test_retrieve_processed_data(self): + self.assertEqual(self.study.processed_data, [1]) + + def test_retrieve_processed_data_none(self): 
+ new = Study.create(User('test@foo.bar'), 'Identification of the ' + 'Microbiomes for Cannabis Soils', [1], self.info) + self.assertEqual(new.processed_data, []) + + def test_add_pmid(self): + self.study.add_pmid('4544444') + exp = ['123456', '7891011', '4544444'] + self.assertEqual(self.study.pmids, exp) + + +if __name__ == "__main__": + main() diff --git a/qiita_db/test/test_user.py b/qiita_db/test/test_user.py new file mode 100644 index 000000000..9f12cc89b --- /dev/null +++ b/qiita_db/test/test_user.py @@ -0,0 +1,203 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- + +from unittest import TestCase, main + +from qiita_core.exceptions import (IncorrectEmailError, IncorrectPasswordError, + IncompetentQiitaDeveloperError) +from qiita_core.util import qiita_test_checker +from qiita_db.user import User +from qiita_db.exceptions import (QiitaDBDuplicateError, QiitaDBColumnError, + QiitaDBUnknownIDError) + + +@qiita_test_checker() +class UserTest(TestCase): + """Tests the User object and all properties/methods""" + + def setUp(self): + self.user = User('admin@foo.bar') + + self.userinfo = { + 'name': 'Dude', + 'affiliation': 'Nowhere University', + 'address': '123 fake st, Apt 0, Faketown, CO 80302', + 'phone': '111-222-3344' + } + + def test_instantiate_user(self): + User('admin@foo.bar') + + def test_instantiate_unknown_user(self): + with self.assertRaises(QiitaDBUnknownIDError): + User('FAIL@OMG.bar') + + def _check_correct_info(self, obs, exp): + self.assertEqual(set(exp.keys()), set(obs.keys())) + for key in exp: + # user_verify_code and password seed randomly generated so just + # making sure they exist and is correct length + if key == 'user_verify_code': 
+ self.assertEqual(len(obs[key]), 20) + elif key == "password": + self.assertEqual(len(obs[key]), 60) + else: + self.assertEqual(obs[key], exp[key]) + + def test_create_user(self): + user = User.create('new@test.bar', 'password') + self.assertEqual(user.id, 'new@test.bar') + sql = "SELECT * from qiita.qiita_user WHERE email = 'new@test.bar'" + obs = self.conn_handler.execute_fetchall(sql) + self.assertEqual(len(obs), 1) + obs = dict(obs[0]) + exp = { + 'password': '', + 'name': None, + 'pass_reset_timestamp': None, + 'affiliation': None, + 'pass_reset_code': None, + 'phone': None, + 'user_verify_code': '', + 'address': None, + 'user_level_id': 5, + 'email': 'new@test.bar'} + self._check_correct_info(obs, exp) + + def test_create_user_info(self): + user = User.create('new@test.bar', 'password', self.userinfo) + self.assertEqual(user.id, 'new@test.bar') + sql = "SELECT * from qiita.qiita_user WHERE email = 'new@test.bar'" + obs = self.conn_handler.execute_fetchall(sql) + self.assertEqual(len(obs), 1) + obs = dict(obs[0]) + exp = { + 'password': '', + 'name': 'Dude', + 'affiliation': 'Nowhere University', + 'address': '123 fake st, Apt 0, Faketown, CO 80302', + 'phone': '111-222-3344', + 'pass_reset_timestamp': None, + 'pass_reset_code': None, + 'user_verify_code': '', + 'user_level_id': 5, + 'email': 'new@test.bar'} + self._check_correct_info(obs, exp) + + def test_create_user_column_not_allowed(self): + self.userinfo["pass_reset_code"] = "FAIL" + with self.assertRaises(QiitaDBColumnError): + User.create('new@test.bar', 'password', self.userinfo) + + def test_create_user_non_existent_column(self): + self.userinfo["BADTHING"] = "FAIL" + with self.assertRaises(QiitaDBColumnError): + User.create('new@test.bar', 'password', self.userinfo) + + def test_create_user_duplicate(self): + with self.assertRaises(QiitaDBDuplicateError): + User.create('test@foo.bar', 'password') + + def test_create_user_bad_email(self): + with self.assertRaises(IncorrectEmailError): + 
User.create('notanemail', 'password') + + def test_create_user_bad_password(self): + with self.assertRaises(IncorrectPasswordError): + User.create('new@test.com', '') + + def test_login(self): + self.assertEqual(User.login("test@foo.bar", "password"), + User("test@foo.bar")) + + def test_login_incorrect_user(self): + with self.assertRaises(IncorrectEmailError): + User.login("notexist@foo.bar", "password") + + def test_login_incorrect_password(self): + with self.assertRaises(IncorrectPasswordError): + User.login("test@foo.bar", "WRONGPASSWORD") + + def test_login_invalid_password(self): + with self.assertRaises(IncorrectPasswordError): + User.login("test@foo.bar", "SHORT") + + def test_exists(self): + self.assertTrue(User.exists("test@foo.bar")) + + def test_exists_notindb(self): + self.assertFalse(User.exists("notexist@foo.bar")) + + def test_exists_invaid_email(self): + with self.assertRaises(IncorrectEmailError): + User.exists("notanemail@badformat") + + def test_get_email(self): + self.assertEqual(self.user.email, 'admin@foo.bar') + + def test_get_level(self): + self.assertEqual(self.user.level, "user") + + def test_get_info(self): + expinfo = { + 'name': 'Admin', + 'affiliation': 'Owner University', + 'address': '312 noname st, Apt K, Nonexistantown, CO 80302', + 'phone': '222-444-6789' + } + self.assertEqual(self.user.info, expinfo) + + def test_set_info(self): + self.user.info = self.userinfo + self.assertEqual(self.user.info, self.userinfo) + + def test_set_info_not_info(self): + """Tests setting info with a non-allowed column""" + self.userinfo["email"] = "FAIL" + with self.assertRaises(QiitaDBColumnError): + self.user.info = self.userinfo + + def test_set_info_bad_info(self): + """Test setting info with a key not in the table""" + self.userinfo["BADTHING"] = "FAIL" + with self.assertRaises(QiitaDBColumnError): + self.user.info = self.userinfo + + def test_get_private_studies(self): + user = User('test@foo.bar') + self.assertEqual(user.private_studies, [1]) 
+ + def test_get_shared_studies(self): + user = User('shared@foo.bar') + self.assertEqual(user.shared_studies, [1]) + + def test_get_private_analyses(self): + self.assertEqual(self.user.private_analyses, []) + + def test_get_shared_analyses(self): + self.assertEqual(self.user.shared_analyses, []) + + def test_verify_code(self): + sql = ("insert into qiita.qiita_user values ('test@user.com', '1', " + "'testtest', 'testuser', '', '', '', 'verifycode', 'resetcode'" + ",null)") + self.conn_handler.execute(sql) + self.assertTrue(User.verify_code('test@user.com', 'verifycode', + 'create')) + self.assertTrue(User.verify_code('test@user.com', 'resetcode', + 'reset')) + self.assertFalse(User.verify_code('test@user.com', 'wrongcode', + 'create')) + self.assertFalse(User.verify_code('test@user.com', 'wrongcode', + 'reset')) + with self.assertRaises(IncompetentQiitaDeveloperError): + User.verify_code('test@user.com', 'fakecode', 'badtype') + + +if __name__ == "__main__": + main() diff --git a/qiita_db/test/test_util.py b/qiita_db/test/test_util.py new file mode 100644 index 000000000..5214ba446 --- /dev/null +++ b/qiita_db/test/test_util.py @@ -0,0 +1,186 @@ +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- + +from unittest import TestCase, main +from tempfile import mkstemp +from os import close + +from qiita_core.util import qiita_test_checker +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from qiita_db.exceptions import QiitaDBColumnError +from qiita_db.util import (exists_table, exists_dynamic_table, scrub_data, + compute_checksum, check_table_cols, + check_required_columns, convert_to_id, + get_table_cols, get_filetypes, get_filepath_types, + get_count, check_count, get_processed_params_tables) + + +@qiita_test_checker() +class DBUtilTests(TestCase): + def setUp(self): + self.table = 'study' + self.required = [ + 'number_samples_promised', 'study_title', 'mixs_compliant', + 'metadata_complete', 'study_description', 'first_contact', + 'reprocess', 'study_status_id', 'portal_type_id', + 'timeseries_type_id', 'study_alias', 'study_abstract', + 'principal_investigator_id', 'email', 'number_samples_collected'] + + def test_check_required_columns(self): + # Doesn't do anything if correct info passed, only errors if wrong info + check_required_columns(self.conn_handler, self.required, self.table) + + def test_check_required_columns_fail(self): + self.required.remove('study_title') + with self.assertRaises(QiitaDBColumnError): + check_required_columns(self.conn_handler, self.required, + self.table) + + def test_check_table_cols(self): + # Doesn't do anything if correct info passed, only errors if wrong info + check_table_cols(self.conn_handler, self.required, self.table) + + def test_check_table_cols_fail(self): + self.required.append('BADTHINGNOINHERE') + with self.assertRaises(QiitaDBColumnError): + check_table_cols(self.conn_handler, self.required, + self.table) + + def test_get_table_cols(self): + obs = get_table_cols("qiita_user", self.conn_handler) + exp = {"email", "user_level_id", "password", "name", "affiliation", + "address", "phone", "user_verify_code", 
"pass_reset_code", + "pass_reset_timestamp"} + self.assertEqual(set(obs), exp) + + def test_exists_table(self): + """Correctly checks if a table exists""" + # True cases + self.assertTrue(exists_table("filepath", self.conn_handler)) + self.assertTrue(exists_table("qiita_user", self.conn_handler)) + self.assertTrue(exists_table("analysis", self.conn_handler)) + self.assertTrue(exists_table("prep_1", self.conn_handler)) + self.assertTrue(exists_table("sample_1", self.conn_handler)) + # False cases + self.assertFalse(exists_table("sample_2", self.conn_handler)) + self.assertFalse(exists_table("prep_2", self.conn_handler)) + self.assertFalse(exists_table("foo_table", self.conn_handler)) + self.assertFalse(exists_table("bar_table", self.conn_handler)) + + def test_exists_dynamic_table(self): + """Correctly checks if a dynamic table exists""" + # True cases + self.assertTrue(exists_dynamic_table( + "preprocessed_sequence_illumina_params", "preprocessed_", + "_params", self.conn_handler)) + self.assertTrue(exists_dynamic_table("prep_1", "prep_", "", + self.conn_handler)) + self.assertTrue(exists_dynamic_table("filepath", "", "", + self.conn_handler)) + # False cases + self.assertFalse(exists_dynamic_table( + "preprocessed_foo_params", "preprocessed_", "_params", + self.conn_handler)) + self.assertFalse(exists_dynamic_table( + "preprocessed__params", "preprocessed_", "_params", + self.conn_handler)) + self.assertFalse(exists_dynamic_table( + "foo_params", "preprocessed_", "_params", + self.conn_handler)) + self.assertFalse(exists_dynamic_table( + "preprocessed_foo", "preprocessed_", "_params", + self.conn_handler)) + self.assertFalse(exists_dynamic_table( + "foo", "preprocessed_", "_params", + self.conn_handler)) + + def test_convert_to_id(self): + """Tests that ids are returned correctly""" + self.assertEqual(convert_to_id("directory", "filepath_type"), 7) + + def test_convert_to_id_bad_value(self): + """Tests that ids are returned correctly""" + with 
self.assertRaises(IncompetentQiitaDeveloperError): + convert_to_id("FAKE", "filepath_type") + + def test_get_filetypes(self): + """Tests that get_filetypes works with valid arguments""" + + obs = get_filetypes() + exp = {'FASTA': 1, 'FASTQ': 2, 'SPECTRA': 3} + self.assertEqual(obs, exp) + + obs = get_filetypes(key='filetype_id') + exp = {v: k for k, v in exp.items()} + self.assertEqual(obs, exp) + + def test_get_filetypes_fail(self): + """Tests that get_Filetypes fails with invalid argument""" + with self.assertRaises(QiitaDBColumnError): + get_filetypes(key='invalid') + + def test_get_filepath_types(self): + """Tests that get_filepath_types works with valid arguments""" + obs = get_filepath_types() + exp = {'raw_sequences': 1, 'raw_barcodes': 2, 'raw_spectra': 3, + 'preprocessed_sequences': 4, 'preprocessed_sequences_qual': 5, + 'biom': 6, 'directory': 7, 'plain_text': 8} + self.assertEqual(obs, exp) + + obs = get_filepath_types(key='filepath_type_id') + exp = {v: k for k, v in exp.items()} + self.assertEqual(obs, exp) + + def test_get_filepath_types_fail(self): + """Tests that get_Filetypes fails with invalid argument""" + with self.assertRaises(QiitaDBColumnError): + get_filepath_types(key='invalid') + + def test_get_count(self): + """Checks that get_count retrieves proper count""" + self.assertEqual(get_count('qiita.study_person'), 3) + + def test_check_count(self): + """Checks that check_count returns True and False appropriately""" + self.assertTrue(check_count('qiita.study_person', 3)) + self.assertFalse(check_count('qiita.study_person', 2)) + + def test_get_processed_params_tables(self): + obs = get_processed_params_tables() + self.assertEqual(obs, ['processed_params_uclust']) + + +class UtilTests(TestCase): + """Tests for the util functions that do not need to access the DB""" + + def setUp(self): + fh, self.filepath = mkstemp() + close(fh) + with open(self.filepath, "w") as f: + f.write("Some text so we can actually compute a checksum") + + def 
test_compute_checksum(self):
+        """Correctly returns the file checksum"""
+        obs = compute_checksum(self.filepath)
+        exp = 1719580229
+        self.assertEqual(obs, exp)
+
+    def test_scrub_data_nothing(self):
+        """Returns the same string without changes"""
+        self.assertEqual(scrub_data("nothing_changes"), "nothing_changes")
+
+    def test_scrub_data_semicolon(self):
+        """Correctly removes the semicolon from the string"""
+        self.assertEqual(scrub_data("remove_;_char"), "remove__char")
+
+    def test_scrub_data_single_quote(self):
+        """Correctly removes single quotes from the string"""
+        self.assertEqual(scrub_data("'quotes'"), "quotes")
+
+if __name__ == '__main__':
+    main()
diff --git a/qiita_db/user.py b/qiita_db/user.py
new file mode 100644
index 000000000..6aa06c390
--- /dev/null
+++ b/qiita_db/user.py
@@ -0,0 +1,377 @@
+r"""
+User object (:mod:`qiita_db.user`)
+==================================
+
+.. currentmodule:: qiita_db.user
+
+This module provides the implementation of the User class. This is used for
+handling creation, deletion, and login of users, as well as retrieval of all
+studies and analyses that are owned by or shared with the user.
+
+Classes
+-------
+
+.. autosummary::
+    :toctree: generated/
+
+    User
+
+Examples
+--------
+TODO
+"""
+# -----------------------------------------------------------------------------
+# Copyright (c) 2014--, The Qiita Development Team.
+#
+# Distributed under the terms of the BSD 3-clause License.
+#
+# The full license is in the file LICENSE, distributed with this software.
+# ----------------------------------------------------------------------------- +from __future__ import division +from re import match + +from qiita_core.exceptions import (IncorrectEmailError, IncorrectPasswordError, + IncompetentQiitaDeveloperError) +from .base import QiitaObject +from .sql_connection import SQLConnectionHandler +from .util import create_rand_string, check_table_cols, hash_password +from .exceptions import (QiitaDBColumnError, QiitaDBDuplicateError) + + +class User(QiitaObject): + """ + User object to access to the Qiita user information + + Attributes + ---------- + email + level + info + private_studies + shared_studies + private_analyses + shared_analyses + + Methods + ------- + add_shared_study + remove_shared_study + add_private_analysis + remove_private_analysis + add_shared_analysis + remove_shared_analysis + """ + + _table = "qiita_user" + # The following columns are considered not part of the user info + _non_info = {"email", "user_level_id", "password", "user_verify_code", + "pass_reset_code", "pass_reset_timestamp"} + + def _check_id(self, id_, conn_handler=None): + r"""Check that the provided ID actually exists in the database + + Parameters + ---------- + id_ : object + The ID to test + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + + Notes + ----- + This function overwrites the base function, as sql layout doesn't + follow the same conventions done in the other classes. 
+ """ + self._check_subclass() + + conn_handler = (conn_handler if conn_handler is not None + else SQLConnectionHandler()) + return conn_handler.execute_fetchone( + "SELECT EXISTS(SELECT * FROM qiita.qiita_user WHERE " + "email = %s)", (id_, ))[0] + + @classmethod + def login(cls, email, password): + """Logs a user into the system + + Parameters + ---------- + email : str + The email of the user + password: str + The plaintext password of the user + + Returns + ------- + User object + Returns the User object corresponding to the login information + if correct login information + + Raises + ------ + IncorrectEmailError + Email passed is not a valid email + IncorrectPasswordError + Password passed is not correct for user + """ + # see if user exists + if not cls.exists(email): + raise IncorrectEmailError("Email not valid: %s" % email) + + if not validate_password(password): + raise IncorrectPasswordError("Password not valid!") + + # pull password out of database + conn_handler = SQLConnectionHandler() + sql = ("SELECT password FROM qiita.{0} WHERE " + "email = %s".format(cls._table)) + dbpass = conn_handler.execute_fetchone(sql, (email, ))[0] + + # verify password + hashed = hash_password(password, dbpass) + if hashed == dbpass: + return cls(email) + else: + raise IncorrectPasswordError("Password not valid!") + + @classmethod + def exists(cls, email): + """Checks if a user exists on the database + + Parameters + ---------- + email : str + the email of the user + """ + if not validate_email(email): + raise IncorrectEmailError("Email string not valid: %s" % email) + conn_handler = SQLConnectionHandler() + + return conn_handler.execute_fetchone( + "SELECT EXISTS(SELECT * FROM qiita.{0} WHERE " + "email = %s)".format(cls._table), (email, ))[0] + + @classmethod + def create(cls, email, password, info=None): + """Creates a new user on the database + + Parameters + ---------- + email : str + The email of the user - used for log in + password : + The plaintext password of 
the user + info: dict + Other information for the user keyed to table column name + + Raises + ------ + IncorrectPasswordError + Password string given is not proper format + IncorrectEmailError + Email string given is not a valid email + QiitaDBDuplicateError + User already exists + """ + # validate email and password for new user + if not validate_email(email): + raise IncorrectEmailError("Bad email given: %s" % email) + if not validate_password(password): + raise IncorrectPasswordError("Bad password given!") + + # make sure user does not already exist + if cls.exists(email): + raise QiitaDBDuplicateError("User", "email: %s" % email) + + # make sure non-info columns aren't passed in info dict + if info: + if cls._non_info.intersection(info): + raise QiitaDBColumnError("non info keys passed: %s" % + cls._non_info.intersection(info)) + else: + info = {} + + # create email verification code and hashed password to insert + # add values to info + info["email"] = email + info["password"] = hash_password(password) + info["user_verify_code"] = create_rand_string(20, punct=False) + + # make sure keys in info correspond to columns in table + conn_handler = SQLConnectionHandler() + check_table_cols(conn_handler, info, cls._table) + + # build info to insert making sure columns and data are in same order + # for sql insertion + columns = info.keys() + values = [info[col] for col in columns] + + sql = ("INSERT INTO qiita.%s (%s) VALUES (%s)" % + (cls._table, ','.join(columns), ','.join(['%s'] * len(values)))) + conn_handler.execute(sql, values) + return cls(email) + + @classmethod + def verify_code(cls, email, code, code_type): + """Verify that a code and email match + + Parameters + ---------- + email : str + email address of the user + code : str + code to verify + code_type : {'create' or 'reset'} + + Returns + ------- + bool + + Raises + ------ + IncompentQiitaDeveloper + code_type is not create or reset + """ + if code_type == 'create': + column = 'user_verify_code' + elif 
code_type == 'reset': + column = 'pass_reset_code' + else: + raise IncompetentQiitaDeveloperError("code_type must be 'create'" + " or 'reset' Uknown type " + "%s" % code_type) + sql = ("SELECT {1} from qiita.{0} where email" + " = %s".format(cls._table, column)) + conn_handler = SQLConnectionHandler() + db_code = conn_handler.execute_fetchone(sql, (email,))[0] + return db_code == code + + # ---properties--- + @property + def email(self): + """The email of the user""" + return self._id + + @property + def level(self): + """The level of privileges of the user""" + conn_handler = SQLConnectionHandler() + sql = ("SELECT name from qiita.user_level WHERE user_level_id = " + "(SELECT user_level_id from qiita.{0} WHERE " + "email = %s)".format(self._table)) + return conn_handler.execute_fetchone(sql, (self._id, ))[0] + + @property + def info(self): + """Dict with any other information attached to the user""" + conn_handler = SQLConnectionHandler() + sql = "SELECT * from qiita.{0} WHERE email = %s".format(self._table) + # Need direct typecast from psycopg2 dict to standard dict + info = dict(conn_handler.execute_fetchone(sql, (self._id, ))) + # Remove non-info columns + for col in self._non_info: + info.pop(col) + return info + + @info.setter + def info(self, info): + """Updates the information attached to the user + + Parameters + ---------- + info : dict + """ + # make sure non-info columns aren't passed in info dict + if self._non_info.intersection(info): + raise QiitaDBColumnError("non info keys passed!") + + # make sure keys in info correspond to columns in table + conn_handler = SQLConnectionHandler() + check_table_cols(conn_handler, info, self._table) + + # build sql command and data to update + sql_insert = [] + data = [] + # items used for py3 compatability + for key, val in info.items(): + sql_insert.append("{0} = %s".format(key)) + data.append(val) + data.append(self._id) + + sql = ("UPDATE qiita.{0} SET {1} WHERE " + "email = %s".format(self._table, 
','.join(sql_insert))) + conn_handler.execute(sql, data) + + @property + def private_studies(self): + """Returns a list of private study ids owned by the user""" + sql = ("SELECT study_id FROM qiita.study WHERE " + "email = %s".format(self._table)) + conn_handler = SQLConnectionHandler() + study_ids = conn_handler.execute_fetchall(sql, (self._id, )) + return [s[0] for s in study_ids] + + @property + def shared_studies(self): + """Returns a list of study ids shared with the user""" + sql = ("SELECT study_id FROM qiita.study_users WHERE " + "email = %s".format(self._table)) + conn_handler = SQLConnectionHandler() + study_ids = conn_handler.execute_fetchall(sql, (self._id, )) + return [s[0] for s in study_ids] + + @property + def private_analyses(self): + """Returns a list of private analysis ids owned by the user""" + sql = ("Select analysis_id from qiita.analysis WHERE email = %s AND " + "analysis_status_id <> 6") + conn_handler = SQLConnectionHandler() + analysis_ids = conn_handler.execute_fetchall(sql, (self._id, )) + return [a[0] for a in analysis_ids] + + @property + def shared_analyses(self): + """Returns a list of analysis ids shared with the user""" + sql = ("SELECT analysis_id FROM qiita.analysis_users WHERE " + "email = %s".format(self._table)) + conn_handler = SQLConnectionHandler() + analysis_ids = conn_handler.execute_fetchall(sql, (self._id, )) + return [a[0] for a in analysis_ids] + + +def validate_email(email): + """Makes sure email string has one @ and a period after the @ + + Parameters + ---------- + email: str + email to validate + + Returns + ------- + bool + Whether or not the email is valid + """ + return True if match(r"[^@]+@[^@]+\.[^@]+", email) else False + + +def validate_password(password): + """Validates a password is only ascii letters, numbers, or characters and + at least 8 characters + + Parameters + ---------- + password: str + Password to validate + + Returns + ------- + bool + Whether or not the password is valid + + References + 
----------
+    http://stackoverflow.com/questions/2990654/how-to-test-a-regex-
+    password-in-python
+    """
+    return True if match(r'[A-Za-z0-9@#$%^&+=]{8,}', password) else False
diff --git a/qiita_db/util.py b/qiita_db/util.py
new file mode 100644
index 000000000..94b172cac
--- /dev/null
+++ b/qiita_db/util.py
@@ -0,0 +1,512 @@
+r"""
+Util functions (:mod:`qiita_db.util`)
+======================================
+
+.. currentmodule:: qiita_db.util
+
+This module provides different util functions.
+
+Methods
+-------
+
+.. autosummary::
+    :toctree: generated/
+
+    quote_data_value
+    scrub_data
+    exists_table
+    exists_dynamic_table
+    get_db_files_base_dir
+    compute_checksum
+    insert_filepaths
+    check_table_cols
+    check_required_columns
+    convert_to_id
+"""
+# -----------------------------------------------------------------------------
+# Copyright (c) 2014--, The Qiita Development Team.
+#
+# Distributed under the terms of the BSD 3-clause License.
+#
+# The full license is in the file LICENSE, distributed with this software.
+# ----------------------------------------------------------------------------- + +from __future__ import division +from future.builtins import zip +from random import choice +from string import ascii_letters, digits, punctuation +from binascii import crc32 +from bcrypt import hashpw, gensalt +from functools import partial +from os.path import join, basename, isdir +from os import walk +from shutil import move + +from qiita_core.exceptions import IncompetentQiitaDeveloperError +from .exceptions import QiitaDBColumnError +from .sql_connection import SQLConnectionHandler + + +def scrub_data(s): + r"""Scrubs data fields of characters not allowed by PostgreSQL + + disallowed characters: + ' ; + + Parameters + ---------- + s : str + The string to clean up + + Returns + ------- + str + The scrubbed string + """ + ret = s.replace("'", "") + ret = ret.replace(";", "") + return ret + + +def get_filetypes(key='type'): + """Gets the list of possible filetypes from the filetype table + + Parameters + ---------- + key : {'type', 'filetype_id'}, optional + Defaults to "type". Determines the format of the returned dict. + + Returns + ------- + dict + If `key` is "type", dict is of the form {type: filetype_id} + If `key` is "filetype_id", dict is of the form {filetype_id: type} + """ + con = SQLConnectionHandler() + if key == 'type': + cols = 'type, filetype_id' + elif key == 'filetype_id': + cols = 'filetype_id, type' + else: + raise QiitaDBColumnError("Unknown key. Pass either 'type' or " + "'filetype_id'.") + sql = 'select {} from qiita.filetype'.format(cols) + return dict(con.execute_fetchall(sql)) + + +def get_filepath_types(key='filepath_type'): + """Gets the list of possible filepath types from the filetype table + + Parameters + ---------- + key : {'filepath_type', 'filepath_type_id'}, optional + Defaults to "filepath_type". Determines the format of the returned + dict. 
+ + Returns + ------- + dict + - If `key` is "filepath_type", dict is of the form + {filepath_type: filepath_type_id} + - If `key` is "filepath_type_id", dict is of the form + {filepath_type_id: filepath_type} + """ + con = SQLConnectionHandler() + if key == 'filepath_type': + cols = 'filepath_type, filepath_type_id' + elif key == 'filepath_type_id': + cols = 'filepath_type_id, filepath_type' + else: + raise QiitaDBColumnError("Unknown key. Pass either 'filepath_type' or " + "'filepath_type_id'.") + sql = 'select {} from qiita.filepath_type'.format(cols) + return dict(con.execute_fetchall(sql)) + + +def create_rand_string(length, punct=True): + """Returns a string of random ascii characters + + Parameters + ---------- + length: int + Length of string to return + punct: bool, optional + Include punctiation as well as letters and numbers. Default True. + """ + chars = ''.join((ascii_letters, digits)) + if punct: + chars = ''.join((chars, punctuation)) + return ''.join(choice(chars) for i in range(length)) + + +def hash_password(password, hashedpw=None): + """ Hashes password + + Parameters + ---------- + password: str + Plaintext password + hashedpw: str, optional + Previously hashed password for bcrypt to pull salt from. If not + given, salt generated before hash + + Returns + ------- + str + Hashed password + + Notes + ----- + Relies on bcrypt library to hash passwords, which stores the salt as + part of the hashed password. Don't need to actually store the salt + because of this. 
+ """ + # all the encode/decode as a python 3 workaround for bcrypt + if hashedpw is None: + hashedpw = gensalt() + else: + hashedpw = hashedpw.encode('utf-8') + password = password.encode('utf-8') + output = hashpw(password, hashedpw) + if isinstance(output, bytes): + output = output.decode("utf-8") + return output + + +def check_required_columns(conn_handler, keys, table): + """Makes sure all required columns in database table are in keys + + Parameters + ---------- + conn_handler: SQLConnectionHandler object + Previously opened connection to the database + keys: iterable + Holds the keys in the dictionary + table: str + name of the table to check required columns + + Raises + ------ + QiitaDBColumnError + If keys exist that are not in the table + RuntimeError + Unable to get columns from database + """ + sql = ("SELECT is_nullable, column_name FROM information_schema.columns " + "WHERE table_name = %s") + cols = conn_handler.execute_fetchall(sql, (table, )) + # Test needed because a user with certain permissions can query without + # error but be unable to get the column names + if len(cols) == 0: + raise RuntimeError("Unable to fetch column names for table %s" % table) + required = set(x[1] for x in cols if x[0] == 'NO') + # remove the table id column as required + required.remove("%s_id" % table) + if len(required.difference(keys)) > 0: + raise QiitaDBColumnError("Required keys missing: %s" % + required.difference(keys)) + + +def check_table_cols(conn_handler, keys, table): + """Makes sure all keys correspond to column headers in a table + + Parameters + ---------- + conn_handler: SQLConnectionHandler object + Previously opened connection to the database + keys: iterable + Holds the keys in the dictionary + table: str + name of the table to check column names + + Raises + ------ + QiitaDBColumnError + If a key is found that is not in table columns + RuntimeError + Unable to get columns from database + """ + sql = ("SELECT column_name FROM 
information_schema.columns WHERE " + "table_name = %s") + cols = [x[0] for x in conn_handler.execute_fetchall(sql, (table, ))] + # Test needed because a user with certain permissions can query without + # error but be unable to get the column names + if len(cols) == 0: + raise RuntimeError("Unable to fetch column names for table %s" % table) + if len(set(keys).difference(cols)) > 0: + raise QiitaDBColumnError("Non-database keys found: %s" % + set(keys).difference(cols)) + + +def get_table_cols(table, conn_handler): + """Returns the column headers of table + + Parameters + ---------- + table : str + The table name + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + + Returns + ------- + list of str + The column headers of `table` + """ + headers = conn_handler.execute_fetchall( + "SELECT column_name FROM information_schema.columns WHERE " + "table_name=%s", (table, )) + return [h[0] for h in headers] + + +def exists_table(table, conn_handler): + r"""Checks if `table` exists on the database connected through + `conn_handler` + + Parameters + ---------- + table : str + The table name to check if exists + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + """ + return conn_handler.execute_fetchone( + "SELECT exists(SELECT * FROM information_schema.tables WHERE " + "table_name=%s)", (table,))[0] + + +def exists_dynamic_table(table, prefix, suffix, conn_handler): + r"""Checks if the dynamic`table` exists on the database connected through + `conn_handler`, and its name starts with prefix and ends with suffix + + Parameters + ---------- + table : str + The table name to check if exists + prefix : str + The table name prefix + suffix : str + The table name suffix + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + """ + return (table.startswith(prefix) and table.endswith(suffix) and + exists_table(table, conn_handler)) + + +def 
get_db_files_base_dir(conn_handler=None): + r"""Returns the path to the base directory of all db files + + Returns + ------- + str + The path to the base directory of all db files + """ + conn_handler = (conn_handler if conn_handler is not None + else SQLConnectionHandler()) + return conn_handler.execute_fetchone( + "SELECT base_data_dir FROM settings")[0] + + +def get_work_base_dir(conn_handler=None): + r"""Returns the path to the base directory of all db files + + Returns + ------- + str + The path to the base directory of all db files + """ + conn_handler = (conn_handler if conn_handler is not None + else SQLConnectionHandler()) + return conn_handler.execute_fetchone( + "SELECT base_work_dir FROM settings")[0] + + +def compute_checksum(path): + r"""Returns the checksum of the file pointed by path + + Parameters + ---------- + path : str + The path to compute the checksum + + Returns + ------- + int + The file checksum + """ + crc = 0 + filepaths = [] + if isdir(path): + for name, dirs, files in walk(path): + join_f = partial(join, name) + filepaths.extend(list(map(join_f, files))) + else: + filepaths.append(path) + + for fp in filepaths: + with open(fp, "Ub") as f: + # Go line by line so we don't need to load the entire file + for line in f: + if crc is None: + crc = crc32(line) + else: + crc = crc32(line, crc) + # We need the & 0xffffffff in order to get the same numeric value across + # all python versions and platforms + return crc & 0xffffffff + + +def insert_filepaths(filepaths, obj_id, table, filepath_table, conn_handler, + move_files=True): + r"""Inserts `filepaths` in the DB connected with `conn_handler`. Since + the files live outside the database, the directory in which the files + lives is controlled by the database, so it copies the filepaths from + its original location to the controlled directory. 
+ + Parameters + ---------- + filepaths : iterable of tuples (str, int) + The list of paths to the raw files and its filepath type identifier + obj_id : int + Id of the object calling the functions + table : str + Table that holds the file data + filepath_table : str + Table that holds the filepath information + conn_handler : SQLConnectionHandler + The connection handler object connected to the DB + move_files : bool, optional + Whether or not to copy from the given filepaths to the db filepaths + default: True + + Returns + ------- + list + The filepath_id in the database for each added filepath + """ + new_filepaths = filepaths + if move_files: + # Get the base directory in which the type of data is stored + base_data_dir = join(get_db_files_base_dir(), table) + # Generate the new fileapths. Format: DataId_OriginalName + # Keeping the original name is useful for checking if the RawData + # alrady exists on the DB + db_path = partial(join, base_data_dir) + new_filepaths = [ + (db_path("%s_%s" % (obj_id, basename(path))), id) + for path, id in filepaths] + # Move the original files to the controlled DB directory + for old_fp, new_fp in zip(filepaths, new_filepaths): + move(old_fp[0], new_fp[0]) + + paths_w_checksum = [(path, id, compute_checksum(path)) + for path, id in new_filepaths] + + # Create the list of SQL values to add + values = ["('%s', %s, '%s', %s)" % (scrub_data(path), id, checksum, 1) + for path, id, checksum in paths_w_checksum] + # Insert all the filepaths at once and get the filepath_id back + ids = conn_handler.execute_fetchall( + "INSERT INTO qiita.{0} (filepath, filepath_type_id, checksum, " + "checksum_algorithm_id) VALUES {1} " + "RETURNING filepath_id".format(filepath_table, + ', '.join(values))) + + # we will receive a list of lists with a single element on it (the id), + # transform it to a list of ids + return [id[0] for id in ids] + + +def convert_to_id(value, table, conn_handler=None): + """Converts a string value to it's corresponding 
table identifier + + Parameters + ---------- + value : str + The string value to convert + table : str + The table that has the conversion + conn_handler : SQLConnectionHandler, optional + The sql connection object + + Returns + ------- + int + The id correspinding to the string + + Raises + ------ + IncompetentQiitaDeveloperError + The passed string has no associated id + """ + conn_handler = conn_handler if conn_handler else SQLConnectionHandler() + _id = conn_handler.execute_fetchone( + "SELECT {0}_id FROM qiita.{0} WHERE {0} = %s".format(table), + (value, )) + if _id is None: + raise IncompetentQiitaDeveloperError("%s not valid for table %s" + % (value, table)) + return _id[0] + + +def get_count(table): + """Counts the number of rows in a table + + Parameters + ---------- + table : str + The name of the table of which to count the rows + + Returns + ------- + int + """ + conn = SQLConnectionHandler() + sql = "SELECT count(1) FROM %s" % table + return conn.execute_fetchone(sql)[0] + + +def check_count(table, exp_count): + """Checks that the number of rows in a table equals the expected count + + Parameters + ---------- + table : str + The name of the table of which to count the rows + exp_count : int + The expected number of rows in the table + + Returns + ------- + bool + """ + obs_count = get_count(table) + return obs_count == exp_count + + +def get_preprocessed_params_tables(): + """returns a list of preprocessed parmaeter tables + + Returns + ------- + list or str + """ + sql = ("SELECT * FROM information_schema.tables WHERE table_schema = " + "'qiita' AND SUBSTR(table_name, 1, 13) = 'preprocessed_'") + conn = SQLConnectionHandler() + return [x[2] for x in conn.execute_fetchall(sql)] + + +def get_processed_params_tables(): + """Returns a list of all tables starting with "processed_params_" + + Returns + ------- + list of str + """ + sql = ("SELECT * FROM information_schema.tables WHERE table_schema = " + "'qiita' AND SUBSTR(table_name, 1, 17) = 
'processed_params_'") + + conn = SQLConnectionHandler() + return [x[2] for x in conn.execute_fetchall(sql)] diff --git a/qiita_pet/__init__.py b/qiita_pet/__init__.py new file mode 100644 index 000000000..22f1e9179 --- /dev/null +++ b/qiita_pet/__init__.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- diff --git a/qiita_pet/handlers/__init__.py b/qiita_pet/handlers/__init__.py new file mode 100644 index 000000000..22f1e9179 --- /dev/null +++ b/qiita_pet/handlers/__init__.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +from __future__ import division + +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. +# ----------------------------------------------------------------------------- diff --git a/qiita_pet/handlers/analysis_handlers.py b/qiita_pet/handlers/analysis_handlers.py new file mode 100644 index 000000000..7f450ec4c --- /dev/null +++ b/qiita_pet/handlers/analysis_handlers.py @@ -0,0 +1,176 @@ +r""" +Qitta analysis handlers for the Tornado webserver. + +""" +# ----------------------------------------------------------------------------- +# Copyright (c) 2014--, The Qiita Development Team. +# +# Distributed under the terms of the BSD 3-clause License. +# +# The full license is in the file LICENSE, distributed with this software. 
+# ----------------------------------------------------------------------------- +from __future__ import division +from tempfile import mkstemp +from os import close +from os.path import join + +from tornado.web import authenticated, asynchronous +from collections import defaultdict + +from qiita_pet.handlers.base_handlers import BaseHandler +from qiita_ware.run import run_analysis +from qiita_db.user import User +from qiita_db.analysis import Analysis +from qiita_db.study import Study +from qiita_db.data import ProcessedData +from qiita_db.metadata_template import SampleTemplate +from qiita_db.job import Job +from qiita_db.util import get_db_files_base_dir +# login code modified from https://gist.github.com/guillaumevincent/4771570 + + +class CreateAnalysisHandler(BaseHandler): + """Analysis creation""" + @authenticated + def get(self): + self.render('create_analysis.html', user=self.get_current_user()) + + +class SelectStudiesHandler(BaseHandler): + """Study selection""" + @authenticated + def post(self): + name = self.get_argument('name') + description = self.get_argument('description') + user = self.get_current_user() + # create list of studies + study_ids = {s.id for s in Study.get_public()} + userobj = User(user) + [study_ids.add(x) for x in userobj.private_studies] + [study_ids.add(x) for x in userobj.shared_studies] + + studies = [Study(i) for i in study_ids] + analysis = Analysis.create(User(user), name, description) + + self.render('select_studies.html', user=user, aid=analysis.id, + studies=studies) + + +class SelectCommandsHandler(BaseHandler): + """Select commands to be executed""" + @authenticated + def post(self): + analysis_id = self.get_argument('analysis-id') + study_args = self.get_arguments('studies') + split = [x.split("#") for x in study_args] + + # build dictionary of studies and datatypes selected + # as well a set of unique datatypes selected + study_dts = defaultdict(list) + data_types = set() + for study_id, data_type in split: + 
study_dts[study_id].append(data_type) + data_types.add(data_type) + + # sort the elements to have 16S be the first tho show on the tabs + data_types = sorted(list(data_types)) + + # FIXME: Pull out from the database, see #111 + commands = {'16S': ['Beta Diversity', 'Summarize Taxa'], + '18S': ['Beta Diversity', 'Summarize Taxa'], + 'Metabolomic': ['Beta Diversity'], + 'Metagenomic': ['Beta Diversity']} + + self.render('select_commands.html', user=self.get_current_user(), + commands=commands, data_types=data_types, aid=analysis_id) + + analysis = Analysis(analysis_id) + + for study_id in study_dts: + study = Study(study_id) + processed_data = {ProcessedData(pid).data_type: pid for pid in + study.processed_data} + + sample_ids = SampleTemplate(study.id).keys() + for data_type in study_dts[study.id]: + samples = [(processed_data[data_type], sid) for sid in + sample_ids] + analysis.add_samples(samples) + + +class AnalysisWaitHandler(BaseHandler): + @authenticated + def get(self, analysis_id): + analysis = Analysis(analysis_id) + commands = [] + for job in analysis.jobs: + jobject = Job(job) + commands.append("%s:%s" % (jobject.datatype, jobject.command[0])) + + self.render("analysis_waiting.html", user=self.get_current_user(), + aid=analysis_id, aname=analysis.name, + commands=commands) + + @authenticated + @asynchronous + def post(self, analysis_id): + command_args = self.get_arguments("commands") + split = [x.split("#") for x in command_args] + analysis = Analysis(analysis_id) + + commands = [] + # HARD CODED HACKY THING FOR DEMO, FIX Issue #164 + fp, mapping_file = mkstemp(suffix="_map_file.txt") + close(fp) + SampleTemplate(1).to_file(mapping_file) + study_fps = {} + for pd in Study(1).processed_data: + processed = ProcessedData(pd) + study_fps[processed.data_type] = processed.get_filepaths()[0][0] + for data_type, command in split: + opts = { + "--otu_table_fp": study_fps[data_type], + "--mapping_fp": mapping_file + } + if command == "Beta Diversity" and data_type 
in {'16S', '18S'}: + opts["--tree_fp"] = join(get_db_files_base_dir(), "reference", + "gg_97_otus_4feb2011.tre") + elif command == "Beta Diversity": + opts["--parameter_fp"] = join(get_db_files_base_dir(), + "reference", "params_qiime.txt") + Job.create(data_type, command, opts, analysis) + commands.append("%s: %s" % (data_type, command)) + user = self.get_current_user() + self.render("analysis_waiting.html", user=user, + aid=analysis_id, aname=analysis.name, + commands=commands) + # fire off analysis run here + # currently synch run so redirect done here. Will remove after demo + run_analysis(user, analysis) + + +class AnalysisResultsHandler(BaseHandler): + @authenticated + def get(self, aid): + analysis = Analysis(aid) + jobres = defaultdict(list) + for job in analysis.jobs: + jobject = Job(job) + jobres[jobject.datatype].append((jobject.command[0], + jobject.results)) + + self.render("analysis_results.html", user=self.get_current_user(), + jobres=jobres, aname=analysis.name, + basefolder=get_db_files_base_dir()) + + +class ShowAnalysesHandler(BaseHandler): + """Shows the user's analyses""" + def get(self): + user_id = self.get_current_user() + user = User(user_id) + + analyses = [Analysis(a) for a in + user.shared_analyses + user.private_analyses] + + self.render("show_analyses.html", user=user_id, analyses=analyses) diff --git a/qiita_pet/handlers/auth_handlers.py b/qiita_pet/handlers/auth_handlers.py new file mode 100644 index 000000000..b3ab7bc43 --- /dev/null +++ b/qiita_pet/handlers/auth_handlers.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python + +from tornado.escape import url_escape, json_encode + +from qiita_pet.handlers.base_handlers import BaseHandler +from qiita_core.util import send_email +from qiita_core.exceptions import IncorrectPasswordError, IncorrectEmailError +from qiita_db.user import User +from qiita_db.exceptions import QiitaDBUnknownIDError +# login code modified from https://gist.github.com/guillaumevincent/4771570 + + +class 
AuthCreateHandler(BaseHandler): + """User Creation""" + def get(self): + try: + error_message = self.get_argument("error") + # Tornado can raise an Exception directly, not a defined type + except: + error_message = "" + self.render("create_user.html", user=self.get_current_user(), + error=error_message) + + def post(self): + username = self.get_argument("username", "") + password = self.get_argument("pass", "") + info = {} + for info_column in ("name", "affiliation", "address", "phone"): + hold = self.get_argument(info_column, None) + if hold: + info[info_column] = hold + + created = User.create(username, password, info) + + if created: + send_email(username, "FORGE: Verify Email Address", "Please click " + "the following link to verify email address: " + "http://forge-dev.colorado.edu/auth/verify/%s" % msg) + self.redirect(u"/") + else: + error_msg = u"?error=" + url_escape(msg) + self.redirect(u"/auth/create/" + error_msg) + + +class AuthVerifyHandler(BaseHandler): + def get(self): + email = self.get_argument("email") + code = self.get_argument("code") + try: + User(email).level = 3 + msg = "Successfully verified user!" + except QiitaDBUnknownIDError: + msg = "Code not valid!" 
+ + self.render("user_verified.html", user=None, error=msg) + + +class AuthLoginHandler(BaseHandler): + """user login, no page necessary""" + def post(self): + username = self.get_argument("username", "") + passwd = self.get_argument("password", "") + # check the user level + try: + if User(username).level == 4: # 4 is id for unverified + # email not verified so dont log in + msg = "Email not verified" + except QiitaDBUnknownIDError: + msg = "Unknown user" + + # Check the login information + login = None + try: + login = User.login(username, passwd) + except IncorrectEmailError: + msg = "Unknown user" + except IncorrectPasswordError: + msg = "Incorrect password" + + if login: + # everthing good so log in + self.set_current_user(username) + self.redirect("/") + return + self.render("index.html", user=None, loginerror=msg) + + def set_current_user(self, user): + if user: + self.set_secure_cookie("user", json_encode(user)) + else: + self.clear_cookie("user") + + +class AuthLogoutHandler(BaseHandler): + """Logout handler, no page necessary""" + def get(self): + self.clear_cookie("user") + self.redirect("/") diff --git a/qiita_pet/handlers/base_handlers.py b/qiita_pet/handlers/base_handlers.py new file mode 100644 index 000000000..4795a502b --- /dev/null +++ b/qiita_pet/handlers/base_handlers.py @@ -0,0 +1,46 @@ +from tornado.web import RequestHandler + + +class BaseHandler(RequestHandler): + def get_current_user(self): + '''Overrides default method of returning user curently connected''' + user = self.get_secure_cookie("user") + if user is None: + self.clear_cookie("user") + return '' + else: + return user.strip('" ') + + def write_error(self, status_code, **kwargs): + '''Overrides the error page created by Tornado''' + from traceback import format_exception + if self.settings.get("debug") and "exc_info" in kwargs: + exc_info = kwargs["exc_info"] + trace_info = ''.join(["%s
" % line for line in + format_exception(*exc_info)]) + request_info = ''.join(["%s: %s
" % + (k, self.request.__dict__[k]) for k in + self.request.__dict__.keys()]) + error = exc_info[1] + + self.render('error.html', error=error, trace_info=trace_info, + request_info=request_info, + user=self.get_current_user()) + + +class MainHandler(BaseHandler): + '''Index page''' + def get(self): + username = self.get_current_user() + completedanalyses = [] + self.render("index.html", user=username, analyses=completedanalyses) + + +class MockupHandler(BaseHandler): + def get(self): + self.render("mockup.html", user=self.get_current_user()) + + +class NoPageHandler(BaseHandler): + def get(self): + self.render("404.html", user=self.get_current_user()) \ No newline at end of file diff --git a/qiita_pet/handlers/websocket_handlers.py b/qiita_pet/handlers/websocket_handlers.py new file mode 100644 index 000000000..57962884b --- /dev/null +++ b/qiita_pet/handlers/websocket_handlers.py @@ -0,0 +1,63 @@ +# adapted from +# https://github.com/leporo/tornado-redis/blob/master/demos/websockets +from json import loads + +from tornadoredis import Client +from tornado.websocket import WebSocketHandler +from tornado.gen import engine, Task + +from qiita_ware.run import r_server + +# all messages are in json format. 
They must have the following format: +# 'analysis': analysis_id +# 'msg': message to print +# 'command': what command this is from in format datatype#command + + +class MessageHandler(WebSocketHandler): + def __init__(self, *args, **kwargs): + super(MessageHandler, self).__init__(*args, **kwargs) + self.redis = Client() + self.redis.connect() + + def get_current_user(self): + user = self.get_secure_cookie("user") + if user is None: + return '' + else: + return user.strip('" ') + + def on_message(self, msg): + msginfo = loads(msg) + # listens for handshake from page + if "user:" in msginfo['msg']: + self.channel = msginfo['msg'].split(':')[1] + # need to split the rest off to new func so it can be asynchronous + self.listen() + + # decorator turns the function into an asynchronous generator object + @engine + def listen(self): + # runs task given, with the yield required to get returned value + # equivalent of callback/wait pairing from tornado.gen + yield Task(self.redis.subscribe, self.channel) + if not self.redis.subscribed: + self.write_message('ERROR IN SUBSCRIPTION') + # listen from tornadoredis makes the listen object asynchronous + # if using standard redis lib, it blocks while listening + self.redis.listen(self.callback) + # fight race condition by loading from redis after listen started + # need to use std redis lib because tornadoredis is already subscribed + oldmessages = r_server.lrange(self.channel + ':messages', 0, -1) + if oldmessages is not None: + for message in oldmessages: + self.write_message(message) + + def callback(self, msg): + if msg.kind == 'message': + self.write_message(str(msg.body)) + + @engine + def on_close(self): + yield Task(self.redis.unsubscribe, self.channel) + self.redis.disconnect() diff --git a/qiita_pet/results/admin/jobname/placeholder.html b/qiita_pet/results/admin/jobname/placeholder.html new file mode 100644 index 000000000..4fc59e420 --- /dev/null +++ b/qiita_pet/results/admin/jobname/placeholder.html @@ -0,0 +1 @@ 
+THIS SHOULD SHOW UP! \ No newline at end of file diff --git a/qiita_pet/static/css/style.css b/qiita_pet/static/css/style.css new file mode 100644 index 000000000..797bcaf77 --- /dev/null +++ b/qiita_pet/static/css/style.css @@ -0,0 +1,5 @@ +#template-content{ + padding: 20px; + height: 100%; +} + diff --git a/qiita_pet/static/img/favicon.ico b/qiita_pet/static/img/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..49d9ea31f6d7fc2acd1150a43dfb3e0d3d0e2e1d GIT binary patch literal 32988 zcmeHP34B!5)qly%WHL!6$t0P1Zzc;95+Nk)VaFvxm^B0fhJDc%QIRE}R@5R;s1-_O zt0)K*>d)4twW8Lg{_ran<*VP%53JTtr4uKf7KDX}lcY%4A_!Z?3qm&H!}LZ$ zcz&uNlpznugcU%J*Fl$nE&*Kvx&(9y=n~K+pi4lPK*A+ZNv4GJ{7y5ZBr(u5R0_&B z+I^wfwra{!s+;4dOreUPjsFM33QCr|@@}c7h0y;05`*%+qEEih<@vhMT}7!54~=m6 zL*t!6y5!rCLV=fZBtKnY56Igce)$!Lr}?PE+w`u(8`_7u-e&hUmL{TJuCbIS`N*B` zrvlM0FUa$T{@W1@JNy^H6C!qYjP zW=gg48Zp?oAa~SVk~|}w#r|T!BU{KnUV|^GNzYMO3p|!JA#xd zO$cL3?xkc%FMUTyHm*Vbi*azV@-Gq`^R1>NI_m7HN3Ay znCAl-(z;=6Tuz3J9v>k*J*((Cn-pkyoYyz(IZHM9E?7|XP`c=Ez7^};b&*1GGbbEQ zPO-O*Z-gn?>>yKCUyi#uLMI5UFNJjkO5!-z*bmdh01ZVy&WAkjLE9eAx%Blg3Oz6Up1g?*e*wzk}RY@jl~>>~$y}w~@lD$z}=lw39h+ zB<64THaR34vhJ-XO5wcr0I{4}0qe2oZLY=`xKXN+mveuDW**$d7!$k1TDh?R88ge* zk&u*(IkOk@Cq(94&tJeGPhoRGT=W^{++wLF^a=0t*k5W?Q7jrlyic#+7oMM1K$J|Ox+Q_$bjB!9@yI)sW(J1^@l_UezHNHUt=%;<3oIjTR# zSZJMFAbc0tw_S$w6mSNpTXfL!nxHC*_iwV~PYRDy_A6-09zeMx_y5K< zo_1keKZtd13;K2=@4r~L_J}p|Hb==r$aeuurZ67(y%9NW-6Ker9s-L{r0 z{Na8K46nIx&kL|_7jAnXG|B00_Ca?W7S{24`R;G;(b%9fu80MO=h;GI72u+&6lzt^K4boRGMwF4SEp4 zc4b7u@5Ai|Ogylj?`vM}sG)w`Bv&P+v5u!IC?_R*Pp2xmkgt?#7RzF_z&8~6# zr8=6;>jm1r8SD1LVj#31eZCiRyjAj@a#vzrVJ;oO7zxV+bjuX}(CM~1eDd;0I2Z15 zU|Sg~|3Ub*dkCV=4W!Yh%O!7U(l=Zvxz&$-4|}e?`i<50s=s1f4hz@Gm{Cd=G{k5v zSL(?7m>|}XgLQL0AMsdF_&NMEUV?tkZO3`3k=K6gE5(BUu-Wc!c^qxurO-y)2Dl1u zt$V+Q_MC&xF*Jx}pk;)5mq4#?$n3M>bwjGNT|EqGMU-CJKv{h^k!|R+_d`xsID9l5 zYserw&mXV6Nc6V+&hBsilG`nMLnqMB>zOB)lj}KM<_OAvjnZ~b=6&WpV7oF^;aTt} z3<|uT=kVI6W{A08AYfipte^~&Z3ye@QR`yN=ppA0e_=DuLJy&RuVUUG6MfB};XL*! 
z=EET%&di}&%yqNEA3ES_%#Rb8^S3B*QQRA1m5KO&0M+v|O7>1hK|gU;pYEl;%fMc2 z;W~#oJPehrq~SH&;72H}+njcDAL)ur+)8Pz<3-W)ge>fdSQ~G41VcxiHIR$HX_%5H zhReDxud7NKTKAq9`MMO1c21RiO@n#9x~f|DXotVqgT3nt6{CZEma|tK!ZphC%9$@qLZM+wv6FyOUBdbQt^HTrF(};oV5nwp$4|AuZ;a zsRN#^NoX&h;dt$}SA8jo)s5Y8e=-JgNP(XJ8}wlj6N;m6ZjES z+8fQ?)SkS95XL#Lxx5ym%?+sQM%3%A{Lz#v`J3kF)sofjYhKEGm)#@(x?nh2Ksy~{ z^^WwC+m58cH`1V-|ew36I7pTRl(DC=KJ%i+5zD@QT3a=sId&5BJ z<;uBH@AwzSsl!e+-vs+Fi&OR+NN>}%v}Hrr-h^>I%~efB>ic`>rK7OUk7RnBOO`?w z`?ISkTbn-;=Dn(;{{NpC`4SP$>+q=#%5OxP8yFS1O<0fXu^z9o*HZ?sYkZ!MoX36$ zJ!Bfz!JA@@kqA@PUETx6lEIfqlOnW;WLSHw&_$3HPP@mb+}heoM8V`amv0j z!R5tb_hJY2;PW|;)$;+r1JjPi-qq)$&?r3P>0{%fuJXRA4 zd}ewLsI*n<{u60Ai7c>_yq<_rrxwHy=_HTNkhwtg6;Ru5CCu~j+W;F1{Xd{m54F2L zffeF4Rn1%Lzb1iYb(IyL9X^UtKRz=g0yK8CzXTea@XNHd)S??h*HgW-!nZ9QK{9H(^^8jwND zO9}IxQ1~9bt4z-`J)aZVhC!*P;_@8}?*BRs^FgWP)w;)!cCPcP(-k>OLLc(I0@T+1 ze#CX8tMozoucG8r_#8l5LTuW==X?X(M$Q2q0xI=aT#nlgydB8jK-L3a0IF@J6k2va z2d)5~%Uvq6e=ERuQ#|%}1EZ~x>UL`Vhmq$_U@`DN0n*S29{;SfYydt2+z;gO@V~%k zfqd860K6E;>xmA!1at}L63``}OF)-^E&*KvU64Q-Xx+dnp#DDr5P}Z>h7zD;wv%OB z;nvE-jwk_d@7N}W?SA441a^S11-eceE6>BX^w}<7V^7#emF3T(+$tht@eS0i_ia=d zmk$WS-{WiKAHc?H4{W1b6zo6 zy8Q_Z?g1_ZUIL8Ru)-Gs*k{g=0--;{R&kv((E47DWzc5prx3<)VMj9*Z&8-y4tsP$ zTG5q%HdtJzGAlMxM$eV7hg(7!7cGNL=*oYZ#R(@;a)!Td%B;9vVHLKw@`j#f*(q^d zaoIMxU<%ctoprD|{y<1F!uAetFp`bSyG^G$q|NO|2phdNZXu>nO>b zvkz|$e(9P>ESD|=WY*W=k?+p0qfy9QkZj8Qg5NfovMVViYuHmGcd$JHIoOs;@}916 zd8iEUg9aB&mN#eh+xdyLY!#&yH}JXs@~#%nHVd%R`Y~)+zQ=i!EX7ZzV0`C=ZPO!m z%wpC4Vd?I9Ee1Mc z#_nu8>I^hr%I_WVeqb5w09PWfb}hILtP~lr+cqz_3=CF-_MF2AoG3d0>-`C{3OH0s&F!!BOhrON5ceg4J=k3b(5N#1Wcb`-7 zo2w3JMO*zh|5FpM?hb!r3C;{x;XgH& z@Eg8Z^sJlZ<2{J$3c5-fN98#q4=lF0mpoud?}_iY#wCI=i_b@G;I2B7>hQ|9LxjJ-7~+Qg4XiYHyUjN-!$iqhOWJ)F}>uL(-}Qiy<~RJJ(!X+^mW0I zGOd8mT0waepT#w7q6YE*ESD6_q9J&vJTu9d{)Hjcg>$2MZOOIdD7}eF@WxL<`|Gj> zKK7BNU^-d*?mTL*erZ5dX6i_O*B+!=sfKP6vWjf@?}wvFhIBlbtN1(4CX-xaTWl$@@2d4~+?~>QhC^yF_c*Pi`<}_6?tTQ|;ad@h)>> zSnm(WKjXI>(4Vj3GiqAi1lIMB0eR2sP{-o8Ph5EARx+i!rff^WZ3e&Rx8zS>hj&0e 
zjE7n1y9V^#T-0Nx>V3q`5&m4pH0GfOM=gnXBNoKllTi*&Xnf&J>YZNPuq&(YcCz;R zDP{J5WDj`H2k*IfV?KlRZgqb6`TW!fWd3gfd0z`d_?(R~F8XK+*Ne~QMbpTZY_zQg z0n<~mDR*P|?KbXhT60DHeOaN0J3^u0vt0h@FLLpQ^%A^|sNwP=X@#CwWZ&5co0hN#G(N-wP;u{omjp2Rv8cwoe7#3ETzT1mrq*xb`4jhbR(YJt-UL z2L9XVN&ME=bE;T%mx9_y2i%9dH2?ho%PZV}<1K5^bQ&pn|1rc`w(eF__V5N{stuejFXBPLOz3Ju4eqrskW|!GH;ZSDz`UM5k zXafHS$2XZ;ef(c41y_(iqx+(XNydy8Lq_=l&<3C@%q*Tk{c?uyTW=O?c%IJH=H)Q| zM_fJLu&0W?hxk8w(u(IV!#mG~&`WmW?(i1Rvy=|94M65`IWPkF9}FM$O5VmcKlca(g76L@dteN?#vMbs1^m$3~P4V1I^8T)aBI0Y{6rF;(qB~2YQ6oDhaJAj2iCvYo}%Z|3zBKi#Ay^`0;mB7uw z9l&8g9drrk63``}OF)-^E&*Kvx&(9y=n~K+pi4lPfGz=D0=fir3Fs2gC7??{mw+w- hT>`oUbP4DZ&?TTtK$n0n0bK&R1at}L5;$uK{6DcpSCaq$ literal 0 HcmV?d00001 diff --git a/qiita_pet/static/img/logo-clear.png b/qiita_pet/static/img/logo-clear.png new file mode 100644 index 0000000000000000000000000000000000000000..e0557e557d50bc3655a5ea65605f34702981184c GIT binary patch literal 13936 zcmV-$Hjl}PP)4Tx07wm;mUmPX*B8g%%xo{TU6vwc>AklFq%OTkl_mFQv@x1^BM1TV}0C2duqR=S6Xn?LjUp6xrb&~O43j*Nv zEr418u3H3zGns$s|L;SQD-ufpfWpxLJ03rmi*g~#S@{x?OrJ!Vo{}kJ7$ajbnjp%m zGEV!%=70KpVow?KvV}a4moSaFCQKV= zXBIPnpP$8-NG!rR+)R#`$7JVZi#Wn10DSspSrkx`)s~4C+0n+?(b2-z5-tDd^^cpM zz5W?wz5V3zGUCskL5!X++LzcbT23thtSPiMTfS&1I{|204}j|3FPi>70OSh+Xzlyz zdl<5LNtZ}OE>>3g`T3RtKG#xK(9i3CI(+v0d-&=+OWAp!Ysd8Ar*foO5~i%E+?=c& zshF87;&Ay)i~kOm zCIB-Z!^JGdti+UJsxgN!t(Y#%b<8kk67vyD#cE*9urAm@Y#cTXn~yERR$}Y1E!Yd# zo7hq8Ya9;8z!~A3Z~?e@Tn26#t`xT$*Ni)h>&K1Yrto;Y8r}@=h7ZGY@Dh9xekcA2 z{tSKqKZ<`tAQQ9+wgf*y0zpVvOQ<9qCY&Y=5XJ~ILHOG0j2XwBQ%7jM`P2tv~{#P+6CGu9Y;5!2hua>CG_v;z4S?CC1rc%807-x z8s$^ULkxsr$OvR)G0GUn7`GVjR5Vq*RQM{JRGL%DRgX~5SKp(4L49HleU9rK?wsN|$L8GCfHh1tA~lw29MI^|n9|hJ z^w$(=?$kW5IibbS^3=-Es?a*EHLgw5cGnhYS7@Kne#%s4dNH$@Rm?8tq>hG8fR0pW zzfP~tjINRHeBHIW&AJctNO~;2RJ{tlPQ6KeZT(RF<@$~KcMXUJEQ54|9R}S7(}qTd zv4$HA+YFx=sTu_uEj4O1x^GN1_Ap*-Tx)#81ZToB$u!w*a?KPrbudjgtugI0gUuYx z1ZKO<`pvQC&gMe%TJu2*iiMX&o<*a@uqDGX#B!}=o8@yWeX9hktybMuAFUm%v#jf^ 
z@7XBX1lg>$>9G0T*3_13TVs2}j%w#;x5}>F?uEUXJ>Pzh{cQ)DL#V?BhfaqNj!uqZ z$0o;dCw-@6r(I5iEIKQkRm!^LjCJ;QUgdn!`K^nii^S!a%Wtk0u9>cfU7yS~n#-SC zH+RHM*Nx-0-)+d9>7MMq&wa>4$AjZh>+#4_&y(j_?>XjW;+5fb#Ot}YwYS*2#e16V z!d}5X>x20C`xN{1`YQR(_pSDQ=%?$K=GW*q>F?mb%>QfvHXt})YrtTjW*|4PA#gIt zDQHDdS1=_wD!4lMQHW`XIHV&K4h;(37J7f4!93x-wlEMD7`83!LAX));_x3Ma1r4V zH4%>^Z6cRPc1O{olA;bry^i*dE{nc5-*~=serJq)Okzw!%yg_zYWi`#ol25V;v^kU#wN!mA5MPH z3FFjqrcwe^cBM>m+1wr6XFN|{1#g`1#xLiOrMjh-r#?w@OWT$Wgg6&&5F%x&L(6hXP*!%2{VOVIa)adIsGCtQITk9vCHD^izmgw;`&@D zcVTY3gpU49^+=7S>!rha?s+wNZ}MaEj~6Hw2n%|am@e70WNfM5(r=exmT{MLF4tMU zX8G_6uNC`OLMu~NcCOM}Rk&(&wg2ivYe;J{*Zj2BdTsgISLt?eJQu}$~QLORDCnMIdyYynPb_W zEx0YhEw{FMY&}%2SiZD;WLxOA)(U1tamB0cN!u@1+E?z~LE0hRF;o>&)xJ}I=a!xC ztJAA*)_B)6@6y<{Y1i~_-tK`to_m`1YVIxB`);3L-|hYW`&(-bYby`n4&)tpTo+T< z{VnU;hI;k-lKKw^g$IWYMIP#EaB65ctZ}%k5pI+=jvq-pa_u{x@7kLzn)Wv{noEv? zqtc^Kzfb=D*0JDYoyS?nn|?6(VOI;SrMMMpUD7()mfkkh9^c-7BIrbChiga6kCs0k zJgIZC=9KcOveTr~g{NoFEIl)IR&;jaT-v#j&ZN$J=i|=b=!)p-y%2oi(nY_E=exbS z&s=i5bn>#xz3Ke>~2=f&N;yEFGz-^boBexUH6@}b7V+Mi8+ZXR+R zIyLMw-18{v(Y+Dw$g^K^e|bMz_?Y^*a!h-y;fd{&ljDBl*PbqTI{HlXY-Xb9SH)j< zJvV;-!*8Cy^-RW1j=m7TnEk!K#kH>Pd+!XpfWyo%>>z4D1qBp{L=c%72;z!cG;xb@%Zp25)R&h` zo<^h3Eyl<*iu($Ant)+fP$Qx&ih?2vvcnA90K+Wzc6Yu1uiK}m@0~j^AdqP0eBZfM zb*fICQ&p!&sc#`@t!*?b;y2B;EX&)MW`aNFmv_Lc_l38y?EdwDwSR2?dR#Cb zwuQzA+p!Yq8X~_5Xomq?0-FKF*jTm#SPtBtZg{>??z1tYu14609{9!^V#ok#EP?j| zy&L+W5AZnPQNT{XcEDCZ8YK;p?RS8cz-_>Fz|VjSfb&q_N`wY{gKYGW(jkpuU>FvR zLzZ7LmTsV351b5K2mCs)9?UN}jx#3Q=b)v?4Cgh*|IK=U#sCbwIpTwW;r{o)H>xxJ!H;0cUV>SP`noRe(wU`27UtE1mrTzvWEkE0Z#>v0JZ=UST_$lJm9c@q%H*m9&g+BFCl9y zWRmV79&87YZiH`p4{+dtX5t`h4ZH_P4<-;tgh;3ajsqUwkbXhAV}MTs1ricLNDmoK ze~R>ptQVmT{6sB6xuGNN0S#}qcMg%@!J~5K+81?2mN-S2PDuaxX%mk>HZh#tdN1kklv%m zVm>gCkw_#)AYGp^B1_ls2SDRL-N>3yv(1C+87qC7pO@w_-GUx2tk*wBS0@@cTLOb5m+bvB0If*!yCixK7^{-#&}LRlW^ z8O1yPk2D#y{Wb{j2Fons4+04^c9CNU{*M?Vw+U@1A7QYC5PcfMZ*UJ_K*b325&r;4 zpl1quKrzUdrK!l@ZzD{v19fo%EC&)qRx*|R#g;MH7W^T^&l7heeA9aXV=P8ULwMgG zdG5!;UWsYK5}kV_E(nZ=cT)l)#w$aGrnmd1~0fc 
zM*Ky8oau4Go{(hmLib4f9@DCcb|a+Cdx0 zhBgo#7{l{gE^l4I^RUmycLdWt9iRPjkok(%v%rH5@(g($1g!xc0~A`iY-4;N^0l+B zz~J>U#8ZGqi1yO6@{n~7kguq12TlR~BZM?6Q-<*{fQ}-Dw*d6Bfn9-oem)8GmBkrv(|h_3>Mp&W^^zT9uLXO#9I(h!{|OY#px zy_&ux>(G522d}x`;dKW3Kq?P8fgvFNjIUotL*#`bKqkxezJA>Mh3gPQY6kjyWbmI| zlTVg*l27ybgVhQ&;mt#QB?|$li{sb9^ll)%HYe#m_YHUgU0KJWhG5noYQboD6iWz= zj&DuhPWgy)Nlx!XtVaRuIqwVsEdG>|Wv%$|7S$;AZpzr8CjnP6zV1S@;YxD@-Im%&4 zDU-r{kQ36tdc~l{7Pi4M2n~#&k>rt1zZmfXt&KMf_C%qo*x^PtmbBM= zI!~0OA4sKdW)C&5+1Jwg zsaQ$vG7-@0i=g3Vpb#Dg{0vCQcnAPp4lZ9JZk9ZRd!0P>$=(+_}ihP1T3AL5^rDFi(6{Ja6Yg($Z%#K&kQdg@2Mj5?=g6b<-m|bQGoZRsCXmOb-7(H~>0y5F7Kp4Ub5SkZ@leHbX-%QJ)((roLoP|7z zDW@>+a?iq2;90Nk99odmvWeHCX6@RDimOPXFmC|NsxoV8={#$pN*H5Ys2uc!EF~!| zK_kub;1hWQByfL|Ne>n_MvMHnSP=Xi#Ef+_|6l-hM!m)x@1=e*0A5a0S4pR^p-r50 zJni!E^*}h&=V2eok{r%92Oyt!B{J>~XM%3}2E0N4kQS#dJ4B5wD0+;x- zjI#Z_gBAp{J}kkSqZ}HQTby?r7UzMnL8VVx)7NdwYU&(@E3?xhuJv#v78z$NeRM>t z6P=>`N2^EX_MkDU66`7KU#i6$6hNMtfe14W80hQ}=J7A_76A$9$J&lgk`DTB3rOG< z6QMp`!+D!b$vu=s8+dofgM<0AREe8P>M=sUPIdr!$tS!f4*K5yqL6(~s0Pyb1W8*2 zelvj(7b7M8kU@$e!hn*~r7-VCr(NDO_?{}5)!XX*4yF;=Q_EJ`gG>Kp+3I=B*{<5M zS4XCmK8#LT6Z5P$oZ{TGtrGZ77!miA8CoF&$=B;U`1wLhFD^54IWoRV&pZTa(TmNFHQ(A4X2LrusrB8@p%yH z;#VuH4`s@V^C9!VBY*EBZTDF+2fj|c1~RAddP3=!%$1~5H1y7<*A8;Af;SG~N*lm5odgkB3I z$i7fQ>VBH9bA$GWC@R&VHR#~3&WKCh7d8`zhQF^=U==qT>d%RPWcLt`ER%2m|zGs9Yn8d-;kUmrt zX<2^5dP`h%TSfx;8A#?K2*oQL30b0x(9*?=_g;&PRq2_&?uN`Zvmeq>_|sXX-SzRZ zF2fs^A(c>uALk1e=CwJ5(gx{E>ihxAp?sVnAya*z(E$09it*+l*-b!i4?Ggc=jeQ6 ziNlds!n=9zuR6iUjTEY-o zRm6~v$Cb4+E88An)p!%u4(iyi(|hZxp#5Da`5MGLp^I~tSrfN>9^7m_{d6|ye5Ib9 z(!H)KwG`A(s}qm1($C?>TROJ3@Ii<{M-G-4hnX~fS_Uy)stoF4=(xqtG&Iy@4h#rs z2v-7k16f7}o$bsAa;bd^{5;hPkAz1c?;_xK;8q~ZQWx@e1o8t?PXoIeYs}xqCoEqg)(h{GSSFtK0Jj`>XWwc z@5Dy~CjiTU4F&ekLqM~dA~f|%dl{OB3IWnQ8l%4yV>vEWdQ|%^&Ink4#KUZXC2Msi z%472$`7AP{>=x_^apF=B>yf$Rtf{3dV5A=@SNDS1|3J0NeXh9`Us^Lfa1s5)?E$_#;{OdXJ4(*`b zL~~d|Grktk5I9KP5cb+jO`P5ASnBtVUzW4pcqc#2fS!vWy1`g$n+*E@natYvGuav9 zmD@89AhMFF+TU0GDD_|&(K&%QPsua@@rZ~9zzM@;mSw0HJ!u*6W1s=#BRe>B-3g+( 
z`1Z3i1>%*-C;C(N&w%u{mx4~uXuQ2tkYb1=8e_!3suHt2>V>5nH}uPC7jsqQc;pR+ zwyogGK6O3&9T?sG==9QIZc$#IqwPz;UIHOsxYgdvsAw>1yEQs({ji$h>f^|Y#Xmu> z?DVEiYG$7@+)~ZVBUyHK8fc}fs%^`I&6jRe`o;0*qGjdQ*c036?Vr`CNVDA$OK(kR zwJGMr)>&_?-)fbZ0h2~6Ie1wU>z*dlgw>Z6y||?9?dnF1V=l^m><{S zjSIJ*Ika`FvN;!eM~=E7nv;K|*9`XDzO!Yu!^$0ljtS_xYlqUCHz{k)7fLUENk`T~ zzn+-P*(l>;((-7|{1Ydn2M^@9@5pjU?=kLDB<|mmLIn9MLO* zG9Y50EJC0r1k$eu&!51<&Np{@tV=o?@fSe92q=b&Xb`8@ABeKy{h`@Qfscbc4LAZQ z1Bxi_Y?EGYCc=^6HTy5bjckMlz<)F%i-1jmVt}Hvb`3EmFxLaX6mv<>s8C{RkmA^fF@b+)rVmhmTzgREBt4VNYrXY z4c&yd6Q(fP)kC_>W3h&=hhI>95yO}^N=hw|Bd5|>vv_i5s;_sOZKMw(A zu`Omhg7U0q4)A1PxZfmBLmq^B_;)~NvLT(Zb4d>XQYgf1XHJM1xH4#Zcof{5ka?<)HyR1o!BGBBQ!7DYpo_JlXw z89&R6qF^z`xD)jlBSe~$lI=3K;nz=(OylQW>g%A<5P5$n#_tz1`&FLK4-7zq;59IK zzVq~moC4M?%b`d~6y@JsGbGqN(;m;q-VcxZ@6*n0*SOyF*R{6}r~J6wUsLtqGW#ET z@nW-?l=}7*>JsQwRB%E9r+MsO%k}B)H4JiA4KCk>PYJnz&ByK`8+X^6HX()D8wlBao`B_S||ABjbS3q(Kosvycj4Tr(Ef*b;#%*mgUEV zm-l{ieP)p>b&xDeq)Z61*9$1m`oxIqltZ3rKf^2p{bMvf;Wu-7uqbz>teBZL0w8Y)vE>n#+@!8IHGdRwZhN#G8 z*Wo#^u^s`E6isxXZ-n;XJDxuA!{SuGl6E z_n1}*XX%DTsBMuRlY75Y?fuwN`qx>uy%-bX*4Xq-_gHh1&(Y0uagkdaQYZ)NI|z$N zrbl3y2Bb+RjzgKR*r^D^9*Hc%Yc}wH9Ws!T_a*4d{XC(es^G*ayt1^hsMNuI+A0zN z^iTpcM5bw^J%#9pq*M28K<-T&4A6<5n8QQ)q60}1XT9u$(4aEgfV7Q=E51KK5L*4- zAz5zEzmQI2w5+MycH$1Pb4@m{cyNj<0j(t2F|lZ?&(+S2a{Zumr|!$tz>~RjHv=XW z@%%pIih;*E05iz?i)EeMu>+sb>*n=QOW!z3W#zQQ9+VQ3h;Fexy~AmhSnd|*&&R{U zw#Rv-Z1A0xg$2)KdG0;YEc^Gk+x%Xn+B@G|(&IjoOE!fO{LYnS^?ZoWs~%wzomtWO3_2H`^taGik(RVdzE$}^NnKweH99w zSw#lrvK?WNdOinnjh`X3aJiH?WQa`CgC{yEg_3(ul7{+p>J;W9`O7q2P0{`Kh~$R$ zGzrrP*HS%nJnKVKYv{+C^!Y9nsEl8F`hdvOlLvbZZ4v&DgLOWiG;A_BE7$&datJ;WGYIYwE&vJj8^IQ8wH8==4j_S=5Ie zX_gx%2*~3gNWa~VIKLii4i?E0!LKn6oxME0K$);doU40^pq%cFouM(VM8}%{Jd63v<1@HbOxRc zJPSw&WU?&kAkBDTq?wTR2A#qJUl-b8HRbGEgEJEqNP0^MgG5)aeRGX_M(=!U<~=95 zpUiF&x#~Acoi++@I-;l;W5q6|mqE|UN*8%iyLB79^%(s*8i1vX6Syw$C0*yM^@FX1 zdoJXhYiHYC(7bEhxOXM?iBr(hM`F`UXGN!Nny3p4<~hp!yY^xq;G%wzHD%%IrM*>F zuO;TM;AHSFhP>{O#B_=gQabWTXZs#T{B_a*bp{DEmLa_hup2N8QU{uE%j89M!E!8R 
zsYfS2!TJa?c;$$HY$%~03~<;x_F)!ioIJ*$LIz>vvOJ4~stBTfS<^PeX*itt^s>D$ zfv6kL1)YJ^krOTqtdl$h$`}2)iV(tjCMjR%7t)LXjTarGZDsqb3jpgAe zF*3KXx;W=3U6Pb<o(QebBlGET zlz!?dv!{V9dc#!|3XsipGL`)0iI6Z4$gKqj*$gn_VE{dgX@qA01?0dWb+1N7zF$rZ zg$EUhU+FWlk4!&>4o?d~YZ~VDHB)|@$x z9({NRYewmARTJ&}mH)<$o;$&GoIPmBBz5cE%HFuf);KjAp>U^G7dQ?s*_@^5aYJ7? z?>t1Vvo|ij#)?G#19SIAG@=B)V|+~8v)uRc=EC^?=oIFUV2cAW)hi9gbT@|w~yR6#C^1PoS>;j}0Na-k~<3JkM z7T^__;r!19T#}8)04*Ja9^T_ge<$0l)TiLzl+K8Bvh7Nl)3l#vKZmbEmV%c9PP&vw zG?ztwt@M^Hr$GZ=vJ%8| zB~!K6Nu2qk$&4`FP?MD=^Tgm-ZlYg_ctIv5{(E5p&G3XmUX{EHB#1uZVMx1)ya-eD zkTk-pfdaxO9({61Zs7U+`otUa1&6;&1qjJ2dXvl-qRpxxb@}|iWF{c+;)o0115y^W zppQsPNl2l64)LM^m&n-6O>bdPAFyou#fXJjqSc6mPW-^OtsYgUw(epL8|LT<6SAyP zqjq}L)h|(wbgR;H-JgP)sD8^&^@lV8F!CN#nlw&t(G9Fw%u?jkNP@B6uYVGQ%>ehoI0k_wsY~uNxCact_d2p96j0|JZB*NH%HlzW=-ETpOwuX zs-i=8^|K~+U+Cc-$qmkk+zoEe4u8>iJ$NScx!ieS{5z3&%>eK9S3P_5*?j8?&(O?ic05m*aVeuZ2fMP*?!plR!;U>5CM`V_Q z#AS{U9#HlfJrS2iGXYSqkp~)=b(sK5$Ow{If*JQgPWl~@<~Buu`qCH~5+3Cb_?w|l z8l$<+LD&>znLJ5dYhUcxjl56E2cVv;@8AF=TNC_0#>Z=6J+LIc!le_-BMuF@PdOvn zjtxu3Qv6dWkF9N$>n&S~Pos+m2W`g=?Y{k{=e_*2){8#SdiIP35hd~`2<$>4kUeF?}pI(^oCULKAK$bHm zelC?DE`uSYI-!)o{wlmY(m)7uaNyR&=!!6kzKc>n#7+GO?*J0QOPnmxT(UWsGzJs; zapSo`>keEGB#1oeT)tS*&o3{A$)qt%KfJ<7$IS2~lnu3j>TUIJ*ULL>^~3ggjx32W8sSgKz{;3}4zJI(&w_c0PX~ z7qp-*#K!>%!W)jy4C!cOBU21=3=8^T#tnLlK~4|yl1roPEt6%EOScT*k|bxHyEv$7pfi3&pz!!vY9(~p>?fqWNrEovTq(>jO@H2B z8en+<<;eEnL_Z5VDdUW|9gy}Kol&04zm)fjALIb7!gq6ebCVkYPqa>9yE*ifLPW=Z z7YdpSL@dwr7|GdOdy00<{bF@BXW6;Jx2JIDP?eJ?YCX&?Z2NO;3!Zh(=ru1fP%TFf zed%0xe!{!=*V=vNG0*$pJv<~V$F{)V7i_`Ga@CpUsW-|#t1ms%cvZ9o%VsdR^@@gm z;+6U$&Sj7wo<+PnX_rbR@sS0`W7|@h7<=NIq#U%|?rR`#(j%D)9Kwfs3z-1+2azft#E&6>T7ed3vT0pex6 z?9g5%60VBHaNjvcuJp z3e~*jysT&6{Ginw-uIQyh8jdb8INz4S5a}Zot^bxDjI8L^kcv8+KHE!s-lW7V;tV@ zA-8wGnd*Cg9T(}+{}QKk{*iik_7AN$YL;W}n-5ae_bM6m3VVkWAhbCUVuMpFJpqk{ z2euVJ1GcmKCy|ydn!54by-=7N{=R-zn3r+FHekSOD0P2alBf@XQJ{Fnt$rTI9z{$c zVZBeSAc3aAf%_~{VIW5m}7yIB}=ue1YEgzz*4JLtzurVl96`E!Mqdb 
ze^Uqdiznh9uiP2cX{Ek)>=9H34eRBVZJMu3*+v7dSFy|S=Hd^^YRuM3W*QGMQ{b`I z;j1rvHo;30?HsSRCHks+o&IW+9v&T%IP3U9iNdyn^}ToWL~R3sywfut!rNUD*Q>vB z^%8uGXiF(}adB)&p`j1@M@7BeqIQe*S?1}?dU$YNWf?pFsIZR*=cv;qEm+Xidyb-k3Fu}e8q-nJrj4T>(;OnlWxAM%F&PG zE|vXwRxj=R;V#d4YMkf(=0;avHWFvxj`-XVjIt;EYBv>!u~s@`2d;9T{@=Ks@|yNu z7_YtOpTJ*EOvd-PaFE+twgq+n4%k-uLqGOd@)U^I+VJ|=G<`;a<^?$)Vanw_zEwJI}i+4K|9|tz`Tt`m4VP3{r zpD|YQBAGI%NQ)uzfdOWi0m`H?iXpB-+?<+O2LWj@KvN&m&47G_NT>oP``#pjpXG>Q zaP$J7vJN3{3FM&w`NGC~n=i}gXaxVWe^QKbOg9`Taz4_6 zU3$@cznuswcEz2WFHc~=s@zv!uF{(})L?JN#=7um3B7HrgXh(rw-`3??pxZMHAQ>V zCV6@~jE)A4lek{D8cQ+yDjr8f-v{g|GR-O8A6)bHH!>JHg}Hy?b89}S!8OrTU6gAs zi^*sHv{kOGY6tXG)LTC!YW{9``>MXq_j_!^<4~PAxBpy2)B2+N8BNX~{BrbQfkz{a z0Gb#TW@T_fCnF89nh^kB=6weE6p-G{98M5&&nLz>7;!NwL=8VD6PG52>45uNBcEF* z0gjDKfUSu0%IXo8BOsnKk&@JZ!mkWAMLR|99?@g)_t2w{cv(-L+$_;vJ%o+KJy3Qi z@JX+#YMuV$UASGlN$c|Dxd!xo1$w`!z-xgWrO9~O!vQb@C^Uv){o;lB*WylexA~jx zqWo>Rj=DAcDHcAn4m_#WKSq6Pfgb|7lrDxRTMT#hF(%Gtq_4tcY?>I=6)*61)L0Q} zbWx`vMrI=MTjb-F^;%x?`)PX0ADMC(M8q>0Vql=i9|084hG<536I3z2^l%XJ-pj~U zOQc6EqD}@^}1s#W|veF)U)rx%8qD5(r5hfJH19A3 zdg1OPc5`IP+6VN6vCwBi8&tz_;f6kae0W0Lg*qw`)(a42_)v~k&;nPn#rSqN-?r%h zfgY7}r0YhWgIde* zhK!l{RK^*Azn3jl*Q*nbTcLNaUS;LRWqE#GN zoV?2s-wNb?*=96ePIgF|j^<#1y!RJFS)Nae#*zW2etzM?^J2fmi2WRYvzVpgp2Ax^ z3oucSrEuAeLwUSS7)oTzyp-LxJwb zXzDAJZW#5IBR$S=EOozv&$tcq>fzY5QnRsTK2F0eoF2Vz&;`9_*k4XRLT}pH#adzvY=hTZZLr&_ zUq#ldbK)!2*lc`bC0u7Xjt-#5=?ncrG(nx9B+EYLSQQ9(xN~tu3cI$BI|>_2N|{@izIf_=13PcWzBsDtdcl#-{tI z2@gTW&};R~)K?rY4!Bt*$hSA}jf4YlJe8F94T1)=$Nup94eY7>tY;5(v!B`D>}R%v zdXq*l2Aho5{vHxTk@W>tv9`B%`hJ^uhra->3eQz>3uNsPOJsy0H6bumyBHm7mR4DJ;Dc9h zvkoyr#t#<2XlMxOeZszJ5cq~G#w2~ZM}D8j)Q>*eV_8wGLCWk;q408ptC0KQ5pH64 zV1Kn5miak6U}q#3MY1A4vnFrY!Y|K8U0jUXc5Bd^VMym^O~c(HpKAOT23-;iFQ;0H zmu18E*5LhVLpl%T;*)!J_+nd+?%2U~;=_(pS^q9+jP=0pOf)3Hr|BwJ-~lh1^h zSN9#awQ8Q&Z2h&I`#b~WMTEOyjA<~dlbIMuRbq(Zf0Fb=@N0Cza;@fIxiNN?6iTmI znVrMG;c1!eK|d>dw`$S4jNh6RwOA&dPx2bWgVY1*#-s68OnhYCKz_1>pT4754osbL 
ze1hTM(H$h!4oJ1s+vs1<6BiMJa<;j5OinBzH-?A02hxp-%eKbfvxE)M3>N5zTDJow z&ln{pPtq{`Z#nuCD%{QkC9kob{p4Tx07wm;mUmPX*B8g%%xo{TU6vwc>AklFq%OTkl_mFQv@x1^BM1TV}0C2duqR=S6Xn?LjUp6xrb&~O43j*Nv zEr418u3H3zGns$s|L;SQD-ufpfWpxLJ03rmi*g~#S@{x?OrJ!Vo{}kJ7$ajbnjp%m zGEV!%=70KpVow?KvV}a4moSaFCQKV= zXBIPnpP$8-NG!rR+)R#`$7JVZi#Wn10DSspSrkx`)s~4C+0n+?(b2-z5-tDd^^cpM zz5W?wz5V3zGUCskL5!X++LzcbT23thtSPiMTfS&1I{|204}j|3FPi>70OSh+Xzlyz zdl<5LNtZ}OE>>3g`T3RtKG#xK(9i3CI(+v0d-&=+OWAp!Ysd8Ar*foO5~i%E+?=c& zshF87;&Ay)i~kOm zCIB-Z!^JGdti+UJsxgN!t(Y#%b<8kk67vyD#cE*9urAm@Y#cTXn~yERR$}Y1E!Yd# zo7hq8Ya9;8z!~A3Z~?e@Tn26#t`xT$*Ni)h>&K1Yrto;Y8r}@=h7ZGY@Dh9xekcA2 z{tSKqKZ<`tAQQ9+wgf*y0zpVvOQ<9qCY&Y=5XJ~ILHOG0j2XwBQ%7jM`P2tv~{#P+6CGu9Y;5!2hua>CG_v;z4S?CC1rc%807-x z8s$^ULkxsr$OvR)G0GUn7`GVjR5Vq*RQM{JRGL%DRgX~5SKp(4L49HleU9rK?wsN|$L8GCfHh1tA~lw29MI^|n9|hJ z^w$(=?$kW5IibbS^3=-Es?a*EHLgw5cGnhYS7@Kne#%s4dNH$@Rm?8tq>hG8fR0pW zzfP~tjINRHeBHIW&AJctNO~;2RJ{tlPQ6KeZT(RF<@$~KcMXUJEQ54|9R}S7(}qTd zv4$HA+YFx=sTu_uEj4O1x^GN1_Ap*-Tx)#81ZToB$u!w*a?KPrbudjgtugI0gUuYx z1ZKO<`pvQC&gMe%TJu2*iiMX&o<*a@uqDGX#B!}=o8@yWeX9hktybMuAFUm%v#jf^ z@7XBX1lg>$>9G0T*3_13TVs2}j%w#;x5}>F?uEUXJ>Pzh{cQ)DL#V?BhfaqNj!uqZ z$0o;dCw-@6r(I5iEIKQkRm!^LjCJ;QUgdn!`K^nii^S!a%Wtk0u9>cfU7yS~n#-SC zH+RHM*Nx-0-)+d9>7MMq&wa>4$AjZh>+#4_&y(j_?>XjW;+5fb#Ot}YwYS*2#e16V z!d}5X>x20C`xN{1`YQR(_pSDQ=%?$K=GW*q>F?mb%>QfvHXt})YrtTjW*|4PA#gIt zDQHDdS1=_wD!4lMQHW`XIHV&K4h;(37J7f4!93x-wlEMD7`83!LAX));_x3Ma1r4V zH4%>^Z6cRPc1O{olA;bry^i*dE{nc5-*~=serJq)Okzw!%yg_zYWi`#ol25V;v^kU#wN!mA5MPH z3FFjqrcwe^cBM>m+1wr6XFN|{1#g`1#xLiOrMjh-r#?w@OWT$Wgg6&&5F%x&L(6hXP*!%2{VOVIa)adIsGCtQITk9vCHD^izmgw;`&@D zcVTY3gpU49^+=7S>!rha?s+wNZ}MaEj~6Hw2n%|am@e70WNfM5(r=exmT{MLF4tMU zX8G_6uNC`OLMu~NcCOM}Rk&(&wg2ivYe;J{*Zj2BdTsgISLt?eJQu}$~QLORDCnMIdyYynPb_W zEx0YhEw{FMY&}%2SiZD;WLxOA)(U1tamB0cN!u@1+E?z~LE0hRF;o>&)xJ}I=a!xC ztJAA*)_B)6@6y<{Y1i~_-tK`to_m`1YVIxB`);3L-|hYW`&(-bYby`n4&)tpTo+T< z{VnU;hI;k-lKKw^g$IWYMIP#EaB65ctZ}%k5pI+=jvq-pa_u{x@7kLzn)Wv{noEv? 
zqtc^Kzfb=D*0JDYoyS?nn|?6(VOI;SrMMMpUD7()mfkkh9^c-7BIrbChiga6kCs0k zJgIZC=9KcOveTr~g{NoFEIl)IR&;jaT-v#j&ZN$J=i|=b=!)p-y%2oi(nY_E=exbS z&s=i5bn>#xz3Ke>~2=f&N;yEFGz-^boBexUH6@}b7V+Mi8+ZXR+R zIyLMw-18{v(Y+Dw$g^K^e|bMz_?Y^*a!h-y;fd{&ljDBl*PbqTI{HlXY-Xb9SH)j< zJvV;-!*8Cy^-RW1j=m7TnEk!3v#nqp=cX#vRO_EIl@&@06A|hfXwn9MgSs%4(ZPia(t$kRnw$_^3 zmtSkYTD7fJTdh*9Qd$)i0;nihtbz{|5k&-fBq8A)lI-ricfS9Bc4oaf`>R^uVHL&BisP5c_{F$Vdhd=Dex{uc-7J( z4)Y~{UZ+CkcY#rXVPg z4(rLDfi{MOG(uWC?1r*4Ng61FtwTg2k#XSHXzIyjkgQL8Nf_^1&>m#CWq6qDebIlA z2COHefs8@NuQmok!?VU0HpD-K+HQ?7l(oww>x&W#^&NvcwuS1W@n;9^K_S-~knBe~ z06p$L_z%+n4MPv3FoMq`G#g{EL#Q$z?Xwg01fC~c6a?_5(S4W7Z)8IG+y3sHZe zks*8d4~e!f`DxPt4UvIM-pfNqpohjB*Hc3c+*5{NZ-l!|eb(s1hG^4PE-q>aTK05u z-|wec0~wD9L*!z@cDNmu?X@+Id1$QmWQg~aAyR*q7gz0ev6L-?l_l&sPbD#6-*BDQ z!>9UJcOne&iVh6X8pS%aw2x`+-spTi+(VWdI^iAl%>mq3Ay1x>X`hEQiug1}+Mv+! zcCJRxV-3%GA;xz8cCKmUN1Y?Krz`6nhoR1`2;sWJ^4PTX*5@0Bd*cRaPu6|;dr$+q zLV0ywj!J0|x-Ga_PRLQ-UKZO*o}}e7cgEz^*?#$`^r-6J2Y5GrqJZJ~W*FJg z5Q3dySI1$Q=oMYytj56zyQj#!j|Ebx6gS*gqq6=8G#naBF3|ZnlyR&n+||uuZ^jsO zFdFUahDFeN)b|SLSr5xQYNw^o2mMi??+Q8BB$LS{oumiaKA;A)_k;2#BiOl@U7Xl! 
z8|(QaI_gTJx?G-fZ)nvTT?{hLGjiIwp_MHhTRGQ5nBGPp%OhJp?fGCtm1x%z@V0a0 z7NlnYxvwG6c-h3bw)7K0|7M7u4+#E_^0Hmf=V1BYnPnAR48^TPV28HX$+m-6Kvg}A zy4vgDJ*~}5J&eUQ{UWQFk6p5e6Zt+W9f-i;xjk>k;fioS!WIPL**qlo zD%hctmGIRBLTBpE$8^Gawp@mwdB9+906!jYOB%k!WmyVC{csrM#Yhvf`$!M}7?w#- z2-9;|=e&C~=)c55KsJ3kch|q9%O(G=DL*8}hZXcgR;P`L3OiIu^)e9CKEVuNWhZDl|>70&#Ef2PZv{tr`srEprZQLGU)hQNQG4Kl40GlsG(JQvs zSgXXDzivg*!8{BZ-lO=rN$Y^N0M(a-?fy9q5#M~8w*zpK8>wHUO#bH$X$>X>>y z0D(ijZ4;7v7k1U4Jy|;&8(=;~kMIS~m_D8J!7uyY_5tgq5$+FXaWBF`^wnU4 z`mxx_PL-%?;h@EB8o=x9JR{`pL|}o|2EnC>r~o#;O+C1_K&3bs?p9&nZobLX^cLY` ziqmz<%rocCR=bm4Lxj5XF$~{q@@VKBA=qZd^oSdvr#~Skgxk%urbqk>76Nk7uLb=* z>Ebr@CQC<@k#%b0$Aais7~r1}coSTJ@GiEQxJ^U|%aG35+rKo#a2=4Kjj`}L8z+q4 zLE7>ifW|f;gjb(-H5iWVqrZC%PfOoH6??phZP)FGynit=Te-rrhSPdi0E>qkj5Hih zbAjc10r=X3yqXVam%%W`7^3#tZRsxoeUp*R(l0fm4_V}**R+QJAk=pm=;tHY?Ipb` zNB)fl#~Q!6+HBNT_=t_UeppKULvNAJN9 z`Ha00!O|ePcfmPpn(!e-;8n%Hr~+}FE9JNH=GMP1^5Tg+;f(f`xIYrezhKh%1xUHs z5%PTCKHPL-NtuKpDi||*fDLI6bQwQ40t7aY&=Ed6l)#POTAli2e+tk!_TB1T$<1=! zXd5~yvb`y3d^WK2J2K!f>s<7dp*?ynKZbE`J>?lj935FF%e3y{Vc$TShG^S^Z{;0oEOSzOOuf~8~)+uc_(5|a6UZZm4T9PyR+)ggT(LT`_3CM z%2OnanPPTekXiwr=Zymzw{xgKorgFVupDy$$cT}gAA6~CNcypmd+Hjk68aLlUQ#XzDqX7Bk3b}d0 z5SOJU#Q%?_s&YVn;)L=8(VDbSFiK06npjq8(~Lp$b2X(*`Bi0?^SU~HcQKg_ng@o3 z2N5{Q?r0If$+Mn7BRRsNm<|JF%g|xb>@>h2uXjZ3w_?oNEn+FWA{I12;-W@vj`YN8Yx7ntp;x>4 z?ypi+WsT9Ay6-yIWVf5m?4ax+SHjaCPkxxTa42s<;P6ab9wp;xn?jU5W%3cV z0T(!*MUDrfsl{=zFnyk(F9^}I3l#FlLN2bhw~!WTK6oIo+=A9UE;a)HP{av@2ynTR zfSHFsn=wK*(3ryc2g1-tqprh1I~ZlX5$jiZk=KmtT3j!I{Letn1%_7ZN~)@oA--~s zb7k}qQL}b4q-&#eHL2_%EMf+r#s*)h=iEFe-%t2wL~1rnSCu7Qf;?w|6Lp4rso-A` zr|UQ&Hs#5goAlPklS4$_i7P~+3l!L_)Ki9cSOJhF4Mr0Iq;eusz1-pjBUys{Hu%}{ z8S*_Hu}-ZADE3*Tk3*osGj}o4VKCzBtSRWYCy?TZ!mSd5<;OFNFQ8Mnn4S`RH1IqNu{IjS*-!$x&VpQ=S{;mFrzzQZ;cLqFEaaEB64&p)pSwPk4#TaEIYk5<<98#)H*6m3@K29nvzoN2$V`FA;UdH&BUO_d<3{s z#j+i~T+dscfNTn#0bEFYcgztDh$}@Pt9SA~8QhPMV8^oF{s;#cKFn(mh)a*Y3X6k( z8KkgRwEViGthXt*vAN7d7-=-n@WZNN9o^93md?g$><@m!^fasr1nb@Dp*Y(P8|xa)4`mC3tmV~vSWpcpfQVi4fUwl( 
z${o}S(+J+NLd?^@dakNd(_ozGhNcTX{I3=gN;(WI=xr#VRc0=(x&i2r=TO*aaI=k( zbo_Fz%?-8J+VY5dmR(oq^$V5ODZ<2Jv^IM;`ZPU((2i9&_(<@O&D1+fa7{ z=tm(v4uSN!I^!YWtplDOT7gprgIfe1NM`CJ9qG$~TaB3BlfZtdLty{G^{5Jz_eGkz zBZIIGNl&|r0`DHgM`~e^t7H5SLBQ+f3BS42bArK21)H(YGA=6RZ~Q=|Tu1gv;+uw! zuWpLYu0PqEP<8>RPsRlEwumd9@jdlFk(&Cu)%fCylyt6(M%`oH@`El*>RN^@-kcJz zZu6lrB{&{;f%V3tQVhs<#2|$7!iXv?bY=g1cjf+h?pn`NC61K9RB+H@2M#Q3g7aJh zPy9A)e>v6_Z_1e)-lfbIxQK$WepI8>%0uVh0!4ijys6~&hGgHja=x)$_1>tTvr;!u%%hi;Up~+Tt|Ac-;Lrxv!1kkFpBN>Pk%?lMrVUR@# zFmiO6guOvvB?e(SFerj1W1dQ-DI_BfyM&p<^_1{aL4sRTFwRGOSJp&l*8eNB@A=`4 z63n&I^56|lfRb>j4UKUw(47i}F9rTn27yZ734n8e;g?;e^^daWO@Z$0Y$O;X-=btT z`BNJfbSBE?n+%pwE<#wAJm8E;XSZn+>Df>k)@GCDHS;xZ#5)1n%b|HD4D!2(-`oiS zMrsyQ^Km22LizCsl!xuJply)4)(M--7!m}ioq`Bajkp>YdKj-=jr>vQ`~`^#@h`w* zJ?JP|ferBu@N;vC?s$=FgNU~vF|qWpyjdHTaF~^kuX+bX{}hfp`o?}SdOq=mc!B8C z6;+#NHlfC?O`dq8)emZ$J*OCQoY&zYQXeN1b^3ds|uXPR~L`$P-TsZ56HbrF78 zP>x__2;*4!UPd{GPU?Vp48+srh%M+!U}&6DoQ6Q}re#2^%Me@O{4)vJu7zw1k+!^T z{W^wj`tD?m zw>{$dc+0@fohVYf&@v%DP$d1a*oc|}Cw2x5^G;OyBW%Y1)JfF+0fwj_S?V$Bactj>AO4k3hPN#~@0Dc5$BLH+2yHrL^ZhAOaP zUJ8R;0r@!%&@zXSkS4#*gPxB8Jy{jMI9XLXoRq3kuZl8@2sN>6RH`a|3(KA{Wk~*b z+R;7Lrqe6DWmR$Yivx<(fie9^8$$7@q@(Z(o~nqs-hufMRn;rXdD?CRR{Q<%WC)9` zc&$360#AqHOH<>^##u&IetVrGwhHMD#l45lZTd9xDg4Ed8ONZ8h{^nJoQNhCWPdS!y2e_CGfCmL}Z3tw2815QL50^2iml zu3Ytnacy}Kbh_BoA8voTEsncBr)LavYLmvofY;GHd}KW#<7#}#2r;%{iI@k8>|k|LMX{W*zEORuA4?tRbEyTINM<%`EoA7A2 z8H8LO39SO6>kdJL-3jd#cIs@@XT1^)hrHVW=sCRRA>@<^yBZt<39JuI&j7DAKgc@L z~nJE4?h!#R!Bjj?H<0w~P@Sl&Z$^p}KLqnh+la zgG$M2%rSLddag;Fq{6$v3Wt7J>#x_gd~Ik!&F|l7nM}C`R;Sd;r0*oL z2SiUu|5%7Xqog5T6pxDYhjtNziy~s1D+UKC^*0<>dmPr$C_F!b%^Et-BIT%lGLTrj zh!L*DhIF1=ik_@gNpW3}^j^l}%7-yOnOgYt#&vRL9k(bhQdRNCAln1^!hI`IRd$T5 zsau-EAgNFeA15L{3W0428x1ZqK%=GZ9>zTF5yb3p=H&t^o5nW66cX10>)71Q^iKF#} znC;VT457u4E;oCe72tgYt}ip?ly9dxfb+_SF-ESiv6yu#Fl2IrlmEoR=h@U6?_EW& zqeeC)L~-%!ffw{YxO?=J*G?>z_pfRam%X}8#PeLyjIP_xbvsCJ-M>(b>=hI5G^K)H zy|vAq(WpQ1@g2>(ogY4m6orukf);ObAmryGQF$m<*GD*kIzo!bmGGGNM3nnmIeYyY 
zSyTV0n$mZemr6Yz5$X-hcSqof(rY1uq;jk=eZy2q1?MtG;!$rKU;y7XxTMeS5Ee0? zjpUZf!Nw~w--1>E$~Zi(Gnve@VCgx$4>g(0vtVUhfs9=A7LN~?k2Lj@#u_8*(rv`l zr=uc5es4?-KbZ8Ze>eUa{mbQ$<@vBZ%Ek%n;H%&_&RQ2&zMH^wwm zXjv^s6&QNOqx#Vt^Q$rUSXf*pY8vb1VlMX^dABj#IDJ%)1(zJqeQZUSh=1yWP40d) zNN~BdJSMiJ0x@>sR-o|#iz9mFxrc5_`1FLS9oRgNNVgZ}jIb>k;L+g@EX^9_+y)XX zVo;R>{@0hyXQA(Zi2cgJ@R+3-h_CTe{zdSVH)DhHFY@uVYk_;XH?edYwxs@r`R<`| zPQyE#VjPb!5Azx*l`H7aB3LCM*#^S-Uq#7!1lu`n5X}X9fiwrAL;8mDOi~*#cFE-H z$k>iRBMDoC_CD}1!_cwihA7jsxF&DHs|Xxya>=4`P)ED-MbNFsHqA=J1j{ph)%Mr& z=O)8RHub?6ZH>c z#62iAzH|aTVavp_0kT?Pbd6Ssi4{Z1jjLB4mSFnxFKv1BYtPh+&y^R5dyeWM)+Bs! zM4=;|YD|eAuEdjQNYXFn${rCZ21jIk(dUY<#d*owW73_9J?@7bN6v7SeA*A>YrY#> zjurE4e?sXE$qDh%a^9*|Jf^(QjpU8OCi(O9&wf(1z%Yv=1>5|6HaUW(sLlQ3j&oN z23-z*oUakG`$Kmc4Dn!;L*^P&VDrLw zOzXDdsmnoV&-1vx6JabuZv@hCzDeL->t7MR1aCKn9vNX043XZOpmmr|ruBg`jtuqL zjW;;$M`)d*k?EmW$?IVx_o^wq`{d&t8Ju@KKQ_B|v9J8&){N;}z}0Khiu8Wf*9XOB z-Bq7#x%&A=cS=Qp`f|T~@zOR=)MLI_f~P87Aj$ilkT4)Wp6?X>v#zyVbdCLf2Olpw zva;EWj&QL{kH<5YLHOyA@EDvHs)nJ?hhaX0ZJck)C*I$TRpktJksFZ(kW_fh-Y;>Q z=mxfkCzKdRX4hYgDaL%yk4{}ZxnECp`2oG}Tr4i=8<9uwNhMII_-x?fg!$8896vRM zVZ~+}p+!xMkraYUUy>8E|>HdRp&@&RU4awFu06N<*jgqO~ zTP;T(kf&1sxQ0$(y zhh?z*?9G7UoxsXb1UaXol#c*8{8AtGrsWpb66j_E@?JOjET@6;e;#>YqJn9FtuvDb ztsWV|f@#do{?H)#W?^i0{cTdJ>oE^|62?@IhsejFwy&V_VXCU)h~qJYFFiOd)S?#g z+d-Hset$`m+qlhBHyv0gux1gxAO&x+QpC-HaPU+RSMtPFC&n*Ulgl6RQiU)f5h92XM>;#dDAhMeG}U`=SC76M&o(;lQ2#FQ3CXWjLmF# z#m7D+Jor6qXJr4brzi8BK{d5IzVZE0J&R#BK`oa1Wl%BYF`x|*3VRjGJ`H&shS&)d zvNI3|QZBEc4F`I96XoBdUQ z!wKvp_Jz(vOyD*5=V(Yhn9c%?3%NIz}4j?|;T8MDI zDbK}YB@AkTDW})W25XFA`Lt}TAIPgopc~A@hXF{XxByUr#wC*iuwikvBZ47G#s>Usn>5E%Uedk(W0mgNeoP<`|kecf6ucR4h2EyBOFdBJWtf zO&m0*QRY2a|B#$rH&f1Bw_fy)_r>b?c&}^nazAjO9N8P5@MSmfFK`nDmq+K+v2G1o zw#AE6i1l$Y%E>$yLJLdWd}qq~38jC#dfASvm*E-t!z*^Ug9}{o>hZm`JlOkG{i7m+ zo_Mh!2quE87J)+#vN{E*4i$djm9pt<`5T5?+Jj@Xp*OcRfc{96CcUOX%y!WDIN!2E zye(rkf!D-~fd>~r*`Rd+`u9+<#T4qi5TccX+crF%POy1QTOO2wJXmIVuzuU-5lCl) zrF{nlTM-;8_alQ|%+Mg`>60?wRwu^)gaf)aVaOf_ovg#-xN3J+UG*ZYSdUyRmSJ9a 
zB_@c!pN#_&1B%!OLVf|8-ov^^#eJ)Hh&yX>EF2AqN1Yxx+Wf%V-#_+P@#4~AFOdDA z;vnM7>06SC%{}LB(MnV}#DN37l~~AN&M0x1T)l-2^IFVfSLCVu27E2NMKm=>MJm5X z0 zH0h-NWjX5{0v?>_5twJe%EM^_+tseehj$)=a<-p!gpCoGs6?svb)xN$t$#mc4>m>N z`ZXWWzG%zBg>6}Vh%5FeUkTuRlH}gRG7fF@G?(Ma$xHYWMRheAgAf$D?r$;qx(usm z4^p`Y6*%$*9K0nA#AaeEnALj9AJGM`rJUEbYbWxyS`>T%Z|m|(NNhrDt$VH zYa+NhC=wP5H5R)CuEX&cLE#K!01xPV`=w3tZ`fiwzi*y@`-q-yHyrp~xh=J2g&&Nm znz!+-XDUTL=Dw|=s&|#7qSP*=aGsZo$JfB) z4wU6GMx6&^yaeTcK*%;mb{H2gzXA`AfVSO8rf*)*a-RKngmB#~x7Q~@2an_0>V{=| z4mix9ri`I;KM$+e+n7(>YVMFVQWgB?vCzxEmtm%ZfUVNxZ?A3*2xAn9WvlICb zt(EAE@W~i^>c^g%Sx7uRpGKUjD!o~qUZL>x=oRJzPV5O?fJgY04zKO0m)U8woWEKI z;7&0zg!zQ|oouKzeikRo;~c}o)|=f9?kf^#@a$jGY2yOjV}{nYIV^|AeM{gH_RKJy z=Jmm^o=380VGw-vRZiQqO`T9N-gEs~P;?cX($Df|ZU(VVFc9#w)kk4<#ehcQ51M{5g-c4a45CmUUQ%c9+VCD6mx2-5SK3<2zwlb-_ zx4Zc2HjwS0B$NS9>|R5X!NNC?D76<9@xrp0-#g!pJkgMH`p4W?f4Q>xi@!L$N4~cM zF9=1Q1M(x{X)MCNf{$SF?%l1!y6_gqz$zwB9N^xipR^@Mq%#Tfo%M z;eq0eVXxvQ;Cwf1EXdOEmWjh2$z-J?)ZYTn#bKDkY&Li(Z_3J~oR+^Xf+JXowlI(N zFt%;6@n?|N3t^KfqsQdp>q0~)F%8hhnNDVYH{o?#b~Qof5-?r#5q}}264mKO(W^)t z@4|V*iBZ>IkmtzR_}baz{5cIzLeF1&YO`2z>w)<aX_f(p*5}R z&Rx0YwJu@EUG1cnmku527{oKhuW8_RQ;W{>b4BHkI>J$QjMzrhMC8_(^Ay>!$zZ0CNiM zG5bVhWh0tqd)(sM7)EC4bD?M%xQ7X2+6Y;=#S}Iqruk%sMntg2q75mXrH5r6jC~V4 z-Ug5Tkx#z%#vl!^D`0rHW4OH<=?w^cCHISnZ$;n~f_PmK!u8`a{dA5A0G|baJfNT2 zaI+le9KEDJ2d}n{NF~*`Vsq+$LkS)o(R1v89`5wv&u)(2I5a<~>Qg93KeH()iwc)p zK9jq<;&-Ea2K~CloD|NWz1x)XmhSN3x*V?z^T2{UH-dM$(v9IwtdmkSGkEeqt}h`~pqNrad|q7kv2 z8~HSx_f0;dFh3d|&vxLE2!ftSo?6{57in z7B*B6x-Myu0a1X;TCU;tG9BgDxDR#F63#}zq8Ke;uv2=5gA z43Fi%N7KNIYWKzPbZBVI{o?z+_)9JeGyJxr6OTdq4Fp}M$qz>~Hde=%|NjXC!3gm| z8^Y>V@FMX2CgQaSq$iHn3EITD;n@fT-XP4kHpFXd=t!basT(o8-vBxq zOpapH`v1JD>oK*J~pvSO$ahT-7(`;slw4KGOjT;Wq*#%YkbHm8R1# zgKsqghZ_r2ct$3h$v7-4WV{&fi#Mw&+%M|>(Q(}t(5(#7lFxhKyAd%x zxt;&A4efy~>kB&jr<`^Hk}F;WMl=$FH8dOZ^D4gtpYQ@G7mr5BcUhW6`WM&y>t_CG zQZ9TxpRcx+gz5HrT2oZ=OkI<1jZq7q;@2Qt~Q%5a8O_+zs~`A)HN<%U+DR zPF&J-?BocVH$Ps07TgL&-eC^% 
zYCft`L{5IKpkjM#OOSL$61NcR5xokFB(^c-EvuV3T(){ARh;akf_t!WpNByg(}3W# z$m|V|kY3IcWd+brOL}P4^qP74hc^%9uSKBe+Z7OBx05S;c&yWLeHcO^0(HpQ%d5zL z5ngdVuImw4C(A6bK3x}{HID$^nV=bkyzU6ZO(3iXj|K3C|3G@J$w!poGX%QAi*s@) z@*;>^fV&)li^Db9jU?e72ec?t4-&e8cv>1wpLxV2q^k5wK>0tET+ZQ7oyikFYg^zo zuR`7=%-8P1v7U8ncK8J~^~o*=Vf-DlU;g!PZubs5eyb@+RmAYedtR^&o!W@7 z247a9+h32MH^xJaV#iR1&U9ji%nsWMr;&ngsT;uHTR5bV2V)ez7p%qg1K5MQFAt~4 zGeqqq*gnx3T&q0)hkQ|-fXAm-xHWaR^F|E~`R#D{P}vz{3Q==dr7qI*p$`fu6!Wuz z`3zy*@!ltI-e6(Kt}pvKjLS6lg!dkxHF6p@!TELET($7qK3+ur6+(=1g)>Rk)XoPD zg$nrYHFg_sP`>)L<2n~goN<&9$49f5V&(j@-x{3bc8{HgCD9M%-`D*(7k`sSC9s`T zt_`MW_abYMq*4>gN8)wvA7L6f8DApMoH3tlMGa45t7$&gJBw1vUEbohay5zPqi$iu zcMidwZ~}xm8%Nv*P#P?XsGG0quW+R0q_C8eGBb1pVWa2>&z4!9jKh7xYwC%>x_8Cb z>XdlIw_x+z<6+A!zKu2ht{Q8RcRL1m?yzM9RVVbsR!q#pe%%f-!}lI9#jwnkG+y2k zQ}{7^=3`H<2%BF=AvqZzYq>XXc0DKP8ZeoO_B|{fYw1h49p+^U{1I%uq-!hxvS19=g6TqumjpLtIFt z4N+Y(GU~m*_9`qn&c$d}fXqUawV+aLv8n!;O5KR>_56g)l9Ngf$8(m6_^$Y1OeRav z!MEcq;v9wflbB~d_>@(vF3T$}(f`7bRkpV;_2MS~); z5zrGpjr*tX6+^lWcgU(Xyi=e$GkGT%i+Y5 z?zK3VA+=5{-rto@e|&>z2wq()#t4YfIoJkUd>8A`<#KMVe)2-0a?$TKVgJn;?-gSV zx0%n6Q>qE`KZUQSiUaWFr+Z@li>a:hover,.dropdown-menu>li>a:focus{background-image:-webkit-linear-gradient(top,#f5f5f5 0,#e8e8e8 100%);background-image:linear-gradient(to bottom,#f5f5f5 0,#e8e8e8 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', GradientType=0);background-color:#e8e8e8}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{background-image:-webkit-linear-gradient(top,#428bca 0,#357ebd 100%);background-image:linear-gradient(to bottom,#428bca 0,#357ebd 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff428bca', endColorstr='#ff357ebd', GradientType=0);background-color:#357ebd}.navbar-default{background-image:-webkit-linear-gradient(top,#fff 0,#f8f8f8 100%);background-image:linear-gradient(to bottom,#fff 0,#f8f8f8 
100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff', endColorstr='#fff8f8f8', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);border-radius:4px;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 5px rgba(0,0,0,.075);box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 5px rgba(0,0,0,.075)}.navbar-default .navbar-nav>.active>a{background-image:-webkit-linear-gradient(top,#ebebeb 0,#f3f3f3 100%);background-image:linear-gradient(to bottom,#ebebeb 0,#f3f3f3 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff3f3f3', GradientType=0);-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,.075);box-shadow:inset 0 3px 9px rgba(0,0,0,.075)}.navbar-brand,.navbar-nav>li>a{text-shadow:0 1px 0 rgba(255,255,255,.25)}.navbar-inverse{background-image:-webkit-linear-gradient(top,#3c3c3c 0,#222 100%);background-image:linear-gradient(to bottom,#3c3c3c 0,#222 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff3c3c3c', endColorstr='#ff222222', GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.navbar-inverse .navbar-nav>.active>a{background-image:-webkit-linear-gradient(top,#222 0,#282828 100%);background-image:linear-gradient(to bottom,#222 0,#282828 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff222222', endColorstr='#ff282828', GradientType=0);-webkit-box-shadow:inset 0 3px 9px rgba(0,0,0,.25);box-shadow:inset 0 3px 9px rgba(0,0,0,.25)}.navbar-inverse .navbar-brand,.navbar-inverse .navbar-nav>li>a{text-shadow:0 -1px 0 rgba(0,0,0,.25)}.navbar-static-top,.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}.alert{text-shadow:0 1px 0 rgba(255,255,255,.2);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.25),0 1px 2px rgba(0,0,0,.05);box-shadow:inset 0 1px 0 rgba(255,255,255,.25),0 1px 
2px rgba(0,0,0,.05)}.alert-success{background-image:-webkit-linear-gradient(top,#dff0d8 0,#c8e5bc 100%);background-image:linear-gradient(to bottom,#dff0d8 0,#c8e5bc 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffc8e5bc', GradientType=0);border-color:#b2dba1}.alert-info{background-image:-webkit-linear-gradient(top,#d9edf7 0,#b9def0 100%);background-image:linear-gradient(to bottom,#d9edf7 0,#b9def0 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffb9def0', GradientType=0);border-color:#9acfea}.alert-warning{background-image:-webkit-linear-gradient(top,#fcf8e3 0,#f8efc0 100%);background-image:linear-gradient(to bottom,#fcf8e3 0,#f8efc0 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fff8efc0', GradientType=0);border-color:#f5e79e}.alert-danger{background-image:-webkit-linear-gradient(top,#f2dede 0,#e7c3c3 100%);background-image:linear-gradient(to bottom,#f2dede 0,#e7c3c3 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffe7c3c3', GradientType=0);border-color:#dca7a7}.progress{background-image:-webkit-linear-gradient(top,#ebebeb 0,#f5f5f5 100%);background-image:linear-gradient(to bottom,#ebebeb 0,#f5f5f5 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffebebeb', endColorstr='#fff5f5f5', GradientType=0)}.progress-bar{background-image:-webkit-linear-gradient(top,#428bca 0,#3071a9 100%);background-image:linear-gradient(to bottom,#428bca 0,#3071a9 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff428bca', endColorstr='#ff3071a9', GradientType=0)}.progress-bar-success{background-image:-webkit-linear-gradient(top,#5cb85c 0,#449d44 100%);background-image:linear-gradient(to 
bottom,#5cb85c 0,#449d44 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5cb85c', endColorstr='#ff449d44', GradientType=0)}.progress-bar-info{background-image:-webkit-linear-gradient(top,#5bc0de 0,#31b0d5 100%);background-image:linear-gradient(to bottom,#5bc0de 0,#31b0d5 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de', endColorstr='#ff31b0d5', GradientType=0)}.progress-bar-warning{background-image:-webkit-linear-gradient(top,#f0ad4e 0,#ec971f 100%);background-image:linear-gradient(to bottom,#f0ad4e 0,#ec971f 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff0ad4e', endColorstr='#ffec971f', GradientType=0)}.progress-bar-danger{background-image:-webkit-linear-gradient(top,#d9534f 0,#c9302c 100%);background-image:linear-gradient(to bottom,#d9534f 0,#c9302c 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9534f', endColorstr='#ffc9302c', GradientType=0)}.list-group{border-radius:4px;-webkit-box-shadow:0 1px 2px rgba(0,0,0,.075);box-shadow:0 1px 2px rgba(0,0,0,.075)}.list-group-item.active,.list-group-item.active:hover,.list-group-item.active:focus{text-shadow:0 -1px 0 #3071a9;background-image:-webkit-linear-gradient(top,#428bca 0,#3278b3 100%);background-image:linear-gradient(to bottom,#428bca 0,#3278b3 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff428bca', endColorstr='#ff3278b3', GradientType=0);border-color:#3278b3}.panel{-webkit-box-shadow:0 1px 2px rgba(0,0,0,.05);box-shadow:0 1px 2px rgba(0,0,0,.05)}.panel-default>.panel-heading{background-image:-webkit-linear-gradient(top,#f5f5f5 0,#e8e8e8 100%);background-image:linear-gradient(to bottom,#f5f5f5 0,#e8e8e8 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5', endColorstr='#ffe8e8e8', 
GradientType=0)}.panel-primary>.panel-heading{background-image:-webkit-linear-gradient(top,#428bca 0,#357ebd 100%);background-image:linear-gradient(to bottom,#428bca 0,#357ebd 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff428bca', endColorstr='#ff357ebd', GradientType=0)}.panel-success>.panel-heading{background-image:-webkit-linear-gradient(top,#dff0d8 0,#d0e9c6 100%);background-image:linear-gradient(to bottom,#dff0d8 0,#d0e9c6 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffdff0d8', endColorstr='#ffd0e9c6', GradientType=0)}.panel-info>.panel-heading{background-image:-webkit-linear-gradient(top,#d9edf7 0,#c4e3f3 100%);background-image:linear-gradient(to bottom,#d9edf7 0,#c4e3f3 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffd9edf7', endColorstr='#ffc4e3f3', GradientType=0)}.panel-warning>.panel-heading{background-image:-webkit-linear-gradient(top,#fcf8e3 0,#faf2cc 100%);background-image:linear-gradient(to bottom,#fcf8e3 0,#faf2cc 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffcf8e3', endColorstr='#fffaf2cc', GradientType=0)}.panel-danger>.panel-heading{background-image:-webkit-linear-gradient(top,#f2dede 0,#ebcccc 100%);background-image:linear-gradient(to bottom,#f2dede 0,#ebcccc 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2dede', endColorstr='#ffebcccc', GradientType=0)}.well{background-image:-webkit-linear-gradient(top,#e8e8e8 0,#f5f5f5 100%);background-image:linear-gradient(to bottom,#e8e8e8 0,#f5f5f5 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffe8e8e8', endColorstr='#fff5f5f5', GradientType=0);border-color:#dcdcdc;-webkit-box-shadow:inset 0 1px 3px rgba(0,0,0,.05),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 3px rgba(0,0,0,.05),0 
1px 0 rgba(255,255,255,.1)} \ No newline at end of file diff --git a/qiita_pet/static/vendor/css/bootstrap.min.css b/qiita_pet/static/vendor/css/bootstrap.min.css new file mode 100644 index 000000000..679272d25 --- /dev/null +++ b/qiita_pet/static/vendor/css/bootstrap.min.css @@ -0,0 +1,7 @@ +/*! + * Bootstrap v3.1.1 (http://getbootstrap.com) + * Copyright 2011-2014 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + */ + +/*! normalize.css v3.0.0 | MIT License | git.io/normalize */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background:0 0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{font-size:2em;margin:.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{-moz-box-sizing:content-box;box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html 
input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type=checkbox],input[type=radio]{box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid silver;margin:0 2px;padding:.35em .625em .75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:700}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}@media print{*{text-shadow:none!important;color:#000!important;background:transparent!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100%!important}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}select{background:#fff!important}.navbar{display:none}.table td,.table th{background-color:#fff!important}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px solid #000}.table{border-collapse:collapse!important}.table-bordered th,.table-bordered td{border:1px solid #ddd!important}}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}:before,:after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:62.5%;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica 
Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}input,button,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#428bca;text-decoration:none}a:hover,a:focus{color:#2a6496;text-decoration:underline}a:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.img-responsive,.thumbnail>img,.thumbnail a>img,.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;transition:all .2s ease-in-out;display:inline-block;max-width:100%;height:auto}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0,0,0,0);border:0}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small,.h1 small,.h2 small,.h3 small,.h4 small,.h5 small,.h6 small,h1 .small,h2 .small,h3 .small,h4 .small,h5 .small,h6 .small,.h1 .small,.h2 .small,.h3 .small,.h4 .small,.h5 .small,.h6 .small{font-weight:400;line-height:1;color:#999}h1,.h1,h2,.h2,h3,.h3{margin-top:20px;margin-bottom:10px}h1 small,.h1 small,h2 small,.h2 small,h3 small,.h3 small,h1 .small,.h1 .small,h2 .small,.h2 .small,h3 .small,.h3 .small{font-size:65%}h4,.h4,h5,.h5,h6,.h6{margin-top:10px;margin-bottom:10px}h4 small,.h4 small,h5 small,.h5 small,h6 small,.h6 small,h4 .small,.h4 .small,h5 .small,.h5 .small,h6 .small,.h6 .small{font-size:75%}h1,.h1{font-size:36px}h2,.h2{font-size:30px}h3,.h3{font-size:24px}h4,.h4{font-size:18px}h5,.h5{font-size:14px}h6,.h6{font-size:12px}p{margin:0 0 
10px}.lead{margin-bottom:20px;font-size:16px;font-weight:200;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}small,.small{font-size:85%}cite{font-style:normal}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-muted{color:#999}.text-primary{color:#428bca}a.text-primary:hover{color:#3071a9}.text-success{color:#3c763d}a.text-success:hover{color:#2b542c}.text-info{color:#31708f}a.text-info:hover{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:hover{color:#66512c}.text-danger{color:#a94442}a.text-danger:hover{color:#843534}.bg-primary{color:#fff;background-color:#428bca}a.bg-primary:hover{background-color:#3071a9}.bg-success{background-color:#dff0d8}a.bg-success:hover{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:hover{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:hover{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:hover{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ul,ol{margin-top:0;margin-bottom:10px}ul ul,ol ul,ul ol,ol ol{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none;margin-left:-5px}.list-inline>li{display:inline-block;padding-left:5px;padding-right:5px}dl{margin-top:0;margin-bottom:20px}dt,dd{line-height:1.42857143}dt{font-weight:700}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #999}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote p:last-child,blockquote ul:last-child,blockquote ol:last-child{margin-bottom:0}blockquote footer,blockquote 
small,blockquote .small{display:block;font-size:80%;line-height:1.42857143;color:#999}blockquote footer:before,blockquote small:before,blockquote .small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0;text-align:right}.blockquote-reverse footer:before,blockquote.pull-right footer:before,.blockquote-reverse small:before,blockquote.pull-right small:before,.blockquote-reverse .small:before,blockquote.pull-right .small:before{content:''}.blockquote-reverse footer:after,blockquote.pull-right footer:after,.blockquote-reverse small:after,blockquote.pull-right small:after,.blockquote-reverse .small:after,blockquote.pull-right .small:after{content:'\00A0 \2014'}blockquote:before,blockquote:after{content:""}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;white-space:nowrap;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;box-shadow:inset 0 -1px 0 rgba(0,0,0,.25)}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;word-break:break-all;word-wrap:break-word;color:#333;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media 
(min-width:1200px){.container{width:1170px}}.container-fluid{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.row{margin-left:-15px;margin-right:-15px}.col-xs-1,.col-sm-1,.col-md-1,.col-lg-1,.col-xs-2,.col-sm-2,.col-md-2,.col-lg-2,.col-xs-3,.col-sm-3,.col-md-3,.col-lg-3,.col-xs-4,.col-sm-4,.col-md-4,.col-lg-4,.col-xs-5,.col-sm-5,.col-md-5,.col-lg-5,.col-xs-6,.col-sm-6,.col-md-6,.col-lg-6,.col-xs-7,.col-sm-7,.col-md-7,.col-lg-7,.col-xs-8,.col-sm-8,.col-md-8,.col-lg-8,.col-xs-9,.col-sm-9,.col-md-9,.col-lg-9,.col-xs-10,.col-sm-10,.col-md-10,.col-lg-10,.col-xs-11,.col-sm-11,.col-md-11,.col-lg-11,.col-xs-12,.col-sm-12,.col-md-12,.col-lg-12{position:relative;min-height:1px;padding-left:15px;padding-right:15px}.col-xs-1,.col-xs-2,.col-xs-3,.col-xs-4,.col-xs-5,.col-xs-6,.col-xs-7,.col-xs-8,.col-xs-9,.col-xs-10,.col-xs-11,.col-xs-12{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:0}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:0}.col-xs-offset-12{margin-left:1
00%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media (min-width:768px){.col-sm-1,.col-sm-2,.col-sm-3,.col-sm-4,.col-sm-5,.col-sm-6,.col-sm-7,.col-sm-8,.col-sm-9,.col-sm-10,.col-sm-11,.col-sm-12{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:0}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:0}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.
col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1,.col-md-2,.col-md-3,.col-md-4,.col-md-5,.col-md-6,.col-md-7,.col-md-8,.col-md-9,.col-md-10,.col-md-11,.col-md-12{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:0}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:0}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media 
(min-width:1200px){.col-lg-1,.col-lg-2,.col-lg-3,.col-lg-4,.col-lg-5,.col-lg-6,.col-lg-7,.col-lg-8,.col-lg-9,.col-lg-10,.col-lg-11,.col-lg-12{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:0}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:0}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{max-width:100%;background-color:transparent}th{text-align:left}.table{width:100%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.
42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>th,.table>caption+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>td,.table>thead:first-child>tr:first-child>td{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>thead>tr>th,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>tbody>tr>td,.table-condensed>tfoot>tr>td{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>tbody>tr>td,.table-bordered>tfoot>tr>td{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>thead>tr>td{border-bottom-width:2px}.table-striped>tbody>tr:nth-child(odd)>td,.table-striped>tbody>tr:nth-child(odd)>th{background-color:#f9f9f9}.table-hover>tbody>tr:hover>td,.table-hover>tbody>tr:hover>th{background-color:#f5f5f5}table col[class*=col-]{position:static;float:none;display:table-column}table td[class*=col-],table 
th[class*=col-]{position:static;float:none;display:table-cell}.table>thead>tr>td.active,.table>tbody>tr>td.active,.table>tfoot>tr>td.active,.table>thead>tr>th.active,.table>tbody>tr>th.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>tbody>tr.active>td,.table>tfoot>tr.active>td,.table>thead>tr.active>th,.table>tbody>tr.active>th,.table>tfoot>tr.active>th{background-color:#f5f5f5}.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover,.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr.active:hover>th{background-color:#e8e8e8}.table>thead>tr>td.success,.table>tbody>tr>td.success,.table>tfoot>tr>td.success,.table>thead>tr>th.success,.table>tbody>tr>th.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>tbody>tr.success>td,.table>tfoot>tr.success>td,.table>thead>tr.success>th,.table>tbody>tr.success>th,.table>tfoot>tr.success>th{background-color:#dff0d8}.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover,.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr.success:hover>th{background-color:#d0e9c6}.table>thead>tr>td.info,.table>tbody>tr>td.info,.table>tfoot>tr>td.info,.table>thead>tr>th.info,.table>tbody>tr>th.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>tbody>tr.info>td,.table>tfoot>tr.info>td,.table>thead>tr.info>th,.table>tbody>tr.info>th,.table>tfoot>tr.info>th{background-color:#d9edf7}.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover,.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr.info:hover>th{background-color:#c4e3f3}.table>thead>tr>td.warning,.table>tbody>tr>td.warning,.table>tfoot>tr>td.warning,.table>thead>tr>th.warning,.table>tbody>tr>th.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>tbody>tr.warning>td,.table>tfoot>tr.warning>td,.table>thead>tr.warning>th,.table>tbody>tr.warning>th,.table>tfoot>tr.warning>th{background-color:#fcf8e3}.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th
.warning:hover,.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr.warning:hover>th{background-color:#faf2cc}.table>thead>tr>td.danger,.table>tbody>tr>td.danger,.table>tfoot>tr>td.danger,.table>thead>tr>th.danger,.table>tbody>tr>th.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>tbody>tr.danger>td,.table>tfoot>tr.danger>td,.table>thead>tr.danger>th,.table>tbody>tr.danger>th,.table>tfoot>tr.danger>th{background-color:#f2dede}.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover,.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr.danger:hover>th{background-color:#ebcccc}@media (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;overflow-x:scroll;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid #ddd;-webkit-overflow-scrolling:touch}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>thead>tr>th,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tfoot>tr>td{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>thead>tr>th:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.table-responsive>.table-bordered>thead>tr>th:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>th,.t
able-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}}fieldset{padding:0;margin:0;border:0;min-width:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;margin-bottom:5px;font-weight:700}input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=radio],input[type=checkbox]{margin:4px 0 0;margin-top:1px \9;line-height:normal}input[type=file]{display:block}input[type=range]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type=file]:focus,input[type=radio]:focus,input[type=checkbox]:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s,box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 8px rgba(102,175,233,.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control[disabled],.form-control[readonly],fieldset[disabled] 
.form-control{cursor:not-allowed;background-color:#eee;opacity:1}textarea.form-control{height:auto}input[type=search]{-webkit-appearance:none}input[type=date]{line-height:34px}.form-group{margin-bottom:15px}.radio,.checkbox{display:block;min-height:20px;margin-top:10px;margin-bottom:10px;padding-left:20px}.radio label,.checkbox label{display:inline;font-weight:400;cursor:pointer}.radio input[type=radio],.radio-inline input[type=radio],.checkbox input[type=checkbox],.checkbox-inline input[type=checkbox]{float:left;margin-left:-20px}.radio+.radio,.checkbox+.checkbox{margin-top:-5px}.radio-inline,.checkbox-inline{display:inline-block;padding-left:20px;margin-bottom:0;vertical-align:middle;font-weight:400;cursor:pointer}.radio-inline+.radio-inline,.checkbox-inline+.checkbox-inline{margin-top:0;margin-left:10px}input[type=radio][disabled],input[type=checkbox][disabled],.radio[disabled],.radio-inline[disabled],.checkbox[disabled],.checkbox-inline[disabled],fieldset[disabled] input[type=radio],fieldset[disabled] input[type=checkbox],fieldset[disabled] .radio,fieldset[disabled] .radio-inline,fieldset[disabled] .checkbox,fieldset[disabled] .checkbox-inline{cursor:not-allowed}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}textarea.input-sm,select[multiple].input-sm{height:auto}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.33;border-radius:6px}select.input-lg{height:46px;line-height:46px}textarea.input-lg,select[multiple].input-lg{height:auto}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.has-feedback .form-control-feedback{position:absolute;top:25px;right:0;display:block;width:34px;height:34px;line-height:34px;text-align:center}.has-success .help-block,.has-success .control-label,.has-success .radio,.has-success .checkbox,.has-success .radio-inline,.has-success .checkbox-inline{color:#3c763d}.has-success 
.form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;border-color:#3c763d;background-color:#dff0d8}.has-success .form-control-feedback{color:#3c763d}.has-warning .help-block,.has-warning .control-label,.has-warning .radio,.has-warning .checkbox,.has-warning .radio-inline,.has-warning .checkbox-inline{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;border-color:#8a6d3b;background-color:#fcf8e3}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .help-block,.has-error .control-label,.has-error .radio,.has-error .checkbox,.has-error .radio-inline,.has-error .checkbox-inline{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;border-color:#a94442;background-color:#f2dede}.has-error .form-control-feedback{color:#a94442}.form-control-static{margin-bottom:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline 
.form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .radio,.form-inline .checkbox{display:inline-block;margin-top:0;margin-bottom:0;padding-left:0;vertical-align:middle}.form-inline .radio input[type=radio],.form-inline .checkbox input[type=checkbox]{float:none;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .control-label,.form-horizontal .radio,.form-horizontal .checkbox,.form-horizontal .radio-inline,.form-horizontal .checkbox-inline{margin-top:0;margin-bottom:0;padding-top:7px}.form-horizontal .radio,.form-horizontal .checkbox{min-height:27px}.form-horizontal .form-group{margin-left:-15px;margin-right:-15px}.form-horizontal .form-control-static{padding-top:7px}@media (min-width:768px){.form-horizontal .control-label{text-align:right}}.form-horizontal .has-feedback .form-control-feedback{top:0;right:15px}.btn{display:inline-block;margin-bottom:0;font-weight:400;text-align:center;vertical-align:middle;cursor:pointer;background-image:none;border:1px solid transparent;white-space:nowrap;padding:6px 12px;font-size:14px;line-height:1.42857143;border-radius:4px;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn:focus,.btn:active:focus,.btn.active:focus{outline:thin dotted;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn:hover,.btn:focus{color:#333;text-decoration:none}.btn:active,.btn.active{outline:0;background-image:none;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;pointer-events:none;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default:hover,.btn-default:focus,.btn-default:active,.btn-default.active,.open 
.dropdown-toggle.btn-default{color:#333;background-color:#ebebeb;border-color:#adadad}.btn-default:active,.btn-default.active,.open .dropdown-toggle.btn-default{background-image:none}.btn-default.disabled,.btn-default[disabled],fieldset[disabled] .btn-default,.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] .btn-default:focus,.btn-default.disabled:active,.btn-default[disabled]:active,fieldset[disabled] .btn-default:active,.btn-default.disabled.active,.btn-default[disabled].active,fieldset[disabled] .btn-default.active{background-color:#fff;border-color:#ccc}.btn-default .badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#428bca;border-color:#357ebd}.btn-primary:hover,.btn-primary:focus,.btn-primary:active,.btn-primary.active,.open .dropdown-toggle.btn-primary{color:#fff;background-color:#3276b1;border-color:#285e8e}.btn-primary:active,.btn-primary.active,.open .dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled,.btn-primary[disabled],fieldset[disabled] .btn-primary,.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled:active,.btn-primary[disabled]:active,fieldset[disabled] .btn-primary:active,.btn-primary.disabled.active,.btn-primary[disabled].active,fieldset[disabled] .btn-primary.active{background-color:#428bca;border-color:#357ebd}.btn-primary .badge{color:#428bca;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success:hover,.btn-success:focus,.btn-success:active,.btn-success.active,.open .dropdown-toggle.btn-success{color:#fff;background-color:#47a447;border-color:#398439}.btn-success:active,.btn-success.active,.open 
.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled,.btn-success[disabled],fieldset[disabled] .btn-success,.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] .btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled:active,.btn-success[disabled]:active,fieldset[disabled] .btn-success:active,.btn-success.disabled.active,.btn-success[disabled].active,fieldset[disabled] .btn-success.active{background-color:#5cb85c;border-color:#4cae4c}.btn-success .badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info:hover,.btn-info:focus,.btn-info:active,.btn-info.active,.open .dropdown-toggle.btn-info{color:#fff;background-color:#39b3d7;border-color:#269abc}.btn-info:active,.btn-info.active,.open .dropdown-toggle.btn-info{background-image:none}.btn-info.disabled,.btn-info[disabled],fieldset[disabled] .btn-info,.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled:active,.btn-info[disabled]:active,fieldset[disabled] .btn-info:active,.btn-info.disabled.active,.btn-info[disabled].active,fieldset[disabled] .btn-info.active{background-color:#5bc0de;border-color:#46b8da}.btn-info .badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning:hover,.btn-warning:focus,.btn-warning:active,.btn-warning.active,.open .dropdown-toggle.btn-warning{color:#fff;background-color:#ed9c28;border-color:#d58512}.btn-warning:active,.btn-warning.active,.open .dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled,.btn-warning[disabled],fieldset[disabled] .btn-warning,.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] 
.btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled:active,.btn-warning[disabled]:active,fieldset[disabled] .btn-warning:active,.btn-warning.disabled.active,.btn-warning[disabled].active,fieldset[disabled] .btn-warning.active{background-color:#f0ad4e;border-color:#eea236}.btn-warning .badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger:hover,.btn-danger:focus,.btn-danger:active,.btn-danger.active,.open .dropdown-toggle.btn-danger{color:#fff;background-color:#d2322d;border-color:#ac2925}.btn-danger:active,.btn-danger.active,.open .dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled,.btn-danger[disabled],fieldset[disabled] .btn-danger,.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled:active,.btn-danger[disabled]:active,fieldset[disabled] .btn-danger:active,.btn-danger.disabled.active,.btn-danger[disabled].active,fieldset[disabled] .btn-danger.active{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{color:#428bca;font-weight:400;cursor:pointer;border-radius:0}.btn-link,.btn-link:active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:hover,.btn-link:focus,.btn-link:active{border-color:transparent}.btn-link:hover,.btn-link:focus{color:#2a6496;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,fieldset[disabled] .btn-link:hover,.btn-link[disabled]:focus,fieldset[disabled] .btn-link:focus{color:#999;text-decoration:none}.btn-lg,.btn-group-lg>.btn{padding:10px 16px;font-size:18px;line-height:1.33;border-radius:6px}.btn-sm,.btn-group-sm>.btn{padding:5px 
10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-xs,.btn-group-xs>.btn{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%;padding-left:0;padding-right:0}.btn-block+.btn-block{margin-top:5px}input[type=submit].btn-block,input[type=reset].btn-block,input[type=button].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;transition:height .35s ease}@font-face{font-family:'Glyphicons Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix) format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff) format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular) format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons 
Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\2a"}.glyphicon-plus:before{content:"\2b"}.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{co
ntent:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.g
lyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:before{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{c
ontent:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-
earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px solid;border-right:4px solid transparent;border-left:4px solid transparent}.dropdown{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;font-size:14px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,.175);box-shadow:0 6px 12px rgba(0,0,0,.175);background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 
20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{text-decoration:none;color:#262626;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;outline:0;background-color:#428bca}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#999}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);cursor:not-allowed}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{left:auto;right:0}.dropdown-menu-left{left:0;right:auto}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#999}.dropdown-backdrop{position:fixed;left:0;right:0;bottom:0;top:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}@media (min-width:768px){.navbar-right .dropdown-menu{left:auto;right:0}.navbar-right .dropdown-menu-left{left:0;right:auto}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;float:left}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover,.btn-group>.btn:focus,.btn-group-vertical>.btn:focus,.btn-group>.btn:active,.btn-group-vertical>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn.active{z-index:2}.btn-group>.btn:focus,.btn-group-vertical>.btn:focus{outline:0}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar 
.btn-group,.btn-toolbar .input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child>.btn:last-child,.btn-group>.btn-group:first-child>.dropdown-toggle{border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn-group:last-child>.btn:first-child{border-bottom-left-radius:0;border-top-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-left:8px;padding-right:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-left:12px;padding-right:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 
5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-bottom-left-radius:4px;border-top-right-radius:0;border-top-left-radius:0}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-right-radius:0;border-top-left-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{float:none;display:table-cell;width:1%}.btn-group-justified>.btn-group .btn{width:100%}[data-toggle=buttons]>.btn>input[type=radio],[data-toggle=buttons]>.btn>input[type=checkbox]{display:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*=col-]{float:none;padding-left:0;padding-right:0}.input-group .form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 
16px;font-size:18px;line-height:1.33;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn,select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn,select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn{height:auto}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group .form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:400;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type=radio],.input-group-addon input[type=checkbox]{margin-top:0}.input-group 
.form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group-btn:last-child>.btn-group:not(:last-child)>.btn{border-bottom-right-radius:0;border-top-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:first-child>.btn-group:not(:first-child)>.btn{border-bottom-left-radius:0;border-top-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:hover,.input-group-btn>.btn:focus,.input-group-btn>.btn:active{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{margin-left:-1px}.nav{margin-bottom:0;padding-left:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 15px}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#999}.nav>li.disabled>a:hover,.nav>li.disabled>a:focus{color:#999;text-decoration:none;background-color:transparent;cursor:not-allowed}.nav .open>a,.nav .open>a:hover,.nav .open>a:focus{background-color:#eee;border-color:#428bca}.nav .nav-divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid 
transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:hover,.nav-tabs>li.active>a:focus{color:#555;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent;cursor:default}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{text-align:center;margin-bottom:5px}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:hover,.nav-pills>li.active>a:focus{color:#fff;background-color:#428bca}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{text-align:center;margin-bottom:5px}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 
0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-right-radius:0;border-top-left-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{max-height:340px;overflow-x:visible;padding-right:15px;padding-left:15px;border-top:1px solid transparent;box-shadow:inset 0 1px 0 rgba(255,255,255,.1);-webkit-overflow-scrolling:touch}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;box-shadow:none}.navbar-collapse.collapse{display:block!important;height:auto!important;padding-bottom:0;overflow:visible!important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-top .navbar-collapse,.navbar-static-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{padding-left:0;padding-right:0}}.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;padding:15px;font-size:18px;line-height:20px;height:50px}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}@media 
(min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;margin-right:15px;padding:9px 10px;margin-top:8px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;box-shadow:none}.navbar-nav .open .dropdown-menu>li>a,.navbar-nav .open .dropdown-menu .dropdown-header{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:hover,.navbar-nav .open .dropdown-menu>li>a:focus{background-image:none}}@media (min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}.navbar-nav.navbar-right:last-child{margin-right:-15px}}@media (min-width:768px){.navbar-left{float:left!important}.navbar-right{float:right!important}}.navbar-form{margin-left:-15px;margin-right:-15px;padding:10px 15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);box-shadow:inset 0 1px 0 rgba(255,255,255,.1),0 1px 0 rgba(255,255,255,.1);margin-top:8px;margin-bottom:8px}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .input-group>.form-control{width:100%}.navbar-form 
.control-label{margin-bottom:0;vertical-align:middle}.navbar-form .radio,.navbar-form .checkbox{display:inline-block;margin-top:0;margin-bottom:0;padding-left:0;vertical-align:middle}.navbar-form .radio input[type=radio],.navbar-form .checkbox input[type=checkbox]{float:none;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}}@media (min-width:768px){.navbar-form{width:auto;border:0;margin-left:0;margin-right:0;padding-top:0;padding-bottom:0;-webkit-box-shadow:none;box-shadow:none}.navbar-form.navbar-right:last-child{margin-right:-15px}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-right-radius:0;border-top-left-radius:0}.navbar-fixed-bottom .navbar-nav>li>.dropdown-menu{border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-left:15px;margin-right:15px}.navbar-text.navbar-right:last-child{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:hover,.navbar-default .navbar-brand:focus{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:hover,.navbar-default .navbar-nav>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:hover,.navbar-default .navbar-nav>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:hover,.navbar-default .navbar-nav>.disabled>a:focus{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default 
.navbar-toggle:hover,.navbar-default .navbar-toggle:focus{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:hover,.navbar-default .navbar-nav>.open>a:focus{background-color:#e7e7e7;color:#555}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse .navbar-brand{color:#999}.navbar-inverse .navbar-brand:hover,.navbar-inverse .navbar-brand:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#999}.navbar-inverse .navbar-nav>li>a{color:#999}.navbar-inverse .navbar-nav>li>a:hover,.navbar-inverse .navbar-nav>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:hover,.navbar-inverse .navbar-nav>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:hover,.navbar-inverse .navbar-nav>.disabled>a:focus{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse 
.navbar-toggle:hover,.navbar-inverse .navbar-toggle:focus{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:hover,.navbar-inverse .navbar-nav>.open>a:focus{background-color:#080808;color:#fff}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#999}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover{color:#fff}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{content:"/\00a0";padding:0 5px;color:#ccc}.breadcrumb>.active{color:#999}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;line-height:1.42857143;text-decoration:none;color:#428bca;background-color:#fff;border:1px solid 
#ddd;margin-left:-1px}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-bottom-left-radius:4px;border-top-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-bottom-right-radius:4px;border-top-right-radius:4px}.pagination>li>a:hover,.pagination>li>span:hover,.pagination>li>a:focus,.pagination>li>span:focus{color:#2a6496;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>span,.pagination>.active>a:hover,.pagination>.active>span:hover,.pagination>.active>a:focus,.pagination>.active>span:focus{z-index:2;color:#fff;background-color:#428bca;border-color:#428bca;cursor:default}.pagination>.disabled>span,.pagination>.disabled>span:hover,.pagination>.disabled>span:focus,.pagination>.disabled>a,.pagination>.disabled>a:hover,.pagination>.disabled>a:focus{color:#999;background-color:#fff;border-color:#ddd;cursor:not-allowed}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-bottom-left-radius:6px;border-top-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-bottom-right-radius:6px;border-top-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-bottom-left-radius:3px;border-top-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-bottom-right-radius:3px;border-top-right-radius:3px}.pager{padding-left:0;margin:20px 0;list-style:none;text-align:center}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager 
.disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#999;background-color:#fff;cursor:not-allowed}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:700;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}.label[href]:hover,.label[href]:focus{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#999}.label-default[href]:hover,.label-default[href]:focus{background-color:gray}.label-primary{background-color:#428bca}.label-primary[href]:hover,.label-primary[href]:focus{background-color:#3071a9}.label-success{background-color:#5cb85c}.label-success[href]:hover,.label-success[href]:focus{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:hover,.label-info[href]:focus{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:hover,.label-warning[href]:focus{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:hover,.label-danger[href]:focus{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:700;color:#fff;line-height:1;vertical-align:baseline;white-space:nowrap;text-align:center;background-color:#999;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-xs .badge{top:0;padding:1px 5px}a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}a.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#428bca;background-color:#fff}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron h1,.jumbotron .h1{color:inherit}.jumbotron p{margin-bottom:15px;font-size:21px;font-weight:200}.container .jumbotron{border-radius:6px}.jumbotron .container{max-width:100%}@media screen and 
(min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron{padding-left:60px;padding-right:60px}.jumbotron h1,.jumbotron .h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.thumbnail>img,.thumbnail a>img{margin-left:auto;margin-right:auto}a.thumbnail:hover,a.thumbnail:focus,a.thumbnail.active{border-color:#428bca}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:700}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable{padding-right:35px}.alert-dismissable .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{background-color:#dff0d8;border-color:#d6e9c6;color:#3c763d}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{background-color:#d9edf7;border-color:#bce8f1;color:#31708f}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{background-color:#fcf8e3;border-color:#faebcc;color:#8a6d3b}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{background-color:#f2dede;border-color:#ebccd1;color:#a94442}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{overflow:hidden;height:20px;margin-bottom:20px;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.1);box-shadow:inset 0 1px 2px 
rgba(0,0,0,.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#428bca;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,.15);-webkit-transition:width .6s ease;transition:width .6s ease}.progress-striped .progress-bar{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-size:40px 40px}.progress.active .progress-bar{-webkit-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 
75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent)}.media,.media-body{overflow:hidden;zoom:1}.media,.media .media{margin-top:15px}.media:first-child{margin-top:0}.media-object{display:block}.media-heading{margin:0 0 5px}.media>.pull-left{margin-right:10px}.media>.pull-right{margin-left:10px}.media-list{padding-left:0;list-style:none}.list-group{margin-bottom:20px;padding-left:0}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-right-radius:4px;border-top-left-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}a.list-group-item{color:#555}a.list-group-item .list-group-item-heading{color:#333}a.list-group-item:hover,a.list-group-item:focus{text-decoration:none;background-color:#f5f5f5}a.list-group-item.active,a.list-group-item.active:hover,a.list-group-item.active:focus{z-index:2;color:#fff;background-color:#428bca;border-color:#428bca}a.list-group-item.active .list-group-item-heading,a.list-group-item.active:hover .list-group-item-heading,a.list-group-item.active:focus .list-group-item-heading{color:inherit}a.list-group-item.active .list-group-item-text,a.list-group-item.active:hover 
.list-group-item-text,a.list-group-item.active:focus .list-group-item-text{color:#e1edf7}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:hover,a.list-group-item-success:focus{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,a.list-group-item-success.active:hover,a.list-group-item-success.active:focus{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading{color:inherit}a.list-group-item-info:hover,a.list-group-item-info:focus{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,a.list-group-item-info.active:hover,a.list-group-item-info.active:focus{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:hover,a.list-group-item-warning:focus{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,a.list-group-item-warning.active:hover,a.list-group-item-warning.active:focus{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading{color:inherit}a.list-group-item-danger:hover,a.list-group-item-danger:focus{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,a.list-group-item-danger.active:hover,a.list-group-item-danger.active:focus{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid 
transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,.05);box-shadow:0 1px 1px rgba(0,0,0,.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-right-radius:3px;border-top-left-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group{margin-bottom:0}.panel>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-right-radius:3px;border-top-left-radius:3px}.panel>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel-heading+.list-group .list-group-item:first-child{border-top-width:0}.panel>.table,.panel>.table-responsive>.table{margin-bottom:0}.panel>.table:first-child,.panel>.table-responsive:first-child>.table:first-child{border-top-right-radius:3px;border-top-left-radius:3px}.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child 
th:first-child{border-top-left-radius:3px}.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table:last-child,.panel>.table-responsive:last-child>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child 
td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child th,.panel>.table>tbody:first-child>tr:first-child td{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-ch
ild{border-right:0}.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{border:0;margin-bottom:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px;overflow:hidden}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group .panel-heading+.panel-collapse .panel-body{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse .panel-body{border-top-color:#ddd}.panel-default>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#428bca}.panel-primary>.panel-heading{color:#fff;background-color:#428bca;border-color:#428bca}.panel-primary>.panel-heading+.panel-collapse .panel-body{border-top-color:#428bca}.panel-primary>.panel-footer+.panel-collapse 
.panel-body{border-bottom-color:#428bca}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse .panel-body{border-top-color:#d6e9c6}.panel-success>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse .panel-body{border-top-color:#bce8f1}.panel-info>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse .panel-body{border-top-color:#faebcc}.panel-warning>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse .panel-body{border-top-color:#ebccd1}.panel-danger>.panel-footer+.panel-collapse .panel-body{border-bottom-color:#ebccd1}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.05);box-shadow:inset 0 1px 1px rgba(0,0,0,.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.5;filter:alpha(opacity=50)}button.close{padding:0;cursor:pointer;background:0 
0;border:0;-webkit-appearance:none}.modal-open{overflow:hidden}.modal{display:none;overflow:auto;overflow-y:scroll;position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);transform:translate(0,-25%);-webkit-transition:-webkit-transform .3s ease-out;-moz-transition:-moz-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out}.modal.in .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);transform:translate(0,0)}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 3px 9px rgba(0,0,0,.5);box-shadow:0 3px 9px rgba(0,0,0,.5);background-clip:padding-box;outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0;filter:alpha(opacity=0)}.modal-backdrop.in{opacity:.5;filter:alpha(opacity=50)}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5;min-height:16.42857143px}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:20px}.modal-footer{margin-top:15px;padding:19px 20px 20px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-left:5px;margin-bottom:0}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,.5);box-shadow:0 5px 15px rgba(0,0,0,.5)}.modal-sm{width:300px}}@media 
(min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1030;display:block;visibility:visible;font-size:12px;line-height:1.4;opacity:0;filter:alpha(opacity=0)}.tooltip.in{opacity:.9;filter:alpha(opacity=90)}.tooltip.top{margin-top:-3px;padding:5px 0}.tooltip.right{margin-left:3px;padding:0 5px}.tooltip.bottom{margin-top:3px;padding:5px 0}.tooltip.left{margin-left:-3px;padding:0 5px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{bottom:0;left:5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;right:5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;left:5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;right:5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;max-width:276px;padding:1px;text-align:left;background-color:#fff;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);white-space:normal}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{margin:0;padding:8px 
14px;font-size:14px;font-weight:400;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{border-width:10px;content:""}.popover.top>.arrow{left:50%;margin-left:-11px;border-bottom-width:0;border-top-color:#999;border-top-color:rgba(0,0,0,.25);bottom:-11px}.popover.top>.arrow:after{content:" ";bottom:1px;margin-left:-10px;border-bottom-width:0;border-top-color:#fff}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-left-width:0;border-right-color:#999;border-right-color:rgba(0,0,0,.25)}.popover.right>.arrow:after{content:" ";left:1px;bottom:-10px;border-left-width:0;border-right-color:#fff}.popover.bottom>.arrow{left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,.25);top:-11px}.popover.bottom>.arrow:after{content:" ";top:1px;margin-left:-10px;border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,.25)}.popover.left>.arrow:after{content:" ";right:1px;border-right-width:0;border-left-color:#fff;bottom:-10px}.carousel{position:relative}.carousel-inner{position:relative;overflow:hidden;width:100%}.carousel-inner>.item{display:none;position:relative;-webkit-transition:.6s ease-in-out left;transition:.6s ease-in-out 
left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{line-height:1}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;left:0;bottom:0;width:15%;opacity:.5;filter:alpha(opacity=50);font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-control.left{background-image:-webkit-linear-gradient(left,color-stop(rgba(0,0,0,.5) 0),color-stop(rgba(0,0,0,.0001) 100%));background-image:linear-gradient(to right,rgba(0,0,0,.5) 0,rgba(0,0,0,.0001) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1)}.carousel-control.right{left:auto;right:0;background-image:-webkit-linear-gradient(left,color-stop(rgba(0,0,0,.0001) 0),color-stop(rgba(0,0,0,.5) 100%));background-image:linear-gradient(to right,rgba(0,0,0,.0001) 0,rgba(0,0,0,.5) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1)}.carousel-control:hover,.carousel-control:focus{outline:0;color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-control .icon-prev,.carousel-control .icon-next,.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right{position:absolute;top:50%;z-index:5;display:inline-block}.carousel-control .icon-prev,.carousel-control .glyphicon-chevron-left{left:50%}.carousel-control .icon-next,.carousel-control .glyphicon-chevron-right{right:50%}.carousel-control .icon-prev,.carousel-control 
.icon-next{width:20px;height:20px;margin-top:-10px;margin-left:-10px;font-family:serif}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;margin-left:-30%;padding-left:0;list-style:none;text-align:center}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;border:1px solid #fff;border-radius:10px;cursor:pointer;background-color:#000 \9;background-color:rgba(0,0,0,0)}.carousel-indicators .active{margin:0;width:12px;height:12px;background-color:#fff}.carousel-caption{position:absolute;left:15%;right:15%;bottom:20px;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-prev,.carousel-control .icon-next{width:30px;height:30px;margin-top:-15px;margin-left:-15px;font-size:30px}.carousel-caption{left:20%;right:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.clearfix:before,.clearfix:after,.container:before,.container:after,.container-fluid:before,.container-fluid:after,.row:before,.row:after,.form-horizontal .form-group:before,.form-horizontal .form-group:after,.btn-toolbar:before,.btn-toolbar:after,.btn-group-vertical>.btn-group:before,.btn-group-vertical>.btn-group:after,.nav:before,.nav:after,.navbar:before,.navbar:after,.navbar-header:before,.navbar-header:after,.navbar-collapse:before,.navbar-collapse:after,.pager:before,.pager:after,.panel-body:before,.panel-body:after,.modal-footer:before,.modal-footer:after{content:" ";display:table}.clearfix:after,.container:after,.container-fluid:after,.row:after,.form-horizontal 
.form-group:after,.btn-toolbar:after,.btn-group-vertical>.btn-group:after,.nav:after,.navbar:after,.navbar-header:after,.navbar-collapse:after,.pager:after,.panel-body:after,.modal-footer:after{clear:both}.center-block{display:block;margin-left:auto;margin-right:auto}.pull-right{float:right!important}.pull-left{float:left!important}.hide{display:none!important}.show{display:block!important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none!important;visibility:hidden!important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-xs,.visible-sm,.visible-md,.visible-lg{display:none!important}@media (max-width:767px){.visible-xs{display:block!important}table.visible-xs{display:table}tr.visible-xs{display:table-row!important}th.visible-xs,td.visible-xs{display:table-cell!important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block!important}table.visible-sm{display:table}tr.visible-sm{display:table-row!important}th.visible-sm,td.visible-sm{display:table-cell!important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block!important}table.visible-md{display:table}tr.visible-md{display:table-row!important}th.visible-md,td.visible-md{display:table-cell!important}}@media (min-width:1200px){.visible-lg{display:block!important}table.visible-lg{display:table}tr.visible-lg{display:table-row!important}th.visible-lg,td.visible-lg{display:table-cell!important}}@media (max-width:767px){.hidden-xs{display:none!important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none!important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none!important}}@media (min-width:1200px){.hidden-lg{display:none!important}}.visible-print{display:none!important}@media 
print{.visible-print{display:block!important}table.visible-print{display:table}tr.visible-print{display:table-row!important}th.visible-print,td.visible-print{display:table-cell!important}}@media print{.hidden-print{display:none!important}} \ No newline at end of file diff --git a/qiita_pet/static/vendor/css/jquery.qtip.min.css b/qiita_pet/static/vendor/css/jquery.qtip.min.css new file mode 100644 index 000000000..fc172a464 --- /dev/null +++ b/qiita_pet/static/vendor/css/jquery.qtip.min.css @@ -0,0 +1,2 @@ +/* qTip2 v2.2.0 basic css3 | qtip2.com | Licensed MIT, GPL | Thu Nov 21 2013 20:35:00 */ +.qtip{position:absolute;left:-28000px;top:-28000px;display:none;max-width:280px;min-width:50px;font-size:10.5px;line-height:12px;direction:ltr;box-shadow:none;padding:0}.qtip-content{position:relative;padding:5px 9px;overflow:hidden;text-align:left;word-wrap:break-word}.qtip-titlebar{position:relative;padding:5px 35px 5px 10px;overflow:hidden;border-width:0 0 1px;font-weight:700}.qtip-titlebar+.qtip-content{border-top-width:0!important}.qtip-close{position:absolute;right:-9px;top:-9px;cursor:pointer;outline:medium none;border-width:1px;border-style:solid;border-color:transparent}.qtip-titlebar .qtip-close{right:4px;top:50%;margin-top:-9px}* html .qtip-titlebar .qtip-close{top:16px}.qtip-titlebar .ui-icon,.qtip-icon .ui-icon{display:block;text-indent:-1000em;direction:ltr}.qtip-icon,.qtip-icon .ui-icon{-moz-border-radius:3px;-webkit-border-radius:3px;border-radius:3px;text-decoration:none}.qtip-icon .ui-icon{width:18px;height:14px;line-height:14px;text-align:center;text-indent:0;font:400 bold 10px/13px Tahoma,sans-serif;color:inherit;background:transparent none no-repeat -100em -100em}.qtip-focus{}.qtip-hover{}.qtip-default{border-width:1px;border-style:solid;border-color:#F1D031;background-color:#FFFFA3;color:#555}.qtip-default .qtip-titlebar{background-color:#FFEF93}.qtip-default .qtip-icon{border-color:#CCC;background:#F1F1F1;color:#777}.qtip-default .qtip-titlebar 
.qtip-close{border-color:#AAA;color:#111} .qtip-light{background-color:#fff;border-color:#E2E2E2;color:#454545}.qtip-light .qtip-titlebar{background-color:#f1f1f1} .qtip-dark{background-color:#505050;border-color:#303030;color:#f3f3f3}.qtip-dark .qtip-titlebar{background-color:#404040}.qtip-dark .qtip-icon{border-color:#444}.qtip-dark .qtip-titlebar .ui-state-hover{border-color:#303030} .qtip-cream{background-color:#FBF7AA;border-color:#F9E98E;color:#A27D35}.qtip-cream .qtip-titlebar{background-color:#F0DE7D}.qtip-cream .qtip-close .qtip-icon{background-position:-82px 0} .qtip-red{background-color:#F78B83;border-color:#D95252;color:#912323}.qtip-red .qtip-titlebar{background-color:#F06D65}.qtip-red .qtip-close .qtip-icon{background-position:-102px 0}.qtip-red .qtip-icon{border-color:#D95252}.qtip-red .qtip-titlebar .ui-state-hover{border-color:#D95252} .qtip-green{background-color:#CAED9E;border-color:#90D93F;color:#3F6219}.qtip-green .qtip-titlebar{background-color:#B0DE78}.qtip-green .qtip-close .qtip-icon{background-position:-42px 0} .qtip-blue{background-color:#E5F6FE;border-color:#ADD9ED;color:#5E99BD}.qtip-blue .qtip-titlebar{background-color:#D0E9F5}.qtip-blue .qtip-close .qtip-icon{background-position:-2px 0}.qtip-shadow{-webkit-box-shadow:1px 1px 3px 1px rgba(0,0,0,.15);-moz-box-shadow:1px 1px 3px 1px rgba(0,0,0,.15);box-shadow:1px 1px 3px 1px rgba(0,0,0,.15)}.qtip-rounded,.qtip-tipsy,.qtip-bootstrap{-moz-border-radius:5px;-webkit-border-radius:5px;border-radius:5px}.qtip-rounded .qtip-titlebar{-moz-border-radius:4px 4px 0 0;-webkit-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.qtip-youtube{-moz-border-radius:2px;-webkit-border-radius:2px;border-radius:2px;-webkit-box-shadow:0 0 3px #333;-moz-box-shadow:0 0 3px #333;box-shadow:0 0 3px #333;color:#fff;border-width:0;background:#4A4A4A;background-image:-webkit-gradient(linear,left top,left bottom,color-stop(0,#4A4A4A),color-stop(100%,#000));background-image:-webkit-linear-gradient(top,#4A4A4A 0,#000 
100%);background-image:-moz-linear-gradient(top,#4A4A4A 0,#000 100%);background-image:-ms-linear-gradient(top,#4A4A4A 0,#000 100%);background-image:-o-linear-gradient(top,#4A4A4A 0,#000 100%)}.qtip-youtube .qtip-titlebar{background-color:#4A4A4A;background-color:rgba(0,0,0,0)}.qtip-youtube .qtip-content{padding:.75em;font:12px arial,sans-serif;filter:progid:DXImageTransform.Microsoft.Gradient(GradientType=0, StartColorStr=#4a4a4a, EndColorStr=#000000);-ms-filter:"progid:DXImageTransform.Microsoft.Gradient(GradientType=0, StartColorStr=#4a4a4a, EndColorStr=#000000);"}.qtip-youtube .qtip-icon{border-color:#222}.qtip-youtube .qtip-titlebar .ui-state-hover{border-color:#303030}.qtip-jtools{background:#232323;background:rgba(0,0,0,.7);background-image:-webkit-gradient(linear,left top,left bottom,from(#717171),to(#232323));background-image:-moz-linear-gradient(top,#717171,#232323);background-image:-webkit-linear-gradient(top,#717171,#232323);background-image:-ms-linear-gradient(top,#717171,#232323);background-image:-o-linear-gradient(top,#717171,#232323);border:2px solid #ddd;border:2px solid rgba(241,241,241,1);-moz-border-radius:2px;-webkit-border-radius:2px;border-radius:2px;-webkit-box-shadow:0 0 12px #333;-moz-box-shadow:0 0 12px #333;box-shadow:0 0 12px #333}.qtip-jtools .qtip-titlebar{background-color:transparent;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=#717171, endColorstr=#4A4A4A);-ms-filter:"progid:DXImageTransform.Microsoft.gradient(startColorstr=#717171, endColorstr=#4A4A4A)"}.qtip-jtools .qtip-content{filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=#4A4A4A, endColorstr=#232323);-ms-filter:"progid:DXImageTransform.Microsoft.gradient(startColorstr=#4A4A4A, endColorstr=#232323)"}.qtip-jtools .qtip-titlebar,.qtip-jtools .qtip-content{background:transparent;color:#fff;border:0 dashed transparent}.qtip-jtools .qtip-icon{border-color:#555}.qtip-jtools .qtip-titlebar 
.ui-state-hover{border-color:#333}.qtip-cluetip{-webkit-box-shadow:4px 4px 5px rgba(0,0,0,.4);-moz-box-shadow:4px 4px 5px rgba(0,0,0,.4);box-shadow:4px 4px 5px rgba(0,0,0,.4);background-color:#D9D9C2;color:#111;border:0 dashed transparent}.qtip-cluetip .qtip-titlebar{background-color:#87876A;color:#fff;border:0 dashed transparent}.qtip-cluetip .qtip-icon{border-color:#808064}.qtip-cluetip .qtip-titlebar .ui-state-hover{border-color:#696952;color:#696952}.qtip-tipsy{background:#000;background:rgba(0,0,0,.87);color:#fff;border:0 solid transparent;font-size:11px;font-family:'Lucida Grande',sans-serif;font-weight:700;line-height:16px;text-shadow:0 1px #000}.qtip-tipsy .qtip-titlebar{padding:6px 35px 0 10px;background-color:transparent}.qtip-tipsy .qtip-content{padding:6px 10px}.qtip-tipsy .qtip-icon{border-color:#222;text-shadow:none}.qtip-tipsy .qtip-titlebar .ui-state-hover{border-color:#303030}.qtip-tipped{border:3px solid #959FA9;-moz-border-radius:3px;-webkit-border-radius:3px;border-radius:3px;background-color:#F9F9F9;color:#454545;font-weight:400;font-family:serif}.qtip-tipped .qtip-titlebar{border-bottom-width:0;color:#fff;background:#3A79B8;background-image:-webkit-gradient(linear,left top,left bottom,from(#3A79B8),to(#2E629D));background-image:-webkit-linear-gradient(top,#3A79B8,#2E629D);background-image:-moz-linear-gradient(top,#3A79B8,#2E629D);background-image:-ms-linear-gradient(top,#3A79B8,#2E629D);background-image:-o-linear-gradient(top,#3A79B8,#2E629D);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr=#3A79B8, endColorstr=#2E629D);-ms-filter:"progid:DXImageTransform.Microsoft.gradient(startColorstr=#3A79B8, endColorstr=#2E629D)"}.qtip-tipped .qtip-icon{border:2px solid #285589;background:#285589}.qtip-tipped .qtip-icon .ui-icon{background-color:#FBFBFB;color:#555}.qtip-bootstrap{font-size:14px;line-height:20px;color:#333;padding:1px;background-color:#fff;border:1px solid #ccc;border:1px solid 
rgba(0,0,0,.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.qtip-bootstrap .qtip-titlebar{padding:8px 14px;margin:0;font-size:14px;font-weight:400;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.qtip-bootstrap .qtip-titlebar .qtip-close{right:11px;top:45%;border-style:none}.qtip-bootstrap .qtip-content{padding:9px 14px}.qtip-bootstrap .qtip-icon{background:transparent}.qtip-bootstrap .qtip-icon .ui-icon{width:auto;height:auto;float:right;font-size:20px;font-weight:700;line-height:18px;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.qtip-bootstrap .qtip-icon .ui-icon:hover{color:#000;text-decoration:none;cursor:pointer;opacity:.4;filter:alpha(opacity=40)}.qtip:not(.ie9haxors) div.qtip-content,.qtip:not(.ie9haxors) div.qtip-titlebar{filter:none;-ms-filter:none}.qtip .qtip-tip{margin:0 auto;overflow:hidden;z-index:10}x:-o-prefocus,.qtip .qtip-tip{visibility:hidden}.qtip .qtip-tip,.qtip .qtip-tip .qtip-vml,.qtip .qtip-tip canvas{position:absolute;color:#123456;background:transparent;border:0 dashed transparent}.qtip .qtip-tip canvas{top:0;left:0}.qtip .qtip-tip .qtip-vml{behavior:url(#default#VML);display:inline-block;visibility:visible}#qtip-overlay{position:fixed;left:0;top:0;width:100%;height:100%}#qtip-overlay.blurs{cursor:pointer}#qtip-overlay div{position:absolute;left:0;top:0;width:100%;height:100%;background-color:#000;opacity:.7;filter:alpha(opacity=70);-ms-filter:"alpha(Opacity=70)"}.qtipmodal-ie6fix{position:absolute!important} \ No newline at end of file diff --git a/qiita_pet/static/vendor/js/bootstrap.min.js b/qiita_pet/static/vendor/js/bootstrap.min.js new file mode 100644 index 
000000000..b04a0e82f --- /dev/null +++ b/qiita_pet/static/vendor/js/bootstrap.min.js @@ -0,0 +1,6 @@ +/*! + * Bootstrap v3.1.1 (http://getbootstrap.com) + * Copyright 2011-2014 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + */ +if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one(a.support.transition.end,function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b()})}(jQuery),+function(a){"use strict";var b='[data-dismiss="alert"]',c=function(c){a(c).on("click",b,this.close)};c.prototype.close=function(b){function c(){f.trigger("closed.bs.alert").remove()}var d=a(this),e=d.attr("data-target");e||(e=d.attr("href"),e=e&&e.replace(/.*(?=#[^\s]*$)/,""));var f=a(e);b&&b.preventDefault(),f.length||(f=d.hasClass("alert")?d:d.parent()),f.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one(a.support.transition.end,c).emulateTransitionEnd(150):c())};var d=a.fn.alert;a.fn.alert=function(b){return this.each(function(){var d=a(this),e=d.data("bs.alert");e||d.data("bs.alert",e=new c(this)),"string"==typeof b&&e[b].call(d)})},a.fn.alert.Constructor=c,a.fn.alert.noConflict=function(){return a.fn.alert=d,this},a(document).on("click.bs.alert.data-api",b,c.prototype.close)}(jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d),this.isLoading=!1};b.DEFAULTS={loadingText:"loading..."},b.prototype.setState=function(b){var 
c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",f.resetText||d.data("resetText",d[e]()),d[e](f[b]||this.options[b]),setTimeout(a.proxy(function(){"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},b.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")&&(c.prop("checked")&&this.$element.hasClass("active")?a=!1:b.find(".active").removeClass("active")),a&&c.prop("checked",!this.$element.hasClass("active")).trigger("change")}a&&this.$element.toggleClass("active")};var c=a.fn.button;a.fn.button=function(c){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof c&&c;e||d.data("bs.button",e=new b(this,f)),"toggle"==c?e.toggle():c&&e.setState(c)})},a.fn.button.Constructor=b,a.fn.button.noConflict=function(){return a.fn.button=c,this},a(document).on("click.bs.button.data-api","[data-toggle^=button]",function(b){var c=a(b.target);c.hasClass("btn")||(c=c.closest(".btn")),c.button("toggle"),b.preventDefault()})}(jQuery),+function(a){"use strict";var b=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=this.sliding=this.interval=this.$active=this.$items=null,"hover"==this.options.pause&&this.$element.on("mouseenter",a.proxy(this.pause,this)).on("mouseleave",a.proxy(this.cycle,this))};b.DEFAULTS={interval:5e3,pause:"hover",wrap:!0},b.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},b.prototype.getActiveIndex=function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},b.prototype.to=function(b){var 
c=this,d=this.getActiveIndex();return b>this.$items.length-1||0>b?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){c.to(b)}):d==b?this.pause().cycle():this.slide(b>d?"next":"prev",a(this.$items[b]))},b.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},b.prototype.next=function(){return this.sliding?void 0:this.slide("next")},b.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},b.prototype.slide=function(b,c){var d=this.$element.find(".item.active"),e=c||d[b](),f=this.interval,g="next"==b?"left":"right",h="next"==b?"first":"last",i=this;if(!e.length){if(!this.options.wrap)return;e=this.$element.find(".item")[h]()}if(e.hasClass("active"))return this.sliding=!1;var j=a.Event("slide.bs.carousel",{relatedTarget:e[0],direction:g});return this.$element.trigger(j),j.isDefaultPrevented()?void 0:(this.sliding=!0,f&&this.pause(),this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid.bs.carousel",function(){var b=a(i.$indicators.children()[i.getActiveIndex()]);b&&b.addClass("active")})),a.support.transition&&this.$element.hasClass("slide")?(e.addClass(b),e[0].offsetWidth,d.addClass(g),e.addClass(g),d.one(a.support.transition.end,function(){e.removeClass([b,g].join(" ")).addClass("active"),d.removeClass(["active",g].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger("slid.bs.carousel")},0)}).emulateTransitionEnd(1e3*d.css("transition-duration").slice(0,-1))):(d.removeClass("active"),e.addClass("active"),this.sliding=!1,this.$element.trigger("slid.bs.carousel")),f&&this.cycle(),this)};var c=a.fn.carousel;a.fn.carousel=function(c){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c),g="string"==typeof 
c?c:f.slide;e||d.data("bs.carousel",e=new b(this,f)),"number"==typeof c?e.to(c):g?e[g]():f.interval&&e.pause().cycle()})},a.fn.carousel.Constructor=b,a.fn.carousel.noConflict=function(){return a.fn.carousel=c,this},a(document).on("click.bs.carousel.data-api","[data-slide], [data-slide-to]",function(b){var c,d=a(this),e=a(d.attr("data-target")||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"")),f=a.extend({},e.data(),d.data()),g=d.attr("data-slide-to");g&&(f.interval=!1),e.carousel(f),(g=d.attr("data-slide-to"))&&e.data("bs.carousel").to(g),b.preventDefault()}),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var b=a(this);b.carousel(b.data())})})}(jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d),this.transitioning=null,this.options.parent&&(this.$parent=a(this.options.parent)),this.options.toggle&&this.toggle()};b.DEFAULTS={toggle:!0},b.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},b.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b=a.Event("show.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.$parent&&this.$parent.find("> .panel > .in");if(c&&c.length){var d=c.data("bs.collapse");if(d&&d.transitioning)return;c.collapse("hide"),d||c.data("bs.collapse",null)}var e=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[e](0),this.transitioning=1;var f=function(){this.$element.removeClass("collapsing").addClass("collapse in")[e]("auto"),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return f.call(this);var g=a.camelCase(["scroll",e].join("-"));this.$element.one(a.support.transition.end,a.proxy(f,this)).emulateTransitionEnd(350)[e](this.$element[0][g])}}},b.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var 
b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse").removeClass("in"),this.transitioning=1;var d=function(){this.transitioning=0,this.$element.trigger("hidden.bs.collapse").removeClass("collapsing").addClass("collapse")};return a.support.transition?void this.$element[c](0).one(a.support.transition.end,a.proxy(d,this)).emulateTransitionEnd(350):d.call(this)}}},b.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()};var c=a.fn.collapse;a.fn.collapse=function(c){return this.each(function(){var d=a(this),e=d.data("bs.collapse"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c);!e&&f.toggle&&"show"==c&&(c=!c),e||d.data("bs.collapse",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.collapse.Constructor=b,a.fn.collapse.noConflict=function(){return a.fn.collapse=c,this},a(document).on("click.bs.collapse.data-api","[data-toggle=collapse]",function(b){var c,d=a(this),e=d.attr("data-target")||b.preventDefault()||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,""),f=a(e),g=f.data("bs.collapse"),h=g?"toggle":d.data(),i=d.attr("data-parent"),j=i&&a(i);g&&g.transitioning||(j&&j.find('[data-toggle=collapse][data-parent="'+i+'"]').not(d).addClass("collapsed"),d[f.hasClass("in")?"addClass":"removeClass"]("collapsed")),f.collapse(h)})}(jQuery),+function(a){"use strict";function b(b){a(d).remove(),a(e).each(function(){var d=c(a(this)),e={relatedTarget:this};d.hasClass("open")&&(d.trigger(b=a.Event("hide.bs.dropdown",e)),b.isDefaultPrevented()||d.removeClass("open").trigger("hidden.bs.dropdown",e))})}function c(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}var 
d=".dropdown-backdrop",e="[data-toggle=dropdown]",f=function(b){a(b).on("click.bs.dropdown",this.toggle)};f.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=c(e),g=f.hasClass("open");if(b(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(''}),b.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),b.prototype.constructor=b,b.prototype.getDefaults=function(){return b.DEFAULTS},b.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content")[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},b.prototype.hasContent=function(){return this.getTitle()||this.getContent()},b.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},b.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")},b.prototype.tip=function(){return this.$tip||(this.$tip=a(this.options.template)),this.$tip};var c=a.fn.popover;a.fn.popover=function(c){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof c&&c;(e||"destroy"!=c)&&(e||d.data("bs.popover",e=new b(this,f)),"string"==typeof c&&e[c]())})},a.fn.popover.Constructor=b,a.fn.popover.noConflict=function(){return a.fn.popover=c,this}}(jQuery),+function(a){"use strict";function b(c,d){var e,f=a.proxy(this.process,this);this.$element=a(a(c).is("body")?window:c),this.$body=a("body"),this.$scrollElement=this.$element.on("scroll.bs.scroll-spy.data-api",f),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||(e=a(c).attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > 
a",this.offsets=a([]),this.targets=a([]),this.activeTarget=null,this.refresh(),this.process()}b.DEFAULTS={offset:10},b.prototype.refresh=function(){var b=this.$element[0]==window?"offset":"position";this.offsets=a([]),this.targets=a([]);{var c=this;this.$body.find(this.selector).map(function(){var d=a(this),e=d.data("target")||d.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[b]().top+(!a.isWindow(c.$scrollElement.get(0))&&c.$scrollElement.scrollTop()),e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){c.offsets.push(this[0]),c.targets.push(this[1])})}},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,d=c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(b>=d)return g!=(a=f.last()[0])&&this.activate(a);if(g&&b<=e[0])return g!=(a=f[0])&&this.activate(a);for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(!e[a+1]||b<=e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,a(this.selector).parentsUntil(this.options.target,".active").removeClass("active");var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate.bs.scrollspy")};var c=a.fn.scrollspy;a.fn.scrollspy=function(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=c,this},a(window).on("load",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);b.scrollspy(b.data())})})}(jQuery),+function(a){"use strict";var b=function(b){this.element=a(b)};b.prototype.show=function(){var 
b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a")[0],f=a.Event("show.bs.tab",{relatedTarget:e});if(b.trigger(f),!f.isDefaultPrevented()){var g=a(d);this.activate(b.parent("li"),c),this.activate(g,g.parent(),function(){b.trigger({type:"shown.bs.tab",relatedTarget:e})})}}},b.prototype.activate=function(b,c,d){function e(){f.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),b.addClass("active"),g?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu")&&b.closest("li.dropdown").addClass("active"),d&&d()}var f=c.find("> .active"),g=d&&a.support.transition&&f.hasClass("fade");g?f.one(a.support.transition.end,e).emulateTransitionEnd(150):e(),f.removeClass("in")};var c=a.fn.tab;a.fn.tab=function(c){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new b(this)),"string"==typeof c&&e[c]()})},a.fn.tab.Constructor=b,a.fn.tab.noConflict=function(){return a.fn.tab=c,this},a(document).on("click.bs.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(b){b.preventDefault(),a(this).tab("show")})}(jQuery),+function(a){"use strict";var b=function(c,d){this.options=a.extend({},b.DEFAULTS,d),this.$window=a(window).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(c),this.affixed=this.unpin=this.pinnedOffset=null,this.checkPosition()};b.RESET="affix affix-top affix-bottom",b.DEFAULTS={offset:0},b.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(b.RESET).addClass("affix");var a=this.$window.scrollTop(),c=this.$element.offset();return 
this.pinnedOffset=c.top-a},b.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},b.prototype.checkPosition=function(){if(this.$element.is(":visible")){var c=a(document).height(),d=this.$window.scrollTop(),e=this.$element.offset(),f=this.options.offset,g=f.top,h=f.bottom;"top"==this.affixed&&(e.top+=d),"object"!=typeof f&&(h=g=f),"function"==typeof g&&(g=f.top(this.$element)),"function"==typeof h&&(h=f.bottom(this.$element));var i=null!=this.unpin&&d+this.unpin<=e.top?!1:null!=h&&e.top+this.$element.height()>=c-h?"bottom":null!=g&&g>=d?"top":!1;if(this.affixed!==i){this.unpin&&this.$element.css("top","");var j="affix"+(i?"-"+i:""),k=a.Event(j+".bs.affix");this.$element.trigger(k),k.isDefaultPrevented()||(this.affixed=i,this.unpin="bottom"==i?this.getPinnedOffset():null,this.$element.removeClass(b.RESET).addClass(j).trigger(a.Event(j.replace("affix","affixed"))),"bottom"==i&&this.$element.offset({top:c-h-this.$element.height()}))}}};var c=a.fn.affix;a.fn.affix=function(c){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof c&&c;e||d.data("bs.affix",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.affix.Constructor=b,a.fn.affix.noConflict=function(){return a.fn.affix=c,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var b=a(this),c=b.data();c.offset=c.offset||{},c.offsetBottom&&(c.offset.bottom=c.offsetBottom),c.offsetTop&&(c.offset.top=c.offsetTop),b.affix(c)})})}(jQuery); \ No newline at end of file diff --git a/qiita_pet/static/vendor/js/dropdown.min.js b/qiita_pet/static/vendor/js/dropdown.min.js new file mode 100644 index 000000000..4de512fde --- /dev/null +++ b/qiita_pet/static/vendor/js/dropdown.min.js @@ -0,0 +1,8 @@ +/* ======================================================================== + * Bootstrap: dropdown.js v3.1.1 + * http://getbootstrap.com/javascript/#dropdowns + * ======================================================================== + * 
Copyright 2011-2014 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ ++function(e){"use strict";function i(r){e(t).remove();e(n).each(function(){var t=s(e(this));var n={relatedTarget:this};if(!t.hasClass("open"))return;t.trigger(r=e.Event("hide.bs.dropdown",n));if(r.isDefaultPrevented())return;t.removeClass("open").trigger("hidden.bs.dropdown",n)})}function s(t){var n=t.attr("data-target");if(!n){n=t.attr("href");n=n&&/#[A-Za-z]/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")}var r=n&&e(n);return r&&r.length?r:t.parent()}var t=".dropdown-backdrop";var n="[data-toggle=dropdown]";var r=function(t){e(t).on("click.bs.dropdown",this.toggle)};r.prototype.toggle=function(t){var n=e(this);if(n.is(".disabled, :disabled"))return;var r=s(n);var o=r.hasClass("open");i();if(!o){if("ontouchstart"in document.documentElement&&!r.closest(".navbar-nav").length){e('