From 0474ebf81cf5a2fe169e190d14d4b18f9ebdf4d1 Mon Sep 17 00:00:00 2001 From: Javier de la Rosa Date: Thu, 15 Mar 2018 23:07:54 -0700 Subject: [PATCH] Initial commit for the new Histonets web interface :fire: --- .coveragerc | 5 + .dockerignore | 4 + .editorconfig | 33 + .envs/.local/.celery | 5 + .envs/.local/.django | 3 + .envs/.local/.postgres | 5 + .gitattributes | 1 + .gitignore | 297 ++++++++ .pylintrc | 14 + .travis.yml | 11 + CONTRIBUTORS.txt | 1 + LICENSE | 10 + Procfile | 2 + README.rst | 131 ++++ compose/local/cantaloupe/Dockerfile | 47 ++ .../local/cantaloupe/cantaloupe.properties | 666 ++++++++++++++++++ compose/local/django/Dockerfile | 54 ++ compose/local/django/celery/beat/start.sh | 10 + compose/local/django/celery/worker/start.sh | 9 + compose/local/django/start.sh | 10 + compose/production/caddy/Caddyfile | 14 + compose/production/caddy/Dockerfile | 3 + compose/production/cantaloupe/Dockerfile | 48 ++ .../cantaloupe/cantaloupe.properties.example | 648 +++++++++++++++++ compose/production/django/Dockerfile | 63 ++ .../production/django/celery/beat/start.sh | 8 + .../production/django/celery/worker/start.sh | 8 + compose/production/django/entrypoint.sh | 40 ++ compose/production/django/gunicorn.sh | 6 + compose/production/postgres/Dockerfile | 10 + compose/production/postgres/backup.sh | 25 + compose/production/postgres/list-backups.sh | 10 + compose/production/postgres/restore.sh | 58 ++ config/__init__.py | 0 config/settings/__init__.py | 0 config/settings/base.py | 258 +++++++ config/settings/local.py | 68 ++ config/settings/production.py | 201 ++++++ config/settings/test.py | 58 ++ config/urls.py | 37 + config/wsgi.py | 42 ++ dev.sh | 3 + docs/Makefile | 153 ++++ docs/__init__.py | 1 + docs/conf.py | 243 +++++++ docs/deploy.rst | 4 + docs/docker_ec2.rst | 186 +++++ docs/index.rst | 26 + docs/install.rst | 4 + docs/make.bat | 190 +++++ histonets/__init__.py | 2 + histonets/contrib/__init__.py | 5 + histonets/contrib/sites/__init__.py | 5 + .../contrib/sites/migrations/0001_initial.py | 31 + .../migrations/0002_alter_domain_unique.py | 20 + .../0003_set_site_domain_and_name.py | 42 ++ .../contrib/sites/migrations/__init__.py | 5 + histonets/static/css/project.css | 13 + histonets/static/fonts/.gitkeep | 0 histonets/static/images/favicon.ico | Bin 0 -> 8348 bytes histonets/static/js/project.js | 21 + .../static/sass/custom_bootstrap_vars.scss | 0 histonets/static/sass/project.scss | 37 + histonets/taskapp/__init__.py | 0 histonets/taskapp/celery.py | 34 + histonets/templates/403_csrf.html | 9 + histonets/templates/404.html | 9 + histonets/templates/500.html | 13 + .../templates/account/account_inactive.html | 12 + histonets/templates/account/base.html | 10 + histonets/templates/account/email.html | 80 +++ .../templates/account/email_confirm.html | 32 + histonets/templates/account/login.html | 48 ++ histonets/templates/account/logout.html | 22 + .../templates/account/password_change.html | 17 + .../templates/account/password_reset.html | 26 + .../account/password_reset_done.html | 17 + .../account/password_reset_from_key.html | 25 + .../account/password_reset_from_key_done.html | 10 + histonets/templates/account/password_set.html | 17 + histonets/templates/account/signup.html | 23 + .../templates/account/signup_closed.html | 12 + .../templates/account/verification_sent.html | 13 + .../account/verified_email_required.html | 24 + histonets/templates/base.html | 112 +++ histonets/templates/pages/about.html | 1 + histonets/templates/pages/home.html | 1 + 
histonets/templates/users/user_detail.html | 36 + histonets/templates/users/user_form.html | 17 + histonets/templates/users/user_list.html | 17 + histonets/users/__init__.py | 0 histonets/users/adapters.py | 13 + histonets/users/admin.py | 39 + histonets/users/apps.py | 16 + histonets/users/migrations/0001_initial.py | 43 ++ histonets/users/migrations/__init__.py | 0 histonets/users/models.py | 17 + histonets/users/tests/__init__.py | 0 histonets/users/tests/factories.py | 11 + histonets/users/tests/test_admin.py | 40 ++ histonets/users/tests/test_models.py | 19 + histonets/users/tests/test_urls.py | 51 ++ histonets/users/tests/test_views.py | 64 ++ histonets/users/urls.py | 27 + histonets/users/views.py | 44 ++ local.yml | 74 ++ locale/README.rst | 6 + manage.py | 29 + merge_production_dotenvs_in_dotenv.py | 70 ++ production.yml | 72 ++ pytest.ini | 2 + requirements.txt | 3 + requirements/base.txt | 23 + requirements/local.txt | 25 + requirements/production.txt | 12 + run.sh | 2 + runtime.txt | 1 + setup.cfg | 7 + utility/install_os_dependencies.sh | 96 +++ utility/install_python_dependencies.sh | 39 + utility/requirements-jessie.apt | 23 + utility/requirements-stretch.apt | 23 + utility/requirements-trusty.apt | 23 + utility/requirements-xenial.apt | 23 + 124 files changed, 5488 insertions(+) create mode 100644 .coveragerc create mode 100644 .dockerignore create mode 100644 .editorconfig create mode 100644 .envs/.local/.celery create mode 100644 .envs/.local/.django create mode 100644 .envs/.local/.postgres create mode 100644 .gitattributes create mode 100644 .gitignore create mode 100644 .pylintrc create mode 100644 .travis.yml create mode 100644 CONTRIBUTORS.txt create mode 100644 LICENSE create mode 100644 Procfile create mode 100644 README.rst create mode 100644 compose/local/cantaloupe/Dockerfile create mode 100644 compose/local/cantaloupe/cantaloupe.properties create mode 100644 compose/local/django/Dockerfile create mode 100644 compose/local/django/celery/beat/start.sh create mode 100644 compose/local/django/celery/worker/start.sh create mode 100644 compose/local/django/start.sh create mode 100644 compose/production/caddy/Caddyfile create mode 100644 compose/production/caddy/Dockerfile create mode 100644 compose/production/cantaloupe/Dockerfile create mode 100644 compose/production/cantaloupe/cantaloupe.properties.example create mode 100644 compose/production/django/Dockerfile create mode 100644 compose/production/django/celery/beat/start.sh create mode 100644 compose/production/django/celery/worker/start.sh create mode 100644 compose/production/django/entrypoint.sh create mode 100644 compose/production/django/gunicorn.sh create mode 100644 compose/production/postgres/Dockerfile create mode 100644 compose/production/postgres/backup.sh create mode 100644 compose/production/postgres/list-backups.sh create mode 100644 compose/production/postgres/restore.sh create mode 100644 config/__init__.py create mode 100644 config/settings/__init__.py create mode 100644 config/settings/base.py create mode 100644 config/settings/local.py create mode 100644 config/settings/production.py create mode 100644 config/settings/test.py create mode 100644 config/urls.py create mode 100644 config/wsgi.py create mode 100755 dev.sh create mode 100644 docs/Makefile create mode 100644 docs/__init__.py create mode 100644 docs/conf.py create mode 100644 docs/deploy.rst create mode 100644 docs/docker_ec2.rst create mode 100644 docs/index.rst create mode 100644 docs/install.rst create mode 100644 docs/make.bat 
create mode 100644 histonets/__init__.py create mode 100644 histonets/contrib/__init__.py create mode 100644 histonets/contrib/sites/__init__.py create mode 100644 histonets/contrib/sites/migrations/0001_initial.py create mode 100644 histonets/contrib/sites/migrations/0002_alter_domain_unique.py create mode 100644 histonets/contrib/sites/migrations/0003_set_site_domain_and_name.py create mode 100644 histonets/contrib/sites/migrations/__init__.py create mode 100644 histonets/static/css/project.css create mode 100644 histonets/static/fonts/.gitkeep create mode 100644 histonets/static/images/favicon.ico create mode 100644 histonets/static/js/project.js create mode 100644 histonets/static/sass/custom_bootstrap_vars.scss create mode 100644 histonets/static/sass/project.scss create mode 100644 histonets/taskapp/__init__.py create mode 100644 histonets/taskapp/celery.py create mode 100644 histonets/templates/403_csrf.html create mode 100644 histonets/templates/404.html create mode 100644 histonets/templates/500.html create mode 100644 histonets/templates/account/account_inactive.html create mode 100644 histonets/templates/account/base.html create mode 100644 histonets/templates/account/email.html create mode 100644 histonets/templates/account/email_confirm.html create mode 100644 histonets/templates/account/login.html create mode 100644 histonets/templates/account/logout.html create mode 100644 histonets/templates/account/password_change.html create mode 100644 histonets/templates/account/password_reset.html create mode 100644 histonets/templates/account/password_reset_done.html create mode 100644 histonets/templates/account/password_reset_from_key.html create mode 100644 histonets/templates/account/password_reset_from_key_done.html create mode 100644 histonets/templates/account/password_set.html create mode 100644 histonets/templates/account/signup.html create mode 100644 histonets/templates/account/signup_closed.html create mode 100644 histonets/templates/account/verification_sent.html create mode 100644 histonets/templates/account/verified_email_required.html create mode 100644 histonets/templates/base.html create mode 100644 histonets/templates/pages/about.html create mode 100644 histonets/templates/pages/home.html create mode 100644 histonets/templates/users/user_detail.html create mode 100644 histonets/templates/users/user_form.html create mode 100644 histonets/templates/users/user_list.html create mode 100644 histonets/users/__init__.py create mode 100644 histonets/users/adapters.py create mode 100644 histonets/users/admin.py create mode 100644 histonets/users/apps.py create mode 100644 histonets/users/migrations/0001_initial.py create mode 100644 histonets/users/migrations/__init__.py create mode 100644 histonets/users/models.py create mode 100644 histonets/users/tests/__init__.py create mode 100644 histonets/users/tests/factories.py create mode 100644 histonets/users/tests/test_admin.py create mode 100644 histonets/users/tests/test_models.py create mode 100644 histonets/users/tests/test_urls.py create mode 100644 histonets/users/tests/test_views.py create mode 100644 histonets/users/urls.py create mode 100644 histonets/users/views.py create mode 100644 local.yml create mode 100644 locale/README.rst create mode 100755 manage.py create mode 100644 merge_production_dotenvs_in_dotenv.py create mode 100644 production.yml create mode 100644 pytest.ini create mode 100644 requirements.txt create mode 100644 requirements/base.txt create mode 100644 requirements/local.txt create mode 100644 
requirements/production.txt create mode 100755 run.sh create mode 100644 runtime.txt create mode 100644 setup.cfg create mode 100755 utility/install_os_dependencies.sh create mode 100755 utility/install_python_dependencies.sh create mode 100644 utility/requirements-jessie.apt create mode 100644 utility/requirements-stretch.apt create mode 100644 utility/requirements-trusty.apt create mode 100644 utility/requirements-xenial.apt diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..00828bf --- /dev/null +++ b/.coveragerc @@ -0,0 +1,5 @@ +[run] +include = histonets/* +omit = *migrations*, *tests* +plugins = + django_coverage_plugin diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..e63c0c1 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +.* +!.coveragerc +!.env +!.pylintrc diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..74b0ded --- /dev/null +++ b/.editorconfig @@ -0,0 +1,33 @@ +# http://editorconfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.{py,rst,ini}] +indent_style = space +indent_size = 4 + +[*.py] +line_length=120 +known_first_party=histonets +multi_line_output=3 +default_section=THIRDPARTY + +[*.{html,css,scss,json,yml}] +indent_style = space +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false + +[Makefile] +indent_style = tab + +[nginx.conf] +indent_style = space +indent_size = 2 diff --git a/.envs/.local/.celery b/.envs/.local/.celery new file mode 100644 index 0000000..0542bf0 --- /dev/null +++ b/.envs/.local/.celery @@ -0,0 +1,5 @@ +# Celery +# ------------------------------------------------------------------------------ +CELERY_BROKER_URL=redis://redis:6379/0 +POSTGRES_USER=EGXfeCxBUhDOoeEPkHlbfOIfcexAXykZ + diff --git a/.envs/.local/.django b/.envs/.local/.django new file mode 100644 index 0000000..630aa89 --- /dev/null +++ b/.envs/.local/.django @@ -0,0 +1,3 @@ +# General +# ------------------------------------------------------------------------------ +USE_DOCKER=yes diff --git a/.envs/.local/.postgres b/.envs/.local/.postgres new file mode 100644 index 0000000..8ab80c7 --- /dev/null +++ b/.envs/.local/.postgres @@ -0,0 +1,5 @@ +# PostgreSQL +# ------------------------------------------------------------------------------ +POSTGRES_DB=histonets +POSTGRES_USER=EGXfeCxBUhDOoeEPkHlbfOIfcexAXykZ +POSTGRES_PASSWORD=9Xll5fyBrjgIRMjWyrA9ohAggMu4EDl7GlKB3EfVm5MeEoSh11Hq2pKVXbJ3a4CA diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..176a458 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +* text=auto diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0d2971f --- /dev/null +++ b/.gitignore @@ -0,0 +1,297 @@ +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +staticfiles/ +assets/ + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# Environments +.venv +venv/ +ENV/ + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + + +### Node template +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Typescript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + + +### Linux template +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + + +### VisualStudioCode template +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json + + + + + +### Windows template +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +Desktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msm +*.msp + +# Windows shortcuts +*.lnk + + +### macOS template +# General +*.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + + +### SublimeText template +# Cache files for Sublime Text +*.tmlanguage.cache +*.tmPreferences.cache +*.stTheme.cache + +# Workspace files are user-specific +*.sublime-workspace + +# Project files should be checked into the repository, unless a significant +# proportion of contributors will probably not be using Sublime Text +# *.sublime-project + +# SFTP configuration file +sftp-config.json + +# Package control specific files +Package Control.last-run +Package Control.ca-list +Package Control.ca-bundle +Package Control.system-ca-bundle +Package Control.cache/ +Package Control.ca-certs/ +Package Control.merged-ca-bundle +Package Control.user-ca-bundle +oscrypto-ca-bundle.crt +bh_unicode_properties.cache + +# Sublime-github package stores a github token in this file +# 
https://packagecontrol.io/packages/sublime-github +GitHub.sublime-settings + + +### Vim template +# Swap +[._]*.s[a-v][a-z] +[._]*.sw[a-p] +[._]s[a-v][a-z] +[._]sw[a-p] + +# Session +Session.vim + +# Temporary +.netrwhist + +# Auto-generated tag files +tags + + +### VirtualEnv template +# Virtualenv +# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ +[Bb]in +[Ii]nclude +[Ll]ib +[Ll]ib64 +[Ll]ocal +[Ss]cripts +pyvenv.cfg +pip-selfcheck.json + + + + +histonets/media/ +compose/production/cantaloupe/cantaloupe.properties + +# Added to maintain local compose files which are ignored by something above. +# See issue https://github.com/pydanny/cookiecutter-django/issues/1321 +!/compose/local/ + +.env +.envs/* +!.envs/.local/ diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..4bd6910 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,14 @@ +[MASTER] +load-plugins=pylint_common, pylint_django, pylint_celery + +[FORMAT] +max-line-length=120 + +[MESSAGES CONTROL] +disable=missing-docstring,invalid-name + +[DESIGN] +max-parents=13 + +[TYPECHECK] +generated-members=REQUEST,acl_users,aq_parent,"[a-zA-Z]+_set{1,2}",save,delete diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..5ca54d0 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,11 @@ +sudo: true +before_install: + - sudo apt-get update -qq + - sudo apt-get install -qq build-essential gettext python-dev zlib1g-dev libpq-dev xvfb + - sudo apt-get install -qq libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms1-dev libwebp-dev + - sudo apt-get install -qq graphviz-dev python-setuptools python3-dev python-virtualenv python-pip + - sudo apt-get install -qq firefox automake libtool libreadline6 libreadline6-dev libreadline-dev + - sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm +language: python +python: + - "3.6" diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt new file mode 100644 index 0000000..9aeeb6e --- /dev/null +++ b/CONTRIBUTORS.txt @@ -0,0 +1 @@ +Center for Interdisciplinary Digital Research (CIDR) diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..79f7dc1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,10 @@ + +The MIT License (MIT) +Copyright (c) 2018, Center for Interdisciplinary Digital Research (CIDR) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/Procfile b/Procfile new file mode 100644 index 0000000..36f80aa --- /dev/null +++ b/Procfile @@ -0,0 +1,2 @@ +web: gunicorn config.wsgi:application +worker: celery worker --app=histonets.taskapp --loglevel=info diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..97d7815 --- /dev/null +++ b/README.rst @@ -0,0 +1,131 @@ +Histonets +========= + +From scanned map images to graph data + +.. image:: https://img.shields.io/badge/built%20with-Cookiecutter%20Django-ff69b4.svg + :target: https://github.com/pydanny/cookiecutter-django/ + :alt: Built with Cookiecutter Django + + +:License: MIT + + +Settings +-------- + +Moved to settings_. + +.. _settings: http://cookiecutter-django.readthedocs.io/en/latest/settings.html + +Basic Commands +-------------- + +Setting Up Your Users +^^^^^^^^^^^^^^^^^^^^^ + +* To create a **normal user account**, just go to Sign Up and fill out the form. Once you submit it, you'll see a "Verify Your E-mail Address" page. Go to your console to see a simulated email verification message. Copy the link into your browser. Now the user's email should be verified and ready to go. + +* To create a **superuser account**, use this command:: + + $ python manage.py createsuperuser + +For convenience, you can keep your normal user logged in on Chrome and your superuser logged in on Firefox (or similar), so that you can see how the site behaves for both kinds of users. + +Test coverage +^^^^^^^^^^^^^ + +To run the tests, check your test coverage, and generate an HTML coverage report:: + + $ coverage run manage.py test + $ coverage html + $ open htmlcov/index.html + +Running tests with py.test +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:: + + $ py.test + + +Live reloading and Sass CSS compilation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Moved to `Live reloading and SASS compilation`_. + +.. _`Live reloading and SASS compilation`: http://cookiecutter-django.readthedocs.io/en/latest/live-reloading-and-sass-compilation.html + + + +Celery +^^^^^^ + +This app comes with Celery. + +To run a Celery worker: + +.. code-block:: bash + + cd histonets + celery -A histonets.taskapp worker -l info + +Please note: For Celery's import magic to work, it is important *where* the celery commands are run. If you are in the same folder as *manage.py*, you should be all set. + +
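As a rough sketch of how a task would plug into this setup (the module path and task name below are placeholders for illustration, not files added by this commit), a function decorated with Celery's ``shared_task`` is picked up by the worker started above, assuming it lives somewhere the app defined in ``histonets/taskapp/celery.py`` can discover, and can then be queued with ``.delay()``:

.. code-block:: python

    # histonets/example_tasks.py -- hypothetical module, for illustration only
    from celery import shared_task

    @shared_task
    def count_nodes(edges):
        """Toy task: count the distinct nodes in an edge list."""
        return len({node for edge in edges for node in edge})

    # From a view or a shell session, enqueue it for the running worker:
    # count_nodes.delay([("a", "b"), ("b", "c")])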
+Email Server +^^^^^^^^^^^^ + +In development, it is often nice to be able to see emails that are being sent from your application. For that reason, a local SMTP server, `MailHog`_, with a web interface is available as a Docker container. + +The MailHog container will start automatically when you run all Docker containers. +Please check `cookiecutter-django Docker documentation`_ for more details on how to start all containers. + +With MailHog running, to view messages that are sent by your application, open your browser and go to ``http://127.0.0.1:8025``. + +.. _mailhog: https://github.com/mailhog/MailHog + + +IIIF Image Server +^^^^^^^^^^^^^^^^^ + +A dockerized version of Cantaloupe is also available and set up in the compose config file. The service +takes a number of properties defined in a `cantaloupe.properties` file and starts by default at ``http://127.0.0.1:8182``, with the admin interface at ``http://127.0.0.1:8182/admin``. + +Images placed in ``/media/iiif`` will be served over IIIF by default. For example, the image ``/media/iiif/ghostdriver.jpg`` will have its ``info.json`` descriptor at ``http://localhost:8182/iiif/2/ghostdriver.jpg/info.json``, and the image itself will be available at ``http://localhost:8182/iiif/2/ghostdriver.jpg/full/full/0/default.jpg``. + + +Docker +^^^^^^ + +A docker-compose config file is provided for local development. For convenience, a couple of scripts are included as well: + +* ``run.sh``, which runs a command inside a specific container (e.g., ``./run.sh django flake8``) + +* ``dev.sh``, which starts all servers and builds, recreates, or removes images if necessary. + + +Deployment +---------- + +The following details how to deploy this application. + + +Heroku +^^^^^^ + +See detailed `cookiecutter-django Heroku documentation`_. + +.. _`cookiecutter-django Heroku documentation`: http://cookiecutter-django.readthedocs.io/en/latest/deployment-on-heroku.html + + + +Docker +^^^^^^ + +See detailed `cookiecutter-django Docker documentation`_. + +.. _`cookiecutter-django Docker documentation`: http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html + + + diff --git a/compose/local/cantaloupe/Dockerfile b/compose/local/cantaloupe/Dockerfile new file mode 100644 index 0000000..52c74d2 --- /dev/null +++ b/compose/local/cantaloupe/Dockerfile @@ -0,0 +1,47 @@ +FROM mostalive/ubuntu-14.04-oracle-jdk8 + +# Adapted from: +# https://github.com/pulibrary/cantaloupe-docker +# https://github.com/kaij/cantaloupe +# https://github.com/MITLibraries/docker-cantaloupe + +ENV CANTALOUPE_VERSION 3.4.1 +EXPOSE 8182 + +# Update packages and install tools +RUN apt-get update -y && apt-get install -y wget unzip graphicsmagick curl build-essential cmake + +# Build OpenJPEG +RUN wget -c https://github.com/uclouvain/openjpeg/archive/v2.1.2.tar.gz -O openjpeg-2.1.2.tar.gz \ + && tar -zxvf openjpeg-2.1.2.tar.gz \ + && cd openjpeg-2.1.2 \ + && mkdir -v build \ + && cd build \ + && cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr .. \ + && make \ + && make install + +# run non-privileged +RUN adduser --system cantaloupe + +# +# Cantaloupe +# +WORKDIR /tmp +RUN curl -OL https://github.com/medusa-project/cantaloupe/releases/download/v$CANTALOUPE_VERSION/Cantaloupe-$CANTALOUPE_VERSION.zip \ + && mkdir -p /usr/local/ \ + && cd /usr/local \ + && unzip /tmp/Cantaloupe-$CANTALOUPE_VERSION.zip \ + && ln -s Cantaloupe-$CANTALOUPE_VERSION cantaloupe \ + && rm -rf /tmp/Cantaloupe-$CANTALOUPE_VERSION \ + && rm /tmp/Cantaloupe-$CANTALOUPE_VERSION.zip + +COPY ./compose/local/cantaloupe/cantaloupe.properties /etc/cantaloupe.properties +RUN mkdir -p /var/log/cantaloupe \ + && mkdir -p /var/cache/cantaloupe \ + && chown -R cantaloupe /var/log/cantaloupe \ + && chown -R cantaloupe /var/cache/cantaloupe \ + && chown cantaloupe /etc/cantaloupe.properties + +USER cantaloupe +CMD ["sh", "-c", "java -Dcantaloupe.config=/etc/cantaloupe.properties -Xmx2g -jar /usr/local/cantaloupe/Cantaloupe-$CANTALOUPE_VERSION.war"] diff --git a/compose/local/cantaloupe/cantaloupe.properties b/compose/local/cantaloupe/cantaloupe.properties new file mode 100644 index 0000000..49ae10a --- /dev/null +++ b/compose/local/cantaloupe/cantaloupe.properties @@ -0,0 +1,666 @@ +########################################################################### +# Sample Cantaloupe configuration file +# +# Copy this file to `cantaloupe.properties` and edit as desired. +# +# Keys may change from version to version. See the "Upgrading" section of +# the website.
+# +# Most changes will take effect without restarting. Those that won't are +# marked with "!!". +########################################################################### + +# !! Leave blank to use the JVM default temporary directory. +temp_pathname = + +# !! Configures the HTTP server. (Standalone mode only.) +http.enabled = true +http.host = 0.0.0.0 +http.port = 8182 +http.http2.enabled = false + +# !! Configures the HTTPS server. (Standalone mode only.) +https.enabled = false +https.host = 0.0.0.0 +https.port = 8183 +# Secure HTTP/2 requires Java 9 or later. +https.http2.enabled = false + +# !! Available values are `JKS` and `PKCS12`. (Standalone mode only.) +https.key_store_type = JKS +https.key_store_password = myPassword +https.key_store_path = /path/to/keystore.jks +https.key_password = myPassword + +# !! Maximum size of the HTTP(S) request queue. Set to 0 to use the default. +http.accept_queue_limit = 0 + +# Base URI to use for internal links, such as Link headers and JSON-LD +# @id values, in a reverse-proxy context. This should only be used when +# X-Forwarded-* headers cannot be used instead. (See the user manual.) +base_uri = + +# Normally, slashes in a URI path component must be percent-encoded as +# "%2F". If your proxy is not able to pass these through without decoding, +# you can define an alternate character or character sequence to substitute +# for a slash. Supply the non-percent-encoded version here, and use the +# percent-encoded version in URLs. +slash_substitute = + +# Maximum number of pixels to return in a response, to prevent overloading +# the server. Requests for more pixels than this will receive an error +# response. Set to 0 for no maximum. +max_pixels = 400000000 + +# Errors will also be logged to the error log (if enabled). +print_stack_trace_on_error_pages = true + +########################################################################### +# DELEGATE SCRIPT +########################################################################### + +# !! Enables the delegate script: a Ruby script containing various delegate +# methods. (See the user manual.) +delegate_script.enabled = false + +# !! This can be an absolute path, or a filename; if only a filename is +# specified, it will be searched for in the same folder as this file, and +# then the current working directory. +delegate_script.pathname = delegates-3.4.rb + +# Enables the invocation cache, which caches method invocations and return +# values in memory. See the user manual for more information. +delegate_script.cache.enabled = false + +########################################################################### +# ENDPOINTS +########################################################################### + +# !! Configures HTTP Basic authentication in all public endpoints. +endpoint.public.auth.basic.enabled = false +endpoint.public.auth.basic.username = admin +endpoint.public.auth.basic.secret = docker + +endpoint.iiif.1.enabled = true +endpoint.iiif.2.enabled = true + +# Controls the response Content-Disposition header for images. Allowed +# values are `inline`, `attachment`, and `none`. This can be overridden +# using the ?response-content-disposition query argument. +endpoint.iiif.content_disposition = inline + +# Minimum size that will be used in info.json `sizes` keys. +endpoint.iiif.min_size = 64 + +# Minimum size that will be used in info.json `tiles` keys. See the user +# manual to learn how these are calculated. 
+endpoint.iiif.min_tile_size = 1024 + +# If true, requests for sizes other than those specified in an info.json +# response will be denied. +endpoint.iiif.2.restrict_to_sizes = false + +# Enables the Control Panel, at /admin. +endpoint.admin.enabled = true +endpoint.admin.username = admin +endpoint.admin.secret = docker + +# Enables the administrative HTTP API. (See the user manual.) +endpoint.api.enabled = true + +# HTTP Basic credentials to access the HTTP API. +endpoint.api.username = admin +endpoint.api.secret = docker + +########################################################################### +# RESOLVERS +########################################################################### + +# Specifies one resolver to translate the identifier in the URL to an image +# source for all requests. Available values are `FilesystemResolver`, +# `HttpResolver`, `JdbcResolver`, `AmazonS3Resolver`, and +# `AzureStorageResolver`. +resolver.static = FilesystemResolver + +# If true, `resolver.static` will be overridden, and the +# `get_resolver(identifier)` delegate script method will be used to select +# a resolver per-request. +resolver.delegate = false + +#---------------------------------------- +# FilesystemResolver +#---------------------------------------- + +# How to look up files. Allowed values are `BasicLookupStrategy` and +# `ScriptLookupStrategy`. ScriptLookupStrategy uses the delegate script for +# dynamic lookups; see the user manual. +FilesystemResolver.lookup_strategy = BasicLookupStrategy + +# Server-side path that will be prefixed to the identifier in the URL. +# Trailing slash is important! +FilesystemResolver.BasicLookupStrategy.path_prefix = /var/lib/cantaloupe/images/ + +# Server-side path or extension that will be suffixed to the identifier in +# the URL. +FilesystemResolver.BasicLookupStrategy.path_suffix = + +#---------------------------------------- +# HttpResolver +#---------------------------------------- + +HttpResolver.trust_all_certs = false +HttpResolver.request_timeout = 10 + +# Tells HttpResolver how to look up resources. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual. +HttpResolver.lookup_strategy = BasicLookupStrategy + +# URL that will be prefixed to the identifier in the request URL. +# Trailing slash is important! +HttpResolver.BasicLookupStrategy.url_prefix = http://localhost/images/ + +# Path, extension, query string, etc. that will be suffixed to the +# identifier in the request URL. +HttpResolver.BasicLookupStrategy.url_suffix = + +# Enables access to resources that require HTTP Basic authentication. +HttpResolver.auth.basic.username = +HttpResolver.auth.basic.secret = + +#---------------------------------------- +# JdbcResolver +#---------------------------------------- + +# Note: JdbcResolver requires some delegate methods to be implemented in +# addition to the configuration here, and a JDBC driver to be installed on +# the classpath; see the user manual. + +# !! +JdbcResolver.url = jdbc:postgresql://localhost:5432/my_database +# !! +JdbcResolver.user = postgres +# !! +JdbcResolver.password = postgres + +# !! Connection timeout in seconds. +JdbcResolver.connection_timeout = 10 + +#---------------------------------------- +# AmazonS3Resolver +#---------------------------------------- + +# !! Credentials for your AWS account. 
+# See: http://aws.amazon.com/security-credentials +# Note that this info can be obtained from elsewhere rather than setting +# it here; see the user manual. +AmazonS3Resolver.access_key_id = +AmazonS3Resolver.secret_key = + +# !! Name of the bucket containing images to be served. +AmazonS3Resolver.bucket.name = + +# !! See: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region +AmazonS3Resolver.bucket.region = + +# Tells AmazonS3Resolver how to look up objects. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual. +AmazonS3Resolver.lookup_strategy = BasicLookupStrategy + +# !! Maximum number of concurrent HTTP connections to AWS. Leave blank to +# use the default. +AmazonS3Resolver.max_connections = + +#---------------------------------------- +# AzureStorageResolver +#---------------------------------------- + +# !! Credentials for your Azure account. +AzureStorageResolver.account_name = +AzureStorageResolver.account_key = + +# !! Name of the container containing images to be served. +AzureStorageResolver.container_name = + +# Tells AzureStorageResolver how to look up objects. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual. +AzureStorageResolver.lookup_strategy = BasicLookupStrategy + +########################################################################### +# PROCESSORS +########################################################################### + +#---------------------------------------- +# Processor Selection +#---------------------------------------- + +# Image processors to use for various source formats. Available values are +# `Java2dProcessor`, `GraphicsMagickProcessor`, `ImageMagickProcessor`, +# `KakaduProcessor`, `OpenJpegProcessor`, `JaiProcessor`, `PdfBoxProcessor`, +# and `FfmpegProcessor`. + +# These extension-specific definitions are optional. +processor.avi = FfmpegProcessor +processor.bmp = +processor.dcm = +processor.flv = FfmpegProcessor +processor.gif = +processor.jp2 = KakaduProcessor +processor.jpg = +processor.mov = FfmpegProcessor +processor.mp4 = FfmpegProcessor +processor.mpg = FfmpegProcessor +processor.pdf = PdfBoxProcessor +processor.png = +processor.tif = +processor.webm = FfmpegProcessor +processor.webp = ImageMagickProcessor + +# Fall back to this processor for any formats not assigned above. +processor.fallback = Java2dProcessor + +#---------------------------------------- +# Global Processor Configuration +#---------------------------------------- + +# Resolution of vector rasterization (of e.g. PDFs) at a scale of 1. +processor.dpi = 150 + +# Expands contrast to utilize available dynamic range. This forces the entire +# source image to be read into memory, so can be slow with large images. +processor.normalize = false + +# Color of the background when an image is rotated or alpha-flattened, for +# output formats that don't support transparency. +# This may not be respected for indexed color derivative images. +processor.background_color = black + +# Available values are `bell`, `bspline`, `bicubic`, `box`, `hermite`, +# `lanczos3`, `mitchell`, `triangle`. (JaiProcessor ignores these.) +processor.downscale_filter = bicubic +processor.upscale_filter = bicubic + +# Intensity of an unsharp mask from 0 to 1. +processor.sharpen = 0 + +# Attempts to copy source image metadata (EXIF, IPTC, XMP) into derivative +# images. 
(This is not foolproof; see the user manual.) +processor.metadata.preserve = false + +# Whether to auto-rotate images using the EXIF `Orientation` field. +# The check for this field can impair performance slightly. +processor.metadata.respect_orientation = false + +# Whether to reduce images with more than 8 bits per sample to 8 bits. +# This only applies to formats that support >8-bit samples, and not all +# processors respect this setting; see the user manual. +processor.limit_to_8_bits = true + +# Progressive JPEGs are usually more compact. +processor.jpg.progressive = true + +# JPEG output quality (1-100). +processor.jpg.quality = 80 + +# TIFF output compression type. Available values are `Deflate`, `JPEG`, +# `LZW`, and `RLE`. Leave blank for no compression. +processor.tif.compression = LZW + +# Available values are `StreamStrategy` and `CacheStrategy`. StreamStrategy +# will try to stream source images from non-filesystem resolvers, when this +# is possible; CacheStrategy will first download them into the source cache +# using FilesystemCache, which must also be configured. +StreamProcessor.retrieval_strategy = StreamStrategy + +#---------------------------------------- +# FfmpegProcessor +#---------------------------------------- + +# Optional absolute path of the directory containing the FFmpeg binaries. +# Overrides the PATH. +FfmpegProcessor.path_to_binaries = + +#---------------------------------------- +# GraphicsMagickProcessor +#---------------------------------------- + +# !! Optional absolute path of the directory containing the GraphicsMagick +# binary. Overrides the PATH. +GraphicsMagickProcessor.path_to_binaries = + +#---------------------------------------- +# ImageMagickProcessor +#---------------------------------------- + +# !! Optional absolute path of the directory containing the ImageMagick +# binary. Overrides the PATH. +ImageMagickProcessor.path_to_binaries = /usr/local/bin + +#---------------------------------------- +# KakaduProcessor +#---------------------------------------- + +# Optional absolute path of the directory containing the Kakadu binaries. +# Overrides the PATH. +KakaduProcessor.path_to_binaries = + +#---------------------------------------- +# OpenJpegProcessor +#---------------------------------------- + +# Optional absolute path of the directory containing the OpenJPEG binaries. +# Overrides the PATH. +OpenJpegProcessor.path_to_binaries = + +########################################################################### +# CLIENT-SIDE CACHING +########################################################################### + +# Whether to enable the response Cache-Control header. +cache.client.enabled = true + +cache.client.max_age = 2592000 +cache.client.shared_max_age = +cache.client.public = true +cache.client.private = false +cache.client.no_cache = false +cache.client.no_store = false +cache.client.must_revalidate = false +cache.client.proxy_revalidate = false +cache.client.no_transform = true + +########################################################################### +# SERVER-SIDE CACHING +########################################################################### + +# Enables the source cache. Note that source images will only be cached +# when a FileProcessor is used with a StreamResolver, or when a +# StreamProcessor is used with `StreamProcessor.retrieval_strategy` set to +# `CacheStrategy`. +cache.server.source.enabled = false + +# FilesystemCache is the only available source cache. 
+cache.server.source = FilesystemCache + +# Enables the derivative (processed image) cache. +cache.server.derivative.enabled = false + +# Available values are `FilesystemCache`, `JdbcCache`, `RedisCache`, +# `HeapCache`, `AmazonS3Cache`, and `AzureStorageCache`. +cache.server.derivative = + +# Whether to use the Java heap as a "level 1" cache for image infos, either +# independently or in front of a "level 2" derivative cache (if enabled). +cache.server.info.enabled = true + +# Time before cached content (excluding info cache content) becomes invalid. +# Set to blank or 0 for never. +cache.server.ttl_seconds = 2592000 + +# If true, when a resolver reports that the requested source image has gone +# missing, all cached information relating to it (if any) will be deleted. +# (This is effectively always false when cache.server.resolve_first is also +# false.) +cache.server.purge_missing = false + +# If true, the source image will be confirmed to exist before a cached copy +# is returned. If false, the cached copy will be returned without checking. +# Resolving first is safer but slower. +cache.server.resolve_first = false + +# !! Enables the cache worker, which periodically purges invalid cache +# items in the background. +cache.server.worker.enabled = false + +# !! The cache worker will start its shifts this many seconds apart. +cache.server.worker.interval = 86400 + +#---------------------------------------- +# FilesystemCache +#---------------------------------------- + +# If this directory does not exist, it will be created automatically. +FilesystemCache.pathname = /var/cache/cantaloupe + +# Levels of folder hierarchy in which to store cached images. Deeper depth +# results in fewer files per directory. Set to 0 to disable subfolders. +# Purge the cache after changing this. +FilesystemCache.dir.depth = 3 + +# Number of characters in hierarchy directory names. Should be set to +# 16^n < (max number of directory entries your filesystem can deal with). +# Purge the cache after changing this. +FilesystemCache.dir.name_length = 2 + +#---------------------------------------- +# HeapCache +#---------------------------------------- + +# Target cache size, in bytes or a number ending in M, MB, G, GB, etc. +# This is not a hard limit, and may be transiently exceeded. +# Ensure your heap can accommodate this size using the -Xmx= VM argument. +HeapCache.target_size = 2G + +# If true, the cache contents will be written to a file on exit, and read +# back in at startup. +HeapCache.persist = false + +# When the contents are persisted, this specifies the location of the cache +# file. If the parent directory does not exist, it will be created +# automatically. +HeapCache.persist.filesystem.pathname = /var/cache/cantaloupe/heap.cache + +#---------------------------------------- +# JdbcCache +#---------------------------------------- + +# !! +JdbcCache.url = jdbc:postgresql://localhost:5432/cantaloupe +# !! +JdbcCache.user = postgres +# !! +JdbcCache.password = + +# !! Connection timeout in seconds. +JdbcCache.connection_timeout = 10 + +# These must be created manually; see the user manual. +JdbcCache.derivative_image_table = derivative_cache +JdbcCache.info_table = info_cache + +#---------------------------------------- +# AmazonS3Cache +#---------------------------------------- + +# !! Credentials for your AWS account. +# See: http://aws.amazon.com/security-credentials +# Note that this info can be obtained from elsewhere rather than setting it +# here; see the user manual. 
+AmazonS3Cache.access_key_id = +AmazonS3Cache.secret_key = + +# !! Name of a bucket to use to hold cached data. +AmazonS3Cache.bucket.name = + +# See: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region +AmazonS3Cache.bucket.region = + +# !! String that will be prefixed to object keys. +AmazonS3Cache.object_key_prefix = + +# !! Maximum number of concurrent HTTP connections to AWS. Leave blank to +# use the default. +AmazonS3Cache.max_connections = + +#---------------------------------------- +# AzureStorageCache +#---------------------------------------- + +# !! Credentials for your Azure account. +AzureStorageCache.account_name = +AzureStorageCache.account_key = + +# !! Name of the container containing cached images. +AzureStorageCache.container_name = + +# !! String that will be prefixed to object keys. +AzureStorageCache.object_key_prefix = + +#---------------------------------------- +# RedisCache +#---------------------------------------- + +# !! Redis connection info. +RedisCache.host = localhost +RedisCache.port = 6379 +RedisCache.ssl = false +RedisCache.password = +RedisCache.database = 0 + +########################################################################### +# OVERLAYS +########################################################################### + +# Whether to enable overlays. +overlays.enabled = false + +# Controls how overlays are configured. `BasicStrategy` will use the +# `overlays.BasicStrategy.*` keys in this section. `ScriptStrategy` will +# use the `overlay` delegate method. See the user manual for more +# information. +overlays.strategy = BasicStrategy + +# `image` or `string`. +overlays.BasicStrategy.type = image + +# Absolute path or URL of the overlay image. Must be a PNG file. +overlays.BasicStrategy.image = /path/to/overlay.png + +# Overlay text. +overlays.BasicStrategy.string = Copyright \u00A9️ My Great Organization\nAll rights reserved. + +# For possible values, launch with the -Dcantaloupe.list_fonts option. +overlays.BasicStrategy.string.font = Helvetica + +# Font size in points. +overlays.BasicStrategy.string.font.size = 24 + +# If the string doesn't fit in the image at the above size, the largest size +# at which it does fit will be used, down to this. +overlays.BasicStrategy.string.font.min_size = 18 + +# Font weight. 1 = regular, 2 = bold. Unfortunately, many fonts don't +# support fractional weights. +overlays.BasicStrategy.string.font.weight = 1.0 + +# Point spacing between glyphs, typically between -0.1 and 0.1. +overlays.BasicStrategy.string.glyph_spacing = 0.02 + +# CSS color syntax is supported. +overlays.BasicStrategy.string.color = white + +# CSS color syntax is supported. +overlays.BasicStrategy.string.stroke.color = black + +# Stroke width in pixels. +overlays.BasicStrategy.string.stroke.width = 1 + +# Color of a rectangular background to draw under the string. +# CSS color syntax and alpha are supported. +overlays.BasicStrategy.string.background.color = rgba(0, 0, 0, 100) + +# Allowed values: `top left`, `top center`, `top right`, `left center`, +# `center`, `right center`, `bottom left`, `bottom center`, `bottom right`. +overlays.BasicStrategy.position = bottom right + +# Pixel margin between the overlay and the image edge. +overlays.BasicStrategy.inset = 10 + +# Output images less than this many pixels wide will not receive an overlay. +# Set to 0 to add the overlay regardless. +overlays.BasicStrategy.output_width_threshold = 400 + +# Output images less than this many pixels tall will not receive an overlay. 
+# Set to 0 to add the overlay regardless. +overlays.BasicStrategy.output_height_threshold = 300 + +########################################################################### +# REDACTIONS +########################################################################### + +# See the user manual for information about how redactions work. +redaction.enabled = false + +########################################################################### +# LOGGING +########################################################################### + +#---------------------------------------- +# Application Log +#---------------------------------------- + +# `trace`, `debug`, `info`, `warn`, `error`, `all`, or `off` +log.application.level = debug + +log.application.ConsoleAppender.enabled = true + +# N.B.: Don't enable FileAppender and RollingFileAppender simultaneously! +log.application.FileAppender.enabled = false +log.application.FileAppender.pathname = /path/to/logs/application.log + +log.application.RollingFileAppender.enabled = false +log.application.RollingFileAppender.pathname = /path/to/logs/application.log +log.application.RollingFileAppender.policy = TimeBasedRollingPolicy +log.application.RollingFileAppender.TimeBasedRollingPolicy.filename_pattern = /path/to/logs/application-%d{yyyy-MM-dd}.log +log.application.RollingFileAppender.TimeBasedRollingPolicy.max_history = 30 + +# See the "SyslogAppender" section for a list of facilities: +# http://logback.qos.ch/manual/appenders.html +log.application.SyslogAppender.enabled = false +log.application.SyslogAppender.host = +log.application.SyslogAppender.port = 514 +log.application.SyslogAppender.facility = LOCAL0 + +#---------------------------------------- +# Error Log +#---------------------------------------- + +# Application log messages with a severity of WARN or greater can be copied +# into a dedicated error log, which may make them easier to spot. + +# N.B.: Don't enable FileAppender and RollingFileAppender simultaneously! +log.error.FileAppender.enabled = false +log.error.FileAppender.pathname = /path/to/logs/error.log + +log.error.RollingFileAppender.enabled = false +log.error.RollingFileAppender.pathname = /path/to/logs/error.log +log.error.RollingFileAppender.policy = TimeBasedRollingPolicy +log.error.RollingFileAppender.TimeBasedRollingPolicy.filename_pattern = /path/to/logs/error-%d{yyyy-MM-dd}.log +log.error.RollingFileAppender.TimeBasedRollingPolicy.max_history = 30 + +#---------------------------------------- +# Access Log +#---------------------------------------- + +log.access.ConsoleAppender.enabled = false + +# N.B.: Don't enable FileAppender and RollingFileAppender simultaneously! +log.access.FileAppender.enabled = false +log.access.FileAppender.pathname = /path/to/logs/access.log + +# RollingFileAppender is an alternative to using something like +# FileAppender + logrotate. 
+log.access.RollingFileAppender.enabled = false +log.access.RollingFileAppender.pathname = /path/to/logs/access.log +log.access.RollingFileAppender.policy = TimeBasedRollingPolicy +log.access.RollingFileAppender.TimeBasedRollingPolicy.filename_pattern = /path/to/logs/access-%d{yyyy-MM-dd}.log +log.access.RollingFileAppender.TimeBasedRollingPolicy.max_history = 30 + +# See the "SyslogAppender" section for a list of facilities: +# http://logback.qos.ch/manual/appenders.html +log.access.SyslogAppender.enabled = false +log.access.SyslogAppender.host = +log.access.SyslogAppender.port = 514 +log.access.SyslogAppender.facility = LOCAL0 diff --git a/compose/local/django/Dockerfile b/compose/local/django/Dockerfile new file mode 100644 index 0000000..55181cd --- /dev/null +++ b/compose/local/django/Dockerfile @@ -0,0 +1,54 @@ +FROM python:3.6-slim-jessie + +ENV PYTHONUNBUFFERED 1 + +ENV PG_MAJOR 9.6 +RUN apt-get update && apt-get install -y --no-install-recommends gnupg dirmngr +RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ jessie-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list +RUN set -ex; \ + # pub 4096R/ACCC4CF8 2011-10-13 [expires: 2019-07-02] + # Key fingerprint = B97B 0AFC AA1A 47F0 44F2 44A0 7FCC 7D46 ACCC 4CF8 + # uid PostgreSQL Debian Repository + key='B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8'; \ + apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys $key + +RUN apt-get update && apt-get install -y --no-install-recommends \ + # building + build-essential python3-dev git-core \ + # psycopg2 dependencies + libpq-dev postgresql-client-common postgresql-client-9.6 python3-psycopg2 \ + # Pillow dependencies + python3-pil liblcms2-dev \ + # Django + gettext graphviz-dev \ + # CFFI dependencies + libffi-dev libssl-dev python3-cffi \ + # Histonets + libblas-dev liblapack-dev gfortran \ + && rm -rf /var/lib/apt/lists/* + +RUN useradd django + +# Requirements have to be pulled and installed here, otherwise caching won't work +COPY ./requirements /requirements +RUN pip install -r /requirements/local.txt + +COPY ./compose/production/django/entrypoint.sh /entrypoint.sh +RUN sed -i 's/\r//' /entrypoint.sh +RUN chmod +x /entrypoint.sh + +COPY ./compose/local/django/start.sh /start.sh +RUN sed -i 's/\r//' /start.sh +RUN chmod +x /start.sh + +COPY ./compose/local/django/celery/worker/start.sh /start-celeryworker.sh +RUN sed -i 's/\r//' /start-celeryworker.sh +RUN chmod +x /start-celeryworker.sh + +COPY ./compose/local/django/celery/beat/start.sh /start-celerybeat.sh +RUN sed -i 's/\r//' /start-celerybeat.sh +RUN chmod +x /start-celerybeat.sh + +WORKDIR /app + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/compose/local/django/celery/beat/start.sh b/compose/local/django/celery/beat/start.sh new file mode 100644 index 0000000..fee1068 --- /dev/null +++ b/compose/local/django/celery/beat/start.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -o errexit +set -o pipefail +set -o nounset +set -o xtrace + + +rm -f './celerybeat.pid' +celery -A histonets.taskapp beat -l INFO diff --git a/compose/local/django/celery/worker/start.sh b/compose/local/django/celery/worker/start.sh new file mode 100644 index 0000000..de064ef --- /dev/null +++ b/compose/local/django/celery/worker/start.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +set -o errexit +set -o pipefail +set -o nounset +set -o xtrace + + +celery -A histonets.taskapp worker -l INFO diff --git a/compose/local/django/start.sh b/compose/local/django/start.sh new file mode 100644 index 0000000..6b8d7db --- /dev/null +++ b/compose/local/django/start.sh @@ -0,0 
+1,10 @@ +#!/bin/bash + +set -o errexit +set -o pipefail +set -o nounset +set -o xtrace + + +python manage.py migrate +python manage.py runserver_plus 0.0.0.0:8000 diff --git a/compose/production/caddy/Caddyfile b/compose/production/caddy/Caddyfile new file mode 100644 index 0000000..d744d1c --- /dev/null +++ b/compose/production/caddy/Caddyfile @@ -0,0 +1,14 @@ +www.{$DOMAIN_NAME} { + redir https://histonets.stanford.edu +} + +{$DOMAIN_NAME} { + proxy / django:5000 { + header_upstream Host {host} + header_upstream X-Real-IP {remote} + header_upstream X-Forwarded-Proto {scheme} + } + log stdout + errors stdout + gzip +} diff --git a/compose/production/caddy/Dockerfile b/compose/production/caddy/Dockerfile new file mode 100644 index 0000000..d02f034 --- /dev/null +++ b/compose/production/caddy/Dockerfile @@ -0,0 +1,3 @@ +FROM abiosoft/caddy:0.10.6 + +COPY ./compose/production/caddy/Caddyfile /etc/Caddyfile diff --git a/compose/production/cantaloupe/Dockerfile b/compose/production/cantaloupe/Dockerfile new file mode 100644 index 0000000..eadc8a8 --- /dev/null +++ b/compose/production/cantaloupe/Dockerfile @@ -0,0 +1,48 @@ +FROM mostalive/ubuntu-14.04-oracle-jdk8 + +# Adapted from: +# https://github.com/pulibrary/cantaloupe-docker +# https://github.com/kaij/cantaloupe +# https://github.com/MITLibraries/docker-cantaloupe + +ENV CANTALOUPE_VERSION 3.4.1 +EXPOSE 8182 + +# Update packages and install tools +RUN apt-get update -y && apt-get install -y wget unzip graphicsmagick curl build-essential cmake + +#Build OpenJPEG +RUN wget -c https://github.com/uclouvain/openjpeg/archive/v2.1.2.tar.gz -O openjpeg-2.1.2.tar.gz \ + && tar -zxvf openjpeg-2.1.2.tar.gz \ + && cd openjpeg-2.1.2 \ + && mkdir -v build \ + && cd build \ + && cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr .. \ + && make \ + && make install + +# run non priviledged +RUN adduser --system cantaloupe + +# +# Cantaloupe +# +WORKDIR /tmp +RUN curl -OL https://github.com/medusa-project/cantaloupe/releases/download/v$CANTALOUPE_VERSION/Cantaloupe-$CANTALOUPE_VERSION.zip \ + && mkdir -p /usr/local/ \ + && cd /usr/local \ + && unzip /tmp/Cantaloupe-$CANTALOUPE_VERSION.zip \ + && ln -s Cantaloupe-$CANTALOUPE_VERSION cantaloupe \ + && rm -rf /tmp/Cantaloupe-$CANTALOUPE_VERSION \ + && rm /tmp/Cantaloupe-$CANTALOUPE_VERSION.zip + +COPY ./compose/production/cantaloupe/cantaloupe.properties /etc/cantaloupe.properties + +RUN mkdir -p /var/log/cantaloupe \ + && mkdir -p /var/cache/cantaloupe \ + && chown -R cantaloupe /var/log/cantaloupe \ + && chown -R cantaloupe /var/cache/cantaloupe \ + && chown cantaloupe /etc/cantaloupe.properties + +USER cantaloupe +CMD ["sh", "-c", "java -Dcantaloupe.config=/etc/cantaloupe.properties -Xmx2g -jar /usr/local/cantaloupe/Cantaloupe-$CANTALOUPE_VERSION.war"] diff --git a/compose/production/cantaloupe/cantaloupe.properties.example b/compose/production/cantaloupe/cantaloupe.properties.example new file mode 100644 index 0000000..a817160 --- /dev/null +++ b/compose/production/cantaloupe/cantaloupe.properties.example @@ -0,0 +1,648 @@ +########################################################################### +# Sample Cantaloupe configuration file +# +# Copy this file to `cantaloupe.properties` and edit as desired. +# +# Most changes will take effect without restarting. Those that won't are +# marked with "!!". +########################################################################### + +# !! Whether to enable HTTP access (http://), and on what host interface +# and TCP port. 
(Applies in standalone mode only.) +http.enabled = true +http.host = 0.0.0.0 +http.port = 8182 + +# !! Whether to enable HTTPS access (https://), and on what host interface +# and TCP port. (Applies in standalone mode only.) +https.enabled = false +https.host = 0.0.0.0 +https.port = 8183 + +# !! Available values are `JKS` and `PKCS12`. (Standalone mode only.) +https.key_store_type = JKS +https.key_store_password = myPassword +https.key_store_path = /path/to/keystore.jks +https.key_password = myPassword + +# !! Configures HTTP Basic authentication. +auth.basic.enabled = false +auth.basic.username = myself +auth.basic.secret = mypassword + +# Enables the Control Panel, at /admin. +admin.enabled = true +# Password to access the Control Panel. (The username is "admin".) +admin.password = docker + +# Base URI to use for internal links, such as Link headers and JSON-LD @id +# values, in a reverse-proxy context. This should only be used when +# X-Forwarded-* headers cannot be used instead (see the user manual). +base_uri = + +# Normally, slashes in a URI path component must be percent-encoded as +# "%2F". If your proxy is incapable of passing these through without +# decoding them, you can define an alternate character or character +# sequence to substitute for a slash. Supply the non-percent-encoded +# version here, and use the percent-encoded version in URLs. +slash_substitute = + +# Maximum number of pixels to return in a response, to prevent overloading +# the server. Requests for more pixels than this will receive an error +# response. Set to 0 for no maximum. +max_pixels = 400000000 + +# Sometimes helpful. +print_stack_trace_on_error_pages = true + +########################################################################### +# DELEGATE SCRIPT +########################################################################### + +# !! Enables the delegate script: a Ruby script containing various delegate +# methods. (See the user manual.) +delegate_script.enabled = false + +# !! This can be an absolute path, or a filename; if only a +# filename is specified, it will be searched for in the same folder as this +# file, and then the current working directory. +delegate_script.pathname = delegates.rb + +# Enables the invocation cache, which caches method invocations and +# return values in memory. See the user manual for more information. +delegate_script.cache.enabled = false + +# !! Maximum size of the invocation cache. This is pretty much a wild +# guess. +delegate_script.cache.max_size = 1000000 + +########################################################################### +# ENDPOINTS +########################################################################### + +endpoint.iiif.1.enabled = true + +endpoint.iiif.2.enabled = true + +# Controls the response Content-Disposition header for images. Allowed +# values are `inline`, `attachment`, and `none`. +endpoint.iiif.content_disposition = inline + +# Minimum size that will be used in info.json "tiles" keys. See the user +# manual for an explanation of how these are calculated. +endpoint.iiif.min_tile_size = 1024 + +# If true, requests for sizes other than those specified in an info.json +# response will be denied. +endpoint.iiif.2.restrict_to_sizes = false + +# Enables the administrative REST API. (See the user manual.) +endpoint.api.enabled = false + +# HTTP Basic credentials to access the REST API. 
+endpoint.api.username = +endpoint.api.secret = + +########################################################################### +# RESOLVERS +########################################################################### + +# Specifies one resolver to translate the identifier in the URL to an image +# source for all requests. Available values are `FilesystemResolver`, +# `HttpResolver`, `JdbcResolver`, `AmazonS3Resolver`, and +# `AzureStorageResolver`. +resolver.static = FilesystemResolver + +# If true, `resolver.static` will be overridden, and the +# `get_resolver(identifier)` delegate script method will be used to select +# a resolver per-request. +resolver.delegate = false + +#---------------------------------------- +# FilesystemResolver +#---------------------------------------- + +# Tells FilesystemResolver how to look up files. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual for +# details. +FilesystemResolver.lookup_strategy = BasicLookupStrategy + +# Server-side path that will be prefixed to the identifier in the URL. +# Trailing slash is important. +FilesystemResolver.BasicLookupStrategy.path_prefix = /var/lib/cantaloupe/images/ + +# Server-side path or extension that will be suffixed to the identifier in +# the URL. +FilesystemResolver.BasicLookupStrategy.path_suffix = + +#---------------------------------------- +# HttpResolver +#---------------------------------------- + +# Tells HttpResolver how to look up resources. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual for +# details. +HttpResolver.lookup_strategy = BasicLookupStrategy + +# URL that will be prefixed to the identifier in the request URL. Trailing +# slash is important. +HttpResolver.BasicLookupStrategy.url_prefix = http://localhost/images/ + +# Path, extension, query string, etc. that will be suffixed to the +# identifier in the request URL. +HttpResolver.BasicLookupStrategy.url_suffix = + +# Used for HTTP Basic authentication. +HttpResolver.auth.basic.username = +HttpResolver.auth.basic.secret = + +#---------------------------------------- +# JdbcResolver +#---------------------------------------- + +# Note: JdbcResolver requires some delegate methods to be implemented in +# addition to the configuration here; see the user manual. + +# !! +JdbcResolver.url = jdbc:postgresql://localhost:5432/my_database +# !! +JdbcResolver.user = postgres +# !! +JdbcResolver.password = postgres + +# !! Connection timeout in seconds. +JdbcResolver.connection_timeout = 10 + +#---------------------------------------- +# AmazonS3Resolver +#---------------------------------------- + +# !! Access key ID and secret key associated with your AWS account. +# See: http://aws.amazon.com/security-credentials +AmazonS3Resolver.access_key_id = +AmazonS3Resolver.secret_key = + +# !! Name of the bucket containing images to be served. +AmazonS3Resolver.bucket.name = + +# !! Can be left blank. +# See: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region +AmazonS3Resolver.bucket.region = + +# Tells AmazonS3Resolver how to look up objects. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual for +# details. 
+AmazonS3Resolver.lookup_strategy = BasicLookupStrategy + +#---------------------------------------- +# AzureStorageResolver +#---------------------------------------- + +# !! Name of your Azure account. +AzureStorageResolver.account_name = + +# !! Key of your Azure account. +AzureStorageResolver.account_key = + +# !! Name of the container containing images to be served. +AzureStorageResolver.container_name = + +# Tells AzureStorageResolver how to look up objects. Allowed values are +# `BasicLookupStrategy` and `ScriptLookupStrategy`. ScriptLookupStrategy +# uses the delegate script for dynamic lookups; see the user manual for +# details. +AzureStorageResolver.lookup_strategy = BasicLookupStrategy + +########################################################################### +# PROCESSORS +########################################################################### + +# Image processors to use for various source formats. Available values are +# `Java2dProcessor`, `GraphicsMagickProcessor`, `ImageMagickProcessor`, +# `KakaduProcessor`, `OpenJpegProcessor`, `JaiProcessor`, `PdfBoxProcessor`, +# and `FfmpegProcessor`. + +# These extension-specific definitions are optional. +processor.avi = FfmpegProcessor +processor.bmp = +processor.gif = +processor.jp2 = OpenJpegProcessor +processor.jpg = +processor.mov = FfmpegProcessor +processor.mp4 = FfmpegProcessor +processor.mpg = FfmpegProcessor +processor.pdf = PdfBoxProcessor +processor.png = +processor.tif = Java2dProcessor +processor.webm = FfmpegProcessor +processor.webp = ImageMagickProcessor + +# Fall back to this processor for any formats not assigned above. +processor.fallback = Java2dProcessor + +# Available values are `StreamStrategy` and `CacheStrategy`. StreamStrategy +# will try to stream source images from non-filesystem resolvers, when this +# is possible; CacheStrategy will first download them into the source cache +# using FilesystemCache, which must also be configured. +StreamProcessor.retrieval_strategy = StreamStrategy + +#---------------------------------------- +# FfmpegProcessor +#---------------------------------------- + +# Optional absolute path of the directory containing the FFmpeg binaries. +# Overrides the PATH. +FfmpegProcessor.path_to_binaries = + +# See Java2dProcessor.upscale_filter for a list of available filters. +FfmpegProcessor.upscale_filter = bicubic +FfmpegProcessor.downscale_filter = bicubic + +# Intensity of an unsharp mask from 0 to 1. +FfmpegProcessor.sharpen = 0 + +#---------------------------------------- +# GraphicsMagickProcessor +#---------------------------------------- + +# !! Optional absolute path of the directory containing the GraphicsMagick +# binary. Overrides the PATH. +GraphicsMagickProcessor.path_to_binaries = + +# Color of the background when an image is rotated. Only affects output +# formats that do not support transparency. +GraphicsMagickProcessor.background_color = black + +# Adjusts levels to utilize available dynamic range. +GraphicsMagickProcessor.normalize = false + +# Intensity of an unsharp mask from 0 to 1. +GraphicsMagickProcessor.sharpen = 0 + +#---------------------------------------- +# ImageMagickProcessor +#---------------------------------------- + +# !! Optional absolute path of the directory containing the ImageMagick +# binaries. Overrides the PATH. +ImageMagickProcessor.path_to_binaries = /usr/local/bin + +# Color of the background when an image is rotated. Only affects output +# formats that do not support transparency. 
+ImageMagickProcessor.background_color = black + +# Expands contrast to utilize available dynamic range. +ImageMagickProcessor.normalize = false + +# Intensity of an unsharp mask from 0 to 1. +ImageMagickProcessor.sharpen = 0 + +#---------------------------------------- +# JaiProcessor +#---------------------------------------- + +# Expands contrast to utilize available dynamic range. This forces the entire +# source image to be read into memory, so can be slow with large images. +JaiProcessor.normalize = false + +# Intensity of an unsharp mask from 0 to 1. +JaiProcessor.sharpen = 0 + +# JPEG output quality. Should be a number between 0-1. +JaiProcessor.jpg.quality = 0.8 + +# TIFF output compression type. Available values are `LZW`, `Deflate`, +# `ZLib`, `JPEG`, and `PackBits`. Leave blank for no compression. +JaiProcessor.tif.compression = LZW + +#---------------------------------------- +# Java2dProcessor +#---------------------------------------- + +# Available values are `bell`, `bspline`, `bicubic`, `box`, `hermite`, +# `lanczos3`, `mitchell`, `triangle`. +Java2dProcessor.upscale_filter = bicubic +Java2dProcessor.downscale_filter = bicubic + +# Expands contrast to utilize available dynamic range. This forces the entire +# source image to be read into memory, so can be slow with large images. +Java2dProcessor.normalize = false + +# Intensity of an unsharp mask from 0 to 1. +Java2dProcessor.sharpen = 0 + +# JPEG output quality. Should be a number between 0-1. +Java2dProcessor.jpg.quality = 0.8 + +# TIFF output compression type. Available values are `LZW`, `Deflate`, +# `ZLib`, `JPEG`, and `PackBits`. Leave blank for no compression. +Java2dProcessor.tif.compression = LZW + +#---------------------------------------- +# KakaduProcessor +#---------------------------------------- + +# Optional absolute path of the directory containing the Kakadu binaries. +# Overrides the PATH. +KakaduProcessor.path_to_binaries = + +# Expands contrast to utilize available dynamic range. This forces the entire +# image area to be read into memory. +KakaduProcessor.normalize = false + +# See Java2dProcessor.upscale_filter for a list of available filters. +KakaduProcessor.upscale_filter = bicubic +KakaduProcessor.downscale_filter = bicubic + +# Intensity of an unsharp mask from 0 to 1. +KakaduProcessor.sharpen = 0 + +#---------------------------------------- +# OpenJpegProcessor +#---------------------------------------- + +# Optional absolute path of the directory containing the OpenJPEG binaries. +# Overrides the PATH. +OpenJpegProcessor.path_to_binaries = /usr/bin + +# Expands contrast to utilize available dynamic range. This forces the entire +# image area to be read into memory. +OpenJpegProcessor.normalize = false + +# See Java2dProcessor.upscale_filter for a list of available filters. +OpenJpegProcessor.upscale_filter = bicubic +OpenJpegProcessor.downscale_filter = bicubic + +# Intensity of an unsharp mask from 0 to 1. +OpenJpegProcessor.sharpen = 0 + +#---------------------------------------- +# PdfBoxProcessor +#---------------------------------------- + +# Resolution of the PDF rasterization at a scale of 1. Requests for +# scales less than 0.5 or larger than 2 will automatically use a lower or +# higher factor of this. +PdfBoxProcessor.dpi = 150 + +# See Java2dProcessor.upscale_filter for a list of available filters. +PdfBoxProcessor.upscale_filter = bicubic +PdfBoxProcessor.downscale_filter = bicubic + +# Intensity of an unsharp mask from 0 to 1. 
+PdfBoxProcessor.sharpen = 0 + +########################################################################### +# CLIENT-SIDE CACHING +########################################################################### + +# Whether to enable the response Cache-Control header. +cache.client.enabled = true + +cache.client.max_age = 2592000 +cache.client.shared_max_age = +cache.client.public = true +cache.client.private = false +cache.client.no_cache = false +cache.client.no_store = false +cache.client.must_revalidate = false +cache.client.proxy_revalidate = false +cache.client.no_transform = true + +########################################################################### +# SERVER-SIDE CACHING +########################################################################### + +# Enables the source cache. The only available value is `FilesystemCache`. +# Set blank to disable source image caching. +# Note that source images will only be cached when a FileProcessor is used +# with a StreamResolver, or when a StreamProcessor is used with +# `StreamProcessor.retrieval_strategy` set to `CacheStrategy`. +cache.source = + +# Enables the derivative (processed image) cache. Available values are +# `FilesystemCache`, `JdbcCache`, `AmazonS3Cache`, and `AzureStorageCache`. +# Set blank to disable derivative caching. +cache.derivative = FilesystemCache + +# Time before a cached image becomes stale and needs to be reloaded. Set to +# blank or 0 for infinite. +cache.server.ttl_seconds = 2592000 + +# If true, when a resolver reports that the requested source image has gone +# missing, all cached information relating to it (if any) will be deleted. +# (This is effectively always false when cache.server.resolve_first is also +# false.) +cache.server.purge_missing = false + +# If true, the source image will be confirmed to exist before a cached copy +# is returned. If false, the cached copy will be returned without any +# checking. Resolving first is slower but safer. +cache.server.resolve_first = false + +# !! Enables the cache worker, which periodically purges expired cache +# items in the background. +cache.server.worker.enabled = false + +# !! The cache worker will wait this many seconds between purgings. +cache.server.worker.interval = 86400 + +#---------------------------------------- +# FilesystemCache +#---------------------------------------- + +# If this directory does not exist, it will be created automatically. +FilesystemCache.pathname = /var/cache/cantaloupe + +# Levels of folder hierarchy in which to store cached images. Deeper depth +# results in fewer files per directory. Set to 0 to disable subfolders. +# Purge the cache after changing this. +FilesystemCache.dir.depth = 3 + +# Number of characters in hierarchy directory names. Should be set to +# 16^n < (max number of directory entries your filesystem can deal with). +# Purge the cache after changing this. +FilesystemCache.dir.name_length = 2 + +#---------------------------------------- +# JdbcCache +#---------------------------------------- + +# !! +JdbcCache.url = jdbc:postgresql://localhost:5432/cantaloupe +# !! +JdbcCache.user = postgres +# !! +JdbcCache.password = + +# !! Connection timeout in seconds. +JdbcCache.connection_timeout = 10 + +# These must be created manually; see the user manual. +JdbcCache.derivative_image_table = derivative_cache +JdbcCache.info_table = info_cache + +#---------------------------------------- +# AmazonS3Cache +#---------------------------------------- + +# !! 
Access key ID and secret key associated with your AWS account. +# See: http://aws.amazon.com/security-credentials +AmazonS3Cache.access_key_id = +AmazonS3Cache.secret_key = + +# !! Name of a bucket to use to hold cached data. +AmazonS3Cache.bucket.name = + +# !! Can be left blank. +# See: http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region +AmazonS3Cache.bucket.region = + +# !! String that will be prefixed to object keys. +AmazonS3Cache.object_key_prefix = + +#---------------------------------------- +# AzureStorageCache +#---------------------------------------- + +# !! Name of your Azure account. +AzureStorageCache.account_name = + +# !! Key of your Azure account. +AzureStorageCache.account_key = + +# !! Name of the container containing cached images. +AzureStorageCache.container_name = + +# !! String that will be prefixed to object keys. +AzureStorageCache.object_key_prefix = + +########################################################################### +# OVERLAYS +########################################################################### + +# Whether to enable overlays. +overlays.enabled = false + +# Specifies how overlays are configured. `BasicStrategy` will use the +# `overlays.BasicStrategy.*` keys in this section. `ScriptStrategy` will +# use the `overlay` delegate method. See the user manual for more +# information. +overlays.strategy = BasicStrategy + +# `image` or `string`. +overlays.BasicStrategy.type = image + +# Absolute path or URL of the overlay image. Must be a PNG file. +overlays.BasicStrategy.image = /path/to/overlay.png + +# Overlay text. +overlays.BasicStrategy.string = Copyright \u00A9️ My Great Organization\nAll rights reserved. + +# For possible values, launch with the -Dcantaloupe.list_fonts option. +overlays.BasicStrategy.string.font = Helvetica + +# Font size in points. +overlays.BasicStrategy.string.font.size = 18 + +# Color name, rgb(r,g,b), and #rrggbb syntax are supported. +overlays.BasicStrategy.string.color = white + +# Color name, rgb(r,g,b), and #rrggbb syntax are supported. +overlays.BasicStrategy.string.stroke.color = black + +# Stroke width in pixels. +overlays.BasicStrategy.string.stroke.width = 1 + +# Allowed values: `top left`, `top center`, `top right`, `left center`, +# `center`, `right center`, `bottom left`, `bottom center`, `bottom right`. +overlays.BasicStrategy.position = bottom right + +# Pixel margin between the overlay and the image edge. +overlays.BasicStrategy.inset = 10 + +# Output images less than this many pixels wide will not receive an overlay. +# Set to 0 to add the overlay regardless. +overlays.BasicStrategy.output_width_threshold = 400 + +# Output images less than this many pixels tall will not receive an overlay. +# Set to 0 to add the overlay regardless. +overlays.BasicStrategy.output_height_threshold = 300 + +########################################################################### +# REDACTIONS +########################################################################### + +# Whether to enable redactions. See the user manual for information about +# how these work. +redaction.enabled = false + +########################################################################### +# METADATA +########################################################################### + +# Whether to attempt to copy source image metadata (EXIF, IPTC, XMP) into +# derivative images. (This is not foolproof; see the user manual.) +metadata.preserve = false + +# Whether to respect the EXIF "Orientation" field to auto-rotate images. 
+# The check for this field can impair performance slightly. +metadata.respect_orientation = false + +########################################################################### +# LOGGING +########################################################################### + +#---------------------------------------- +# Application Log +#---------------------------------------- + +# `trace`, `debug`, `info`, `warn`, `error`, `all`, or `off` +log.application.level = debug + +log.application.ConsoleAppender.enabled = true + +log.application.FileAppender.enabled = false +log.application.FileAppender.pathname = /path/to/logs/application.log + +# RollingFileAppender is an alternative to using something like +# FileAppender + logrotate. +log.application.RollingFileAppender.enabled = false +log.application.RollingFileAppender.pathname = /path/to/logs/application.log +log.application.RollingFileAppender.policy = TimeBasedRollingPolicy +log.application.RollingFileAppender.TimeBasedRollingPolicy.filename_pattern = /path/to/logs/application-%d{yyyy-MM-dd}.log +log.application.RollingFileAppender.TimeBasedRollingPolicy.max_history = 30 + +# See the "SyslogAppender" section for a list of facilities: +# http://logback.qos.ch/manual/appenders.html +log.application.SyslogAppender.enabled = false +log.application.SyslogAppender.host = +log.application.SyslogAppender.port = 514 +log.application.SyslogAppender.facility = LOCAL0 + +#---------------------------------------- +# Access Log +#---------------------------------------- + +log.access.ConsoleAppender.enabled = true + +log.access.FileAppender.enabled = false +log.access.FileAppender.pathname = /path/to/logs/access.log + +# RollingFileAppender is an alternative to using something like +# FileAppender + logrotate. +log.access.RollingFileAppender.enabled = false +log.access.RollingFileAppender.pathname = /path/to/logs/access.log +log.access.RollingFileAppender.policy = TimeBasedRollingPolicy +log.access.RollingFileAppender.TimeBasedRollingPolicy.filename_pattern = /path/to/logs/access-%d{yyyy-MM-dd}.log +log.access.RollingFileAppender.TimeBasedRollingPolicy.max_history = 30 + +# See the "SyslogAppender" section for a list of facilities: +# http://logback.qos.ch/manual/appenders.html +log.access.SyslogAppender.enabled = false +log.access.SyslogAppender.host = +log.access.SyslogAppender.port = 514 +log.access.SyslogAppender.facility = LOCAL0 diff --git a/compose/production/django/Dockerfile b/compose/production/django/Dockerfile new file mode 100644 index 0000000..c1e6d06 --- /dev/null +++ b/compose/production/django/Dockerfile @@ -0,0 +1,63 @@ +FROM python:3.6-slim-jessie + +ENV PYTHONUNBUFFERED 1 + +ENV PG_MAJOR 9.6 +RUN apt-get update && apt-get install -y --no-install-recommends gnupg dirmngr +RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ jessie-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list +RUN set -ex; \ + # pub 4096R/ACCC4CF8 2011-10-13 [expires: 2019-07-02] + # Key fingerprint = B97B 0AFC AA1A 47F0 44F2 44A0 7FCC 7D46 ACCC 4CF8 + # uid PostgreSQL Debian Repository + key='B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8'; \ + apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys $key + +RUN apt-get update && apt-get install -y --no-install-recommends \ + # building + build-essential python3-dev git-core \ + # psycopg2 dependencies + libpq-dev postgresql-client-common postgresql-client-9.6 python3-psycopg2 \ + # Pillow dependencies + python3-pil liblcms2-dev \ + # Django + gettext graphviz-dev \ + # CFFI dependencies + libffi-dev 
libssl-dev python3-cffi \ + # Histonets + libblas-dev liblapack-dev gfortran \ + && rm -rf /var/lib/apt/lists/* + +RUN useradd django + +# Requirements have to be pulled and installed here, otherwise caching won't work +COPY ./requirements /requirements +RUN pip install --no-cache-dir -r /requirements/production.txt \ + && rm -rf /requirements + +COPY ./compose/production/django/gunicorn.sh /gunicorn.sh +RUN sed -i 's/\r//' /gunicorn.sh +RUN chmod +x /gunicorn.sh +RUN chown django /gunicorn.sh + +COPY ./compose/production/django/entrypoint.sh /entrypoint.sh +RUN sed -i 's/\r//' /entrypoint.sh +RUN chmod +x /entrypoint.sh +RUN chown django /entrypoint.sh + +COPY ./compose/production/django/celery/worker/start.sh /start-celeryworker.sh +RUN sed -i 's/\r//' /start-celeryworker.sh +RUN chmod +x /start-celeryworker.sh + +COPY ./compose/production/django/celery/beat/start.sh /start-celerybeat.sh +RUN sed -i 's/\r//' /start-celerybeat.sh +RUN chmod +x /start-celerybeat.sh + +COPY . /app + +RUN chown -R django /app + +USER django + +WORKDIR /app + +ENTRYPOINT ["/entrypoint.sh"] diff --git a/compose/production/django/celery/beat/start.sh b/compose/production/django/celery/beat/start.sh new file mode 100644 index 0000000..bf8076c --- /dev/null +++ b/compose/production/django/celery/beat/start.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +set -o errexit +set -o pipefail +set -o nounset + + +celery -A histonets.taskapp beat -l INFO diff --git a/compose/production/django/celery/worker/start.sh b/compose/production/django/celery/worker/start.sh new file mode 100644 index 0000000..d3bd126 --- /dev/null +++ b/compose/production/django/celery/worker/start.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +set -o errexit +set -o pipefail +set -o nounset + + +celery -A histonets.taskapp worker -l INFO diff --git a/compose/production/django/entrypoint.sh b/compose/production/django/entrypoint.sh new file mode 100644 index 0000000..8540bc1 --- /dev/null +++ b/compose/production/django/entrypoint.sh @@ -0,0 +1,40 @@ +#!/bin/bash +set -euxo pipefail + + +cmd="$@" + +if [ -z "${POSTGRES_USER}" ]; then + # the official postgres image uses 'postgres' as default user if not set explictly. + export POSTGRES_USER=postgres +fi +export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB}" + +postgres_ready() { +python << END +import sys + +import psycopg2 + +try: + psycopg2.connect( + dbname="${POSTGRES_DB}", + user="${POSTGRES_USER}", + password="${POSTGRES_PASSWORD}", + host="postgres" + ) +except psycopg2.OperationalError: + sys.exit(-1) +sys.exit(0) + +END +} + +until postgres_ready; do + >&2 echo 'PostgreSQL is unavailable (sleeping)...' + sleep 1 +done + +>&2 echo 'PostgreSQL is up - continuing...' 
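+# A minimal usage sketch (assuming the Django service defined in local.yml /
+# production.yml is named "django" and routes its commands through this
+# entrypoint; those compose files are not reproduced here):
+#
+#   docker-compose -f production.yml run --rm django python manage.py migrate
+#   docker-compose -f production.yml run --rm django /gunicorn.sh
+#
+# Either command blocks in the loop above until PostgreSQL accepts
+# connections, then falls through to the exec below.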
+ +exec $cmd diff --git a/compose/production/django/gunicorn.sh b/compose/production/django/gunicorn.sh new file mode 100644 index 0000000..2f4f906 --- /dev/null +++ b/compose/production/django/gunicorn.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -euxo pipefail + + +python /app/manage.py collectstatic --noinput +/usr/local/bin/gunicorn config.wsgi -b 0.0.0.0:5000 --chdir=/app diff --git a/compose/production/postgres/Dockerfile b/compose/production/postgres/Dockerfile new file mode 100644 index 0000000..71f7a6d --- /dev/null +++ b/compose/production/postgres/Dockerfile @@ -0,0 +1,10 @@ +FROM postgres:10.1 + +COPY ./compose/production/postgres/backup.sh /usr/local/bin/backup +RUN chmod +x /usr/local/bin/backup + +COPY ./compose/production/postgres/restore.sh /usr/local/bin/restore +RUN chmod +x /usr/local/bin/restore + +COPY ./compose/production/postgres/list-backups.sh /usr/local/bin/list-backups +RUN chmod +x /usr/local/bin/list-backups diff --git a/compose/production/postgres/backup.sh b/compose/production/postgres/backup.sh new file mode 100644 index 0000000..4643801 --- /dev/null +++ b/compose/production/postgres/backup.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -o errexit +set -o pipefail +set -o nounset + + +# we might run into trouble when using the default `postgres` user, e.g. when dropping the postgres +# database in restore.sh. Check that something else is used here +if [ "$POSTGRES_USER" == "postgres" ] +then + echo "creating a backup as the postgres user is not supported, make sure to set the POSTGRES_USER environment variable" + exit 1 +fi + +# export the postgres password so that subsequent commands don't ask for it +export PGPASSWORD=$POSTGRES_PASSWORD + +echo "creating backup" +echo "---------------" + +FILENAME=backup_$(date +'%Y_%m_%dT%H_%M_%S').sql.gz +pg_dump -h postgres -U $POSTGRES_USER | gzip > /backups/$FILENAME + +echo "successfully created backup $FILENAME" diff --git a/compose/production/postgres/list-backups.sh b/compose/production/postgres/list-backups.sh new file mode 100644 index 0000000..2be3d1d --- /dev/null +++ b/compose/production/postgres/list-backups.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -o errexit +set -o pipefail +set -o nounset + + +echo "listing available backups" +echo "-------------------------" +ls /backups/ diff --git a/compose/production/postgres/restore.sh b/compose/production/postgres/restore.sh new file mode 100644 index 0000000..e735894 --- /dev/null +++ b/compose/production/postgres/restore.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +set -o errexit +set -o pipefail +set -o nounset + + +# we might run into trouble when using the default `postgres` user, e.g. when dropping the postgres +# database in restore.sh. Check that something else is used here +if [ "$POSTGRES_USER" == "postgres" ] +then + echo "restoring as the postgres user is not supported, make sure to set the POSTGRES_USER environment variable" + exit 1 +fi + +# export the postgres password so that subsequent commands don't ask for it +export PGPASSWORD=$POSTGRES_PASSWORD + +# check that we have an argument for a filename candidate +if [[ $# -eq 0 ]] ; then + echo 'usage:' + echo ' docker-compose -f production.yml run postgres restore ' + echo '' + echo 'to get a list of available backups, run:' + echo ' docker-compose -f production.yml run postgres list-backups' + exit 1 +fi + +# set the backupfile variable +BACKUPFILE=/backups/$1 + +# check that the file exists +if ! 
[ -f $BACKUPFILE ]; then + echo "backup file not found" + echo 'to get a list of available backups, run:' + echo ' docker-compose -f production.yml run postgres list-backups' + exit 1 +fi + +echo "beginning restore from $1" +echo "-------------------------" + +# delete the db +# deleting the db can fail. Spit out a comment if this happens but continue since the db +# is created in the next step +echo "deleting old database $POSTGRES_USER" +if dropdb -h postgres -U $POSTGRES_USER $POSTGRES_USER +then echo "deleted $POSTGRES_USER database" +else echo "database $POSTGRES_USER does not exist, continue" +fi + +# create a new database +echo "creating new database $POSTGRES_USER" +createdb -h postgres -U $POSTGRES_USER $POSTGRES_USER -O $POSTGRES_USER + +# restore the database +echo "restoring database $POSTGRES_USER" +gunzip -c $BACKUPFILE | psql -h postgres -U $POSTGRES_USER diff --git a/config/__init__.py b/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/config/settings/__init__.py b/config/settings/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/config/settings/base.py b/config/settings/base.py new file mode 100644 index 0000000..4349c5e --- /dev/null +++ b/config/settings/base.py @@ -0,0 +1,258 @@ +""" +Base settings to build other settings files upon. +""" + +import environ + +ROOT_DIR = environ.Path(__file__) - 3 # (histonets/config/settings/base.py - 3 = histonets/) +APPS_DIR = ROOT_DIR.path('histonets') + +env = environ.Env() + +READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False) +if READ_DOT_ENV_FILE: + # OS environment variables take precedence over variables from .env + env.read_env(str(ROOT_DIR.path('.env'))) + +# GENERAL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#debug +DEBUG = env.bool('DJANGO_DEBUG', False) +# Local time zone. Choices are +# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name +# though not all of them may be available with every OS. +# In Windows, this must be set to your system time zone. 
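+# For example (illustrative value only; any tz database name works):
+#   TIME_ZONE = 'America/Los_Angeles'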
+TIME_ZONE = 'UTC' +# https://docs.djangoproject.com/en/dev/ref/settings/#language-code +LANGUAGE_CODE = 'en-us' +# https://docs.djangoproject.com/en/dev/ref/settings/#site-id +SITE_ID = 1 +# https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n +USE_I18N = True +# https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n +USE_L10N = True +# https://docs.djangoproject.com/en/dev/ref/settings/#use-tz +USE_TZ = True + +# DATABASES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#databases +DATABASES = { + 'default': env.db('DATABASE_URL'), +} +DATABASES['default']['ATOMIC_REQUESTS'] = True + +# URLS +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf +ROOT_URLCONF = 'config.urls' +# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application +WSGI_APPLICATION = 'config.wsgi.application' + +# APPS +# ------------------------------------------------------------------------------ +DJANGO_APPS = [ + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.sites', + 'django.contrib.messages', + 'django.contrib.staticfiles', + # 'django.contrib.humanize', # Handy template tags + 'django.contrib.admin', +] +THIRD_PARTY_APPS = [ + 'crispy_forms', + + 'allauth', + 'allauth.account', + 'allauth.socialaccount', +] +LOCAL_APPS = [ + 'histonets.users.apps.UsersConfig', + # Your stuff: custom apps go here +] +# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps +INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS + +# MIGRATIONS +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules +MIGRATION_MODULES = { + 'sites': 'histonets.contrib.sites.migrations' +} + +# AUTHENTICATION +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends +AUTHENTICATION_BACKENDS = [ + 'django.contrib.auth.backends.ModelBackend', + 'allauth.account.auth_backends.AuthenticationBackend', +] +# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model +AUTH_USER_MODEL = 'users.User' +# https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url +LOGIN_REDIRECT_URL = 'users:redirect' +# https://docs.djangoproject.com/en/dev/ref/settings/#login-url +LOGIN_URL = 'account_login' + +# PASSWORDS +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers +PASSWORD_HASHERS = [ + # https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django + 'django.contrib.auth.hashers.Argon2PasswordHasher', + 'django.contrib.auth.hashers.PBKDF2PasswordHasher', + 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', + 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', + 'django.contrib.auth.hashers.BCryptPasswordHasher', +] +# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 
'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + +# MIDDLEWARE +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#middleware +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +# STATIC +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#static-root +STATIC_ROOT = str(ROOT_DIR('static')) +# https://docs.djangoproject.com/en/dev/ref/settings/#static-url +STATIC_URL = '/static/' +# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS +STATICFILES_DIRS = [ + str(APPS_DIR.path('static')), +] +# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders +STATICFILES_FINDERS = [ + 'django.contrib.staticfiles.finders.FileSystemFinder', + 'django.contrib.staticfiles.finders.AppDirectoriesFinder', +] + +# MEDIA +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#media-root +MEDIA_ROOT = str(APPS_DIR('media')) +# https://docs.djangoproject.com/en/dev/ref/settings/#media-url +MEDIA_URL = '/media/' + +# TEMPLATES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#templates +TEMPLATES = [ + { + # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs + 'DIRS': [ + str(APPS_DIR.path('templates')), + ], + 'OPTIONS': { + # https://docs.djangoproject.com/en/dev/ref/settings/#template-debug + 'debug': DEBUG, + # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders + # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types + 'loaders': [ + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ], + # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.template.context_processors.i18n', + 'django.template.context_processors.media', + 'django.template.context_processors.static', + 'django.template.context_processors.tz', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] +# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs +CRISPY_TEMPLATE_PACK = 'bootstrap4' + +# FIXTURES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs +FIXTURE_DIRS = ( + str(APPS_DIR.path('fixtures')), +) + +# EMAIL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend +EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend') + +# ADMIN +# 
------------------------------------------------------------------------------ +# Django Admin URL regex. +ADMIN_URL = r'^admin/' +# https://docs.djangoproject.com/en/dev/ref/settings/#admins +ADMINS = [ + ("""Center for Interdisciplinary Digital Research (CIDR)""", 'contact-cidr@stanford.edu'), +] +# https://docs.djangoproject.com/en/dev/ref/settings/#managers +MANAGERS = ADMINS + +# Celery +# ------------------------------------------------------------------------------ +INSTALLED_APPS += ['histonets.taskapp.celery.CeleryConfig'] +# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url +CELERY_BROKER_URL = env('CELERY_BROKER_URL', default='django://') +# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend +if CELERY_BROKER_URL == 'django://': + CELERY_RESULT_BACKEND = 'redis://' +else: + CELERY_RESULT_BACKEND = CELERY_BROKER_URL +# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content +CELERY_ACCEPT_CONTENT = ['json'] +# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer +CELERY_TASK_SERIALIZER = 'json' +# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer +CELERY_RESULT_SERIALIZER = 'json' +# django-allauth +# ------------------------------------------------------------------------------ +ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True) +# https://django-allauth.readthedocs.io/en/latest/configuration.html +ACCOUNT_AUTHENTICATION_METHOD = 'username' +# https://django-allauth.readthedocs.io/en/latest/configuration.html +ACCOUNT_EMAIL_REQUIRED = True +# https://django-allauth.readthedocs.io/en/latest/configuration.html +ACCOUNT_EMAIL_VERIFICATION = 'mandatory' +# https://django-allauth.readthedocs.io/en/latest/configuration.html +ACCOUNT_ADAPTER = 'histonets.users.adapters.AccountAdapter' +# https://django-allauth.readthedocs.io/en/latest/configuration.html +SOCIALACCOUNT_ADAPTER = 'histonets.users.adapters.SocialAccountAdapter' + +# django-compressor +# ------------------------------------------------------------------------------ +# https://django-compressor.readthedocs.io/en/latest/quickstart/#installation +INSTALLED_APPS += ['compressor'] +STATICFILES_FINDERS += ['compressor.finders.CompressorFinder'] +# Your stuff... 
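+# Orientation sketch only (not a setting): assuming histonets/taskapp/celery.py
+# exposes the configured application as ``app``, a task module using the
+# Celery settings above would look roughly like:
+#
+#   from histonets.taskapp.celery import app
+#
+#   @app.task
+#   def add(x, y):
+#       return x + y
+#
+#   add.delay(2, 3)  # serialized as JSON and sent to CELERY_BROKER_URL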
+# ------------------------------------------------------------------------------ diff --git a/config/settings/local.py b/config/settings/local.py new file mode 100644 index 0000000..cc97330 --- /dev/null +++ b/config/settings/local.py @@ -0,0 +1,68 @@ +from .base import * # noqa +from .base import env + +# GENERAL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#debug +DEBUG = env.bool('DJANGO_DEBUG', default=True) +# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key +SECRET_KEY = env('DJANGO_SECRET_KEY', default='AmV6muruGEGNpRqR0G3DDC9L6dg7NwYF3sY4YU5EdB9bZ7MgNk9AUSND5g4MA72p') +# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts +ALLOWED_HOSTS = [ + "localhost", + "0.0.0.0", +] + +# CACHES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#caches +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': '' + } +} + +# TEMPLATES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#templates +TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa F405 + +# EMAIL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host +EMAIL_HOST = env('EMAIL_HOST', default='mailhog') +# https://docs.djangoproject.com/en/dev/ref/settings/#email-port +EMAIL_PORT = 1025 + +# django-debug-toolbar +# ------------------------------------------------------------------------------ +# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites +INSTALLED_APPS += ['debug_toolbar'] # noqa F405 +# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware +MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware'] # noqa F405 +# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config +DEBUG_TOOLBAR_CONFIG = { + 'DISABLE_PANELS': [ + 'debug_toolbar.panels.redirects.RedirectsPanel', + ], + 'SHOW_TEMPLATE_CONTEXT': True, +} +# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips +INTERNAL_IPS = ['127.0.0.1', '10.0.2.2'] +import socket +import os +if os.environ.get('USE_DOCKER') == 'yes': + hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) + INTERNAL_IPS += [ip[:-1] + '1' for ip in ips] + +# django-extensions +# ------------------------------------------------------------------------------ +# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration +INSTALLED_APPS += ['django_extensions'] # noqa F405 +# Celery +# ------------------------------------------------------------------------------ +# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_always_eager +CELERY_ALWAYS_EAGER = True +# Your stuff... 
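+# With the eager switch above, a hypothetical task ``add`` runs synchronously
+# in-process during development, so no broker container is required, e.g.:
+#
+#   result = add.delay(2, 3)
+#   result.get()  # 5, already computed when .delay() returned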
+# ------------------------------------------------------------------------------ diff --git a/config/settings/production.py b/config/settings/production.py new file mode 100644 index 0000000..70e65ea --- /dev/null +++ b/config/settings/production.py @@ -0,0 +1,201 @@ +import logging + +from .base import * # noqa +from .base import env + +# GENERAL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key +SECRET_KEY = env('DJANGO_SECRET_KEY') +# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts +ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['histonets.stanford.edu']) + +# DATABASES +# ------------------------------------------------------------------------------ +DATABASES['default'] = env.db('DATABASE_URL') # noqa F405 +DATABASES['default']['ATOMIC_REQUESTS'] = True # noqa F405 +DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60) # noqa F405 + +# CACHES +# ------------------------------------------------------------------------------ +CACHES = { + 'default': { + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': f'{env("REDIS_URL", default="redis://127.0.0.1:6379")}/{0}', + 'OPTIONS': { + 'CLIENT_CLASS': 'django_redis.client.DefaultClient', + # Mimicing memcache behavior. + # http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior + 'IGNORE_EXCEPTIONS': True, + } + } +} + +# SECURITY +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header +SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') +# https://docs.djangoproject.com/en/dev/ref/settings/#secure-ssl-redirect +SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True) +# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure +SESSION_COOKIE_SECURE = True +# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-httponly +SESSION_COOKIE_HTTPONLY = True +# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure +CSRF_COOKIE_SECURE = True +# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-httponly +CSRF_COOKIE_HTTPONLY = True +# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https +# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds +# TODO: set this to 60 seconds first and then to 518400 once you prove the former works +SECURE_HSTS_SECONDS = 60 +# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains +SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool('DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True) +# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload +SECURE_HSTS_PRELOAD = env.bool('DJANGO_SECURE_HSTS_PRELOAD', default=True) +# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff +SECURE_CONTENT_TYPE_NOSNIFF = env.bool('DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True) +# https://docs.djangoproject.com/en/dev/ref/settings/#secure-browser-xss-filter +SECURE_BROWSER_XSS_FILTER = True +# https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options +X_FRAME_OPTIONS = 'DENY' + +# STORAGES +# ------------------------------------------------------------------------------ +# https://django-storages.readthedocs.io/en/latest/#installation +INSTALLED_APPS += ['storages'] # noqa F405 +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings 
+AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID') +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY') +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME') +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_AUTO_CREATE_BUCKET = True +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_QUERYSTRING_AUTH = False +# DO NOT change these unless you know what you're doing. +_AWS_EXPIRY = 60 * 60 * 24 * 7 +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_S3_OBJECT_PARAMETERS = { + 'CacheControl': f'max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate', +} + +# STATIC +# ------------------------ +STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' + +# MEDIA +# ------------------------------------------------------------------------------ +DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' +MEDIA_URL = f'https://s3.amazonaws.com/{AWS_STORAGE_BUCKET_NAME}/' + +# TEMPLATES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#templates +TEMPLATES[0]['OPTIONS']['loaders'] = [ # noqa F405 + ( + 'django.template.loaders.cached.Loader', + [ + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ] + ), +] + +# EMAIL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email +DEFAULT_FROM_EMAIL = env( + 'DJANGO_DEFAULT_FROM_EMAIL', + default='Histonets ' +) +# https://docs.djangoproject.com/en/dev/ref/settings/#server-email +SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL) +# https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix +EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[Histonets]') + +# ADMIN +# ------------------------------------------------------------------------------ +# Django Admin URL regex. 
+ADMIN_URL = env('DJANGO_ADMIN_URL') + +# Anymail (Mailgun) +# ------------------------------------------------------------------------------ +# https://anymail.readthedocs.io/en/stable/installation/#installing-anymail +INSTALLED_APPS += ['anymail'] # noqa F405 +EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' +# https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference +ANYMAIL = { + 'MAILGUN_API_KEY': env('MAILGUN_API_KEY'), + 'MAILGUN_SENDER_DOMAIN': env('MAILGUN_DOMAIN') +} + +# Gunicorn +# ------------------------------------------------------------------------------ +INSTALLED_APPS += ['gunicorn'] # noqa F405 + +# WhiteNoise +# ------------------------------------------------------------------------------ +# http://whitenoise.evans.io/en/latest/django.html#enable-whitenoise +MIDDLEWARE = ['whitenoise.middleware.WhiteNoiseMiddleware'] + MIDDLEWARE # noqa F405 +# django-compressor +# ------------------------------------------------------------------------------ +# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_ENABLED +COMPRESS_ENABLED = env.bool('COMPRESS_ENABLED', default=True) +# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_STORAGE +COMPRESS_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' +# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_URL +COMPRESS_URL = STATIC_URL + + +# LOGGING +# ------------------------------------------------------------------------------ +# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging +# A sample logging configuration. The only tangible logging +# performed by this configuration is to send an email to +# the site admins bon every HTTP 500 error when DEBUG=False. +# See https://docs.djangoproject.com/en/dev/topics/logging for +# more details on how to customize your logging configuration. +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'filters': { + 'require_debug_false': { + '()': 'django.utils.log.RequireDebugFalse' + } + }, + 'formatters': { + 'verbose': { + 'format': '%(levelname)s %(asctime)s %(module)s ' + '%(process)d %(thread)d %(message)s' + }, + }, + 'handlers': { + 'mail_admins': { + 'level': 'ERROR', + 'filters': ['require_debug_false'], + 'class': 'django.utils.log.AdminEmailHandler' + }, + 'console': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'verbose', + }, + }, + 'loggers': { + 'django.request': { + 'handlers': ['mail_admins'], + 'level': 'ERROR', + 'propagate': True + }, + 'django.security.DisallowedHost': { + 'level': 'ERROR', + 'handlers': ['console', 'mail_admins'], + 'propagate': True + } + } +} + +# Your stuff... +# ------------------------------------------------------------------------------ diff --git a/config/settings/test.py b/config/settings/test.py new file mode 100644 index 0000000..059a44d --- /dev/null +++ b/config/settings/test.py @@ -0,0 +1,58 @@ +""" +With these settings, tests run faster. 
+""" + +from .base import * # noqa +from .base import env + +# GENERAL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#debug +DEBUG = False +# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key +SECRET_KEY = env('DJANGO_SECRET_KEY', default='j2m4E6s0RuCgAz86YMUlH4HFsbpD9SGbXlcvhWNiEpc7QhmdVqzmfUG7ABIknVIL') +# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner +TEST_RUNNER = 'django.test.runner.DiscoverRunner' + +# CACHES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#caches +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': '' + } +} + +# PASSWORDS +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers +PASSWORD_HASHERS = [ + 'django.contrib.auth.hashers.MD5PasswordHasher', +] + +# TEMPLATES +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#templates +TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa F405 +TEMPLATES[0]['OPTIONS']['loaders'] = [ # noqa F405 + ( + 'django.template.loaders.cached.Loader', + [ + 'django.template.loaders.filesystem.Loader', + 'django.template.loaders.app_directories.Loader', + ], + ), +] + +# EMAIL +# ------------------------------------------------------------------------------ +# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend +EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' +# https://docs.djangoproject.com/en/dev/ref/settings/#email-host +EMAIL_HOST = 'localhost' +# https://docs.djangoproject.com/en/dev/ref/settings/#email-port +EMAIL_PORT = 1025 + +# Your stuff... +# ------------------------------------------------------------------------------ diff --git a/config/urls.py b/config/urls.py new file mode 100644 index 0000000..6467101 --- /dev/null +++ b/config/urls.py @@ -0,0 +1,37 @@ +from django.conf import settings +from django.conf.urls import include, url +from django.conf.urls.static import static +from django.contrib import admin +from django.views.generic import TemplateView +from django.views import defaults as default_views + +urlpatterns = [ + url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'), + url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'), + + # Django Admin, use {% url 'admin:index' %} + url(settings.ADMIN_URL, admin.site.urls), + + # User management + url(r'^users/', include('histonets.users.urls', namespace='users')), + url(r'^accounts/', include('allauth.urls')), + + # Your stuff: custom urls includes go here + + +] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) + +if settings.DEBUG: + # This allows the error pages to be debugged during development, just visit + # these url in browser to see how these error pages look like. 
+ urlpatterns += [ + url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}), + url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}), + url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}), + url(r'^500/$', default_views.server_error), + ] + if 'debug_toolbar' in settings.INSTALLED_APPS: + import debug_toolbar + urlpatterns = [ + url(r'^__debug__/', include(debug_toolbar.urls)), + ] + urlpatterns diff --git a/config/wsgi.py b/config/wsgi.py new file mode 100644 index 0000000..037b158 --- /dev/null +++ b/config/wsgi.py @@ -0,0 +1,42 @@ +""" +WSGI config for Histonets project. + +This module contains the WSGI application used by Django's development server +and any production WSGI deployments. It should expose a module-level variable +named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover +this application via the ``WSGI_APPLICATION`` setting. + +Usually you will have the standard Django WSGI application here, but it also +might make sense to replace the whole Django WSGI application with a custom one +that later delegates to the Django one. For example, you could introduce WSGI +middleware here, or combine a Django application with an application of another +framework. + +""" +import os +import sys + +from django.core.wsgi import get_wsgi_application + +# This allows easy placement of apps within the interior +# histonets directory. +app_path = os.path.abspath(os.path.join( + os.path.dirname(os.path.abspath(__file__)), os.pardir)) +sys.path.append(os.path.join(app_path, 'histonets')) + + + +# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks +# if running multiple sites in the same mod_wsgi process. To fix this, use +# mod_wsgi daemon mode with each site in its own daemon process, or use +# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production" +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") + +# This application object is used by any WSGI server configured to use this +# file. This includes Django's development server, if the WSGI_APPLICATION +# setting points here. +application = get_wsgi_application() + +# Apply WSGI middleware here. +# from helloworld.wsgi import HelloWorldApplication +# application = HelloWorldApplication(application) diff --git a/dev.sh b/dev.sh new file mode 100755 index 0000000..efba72c --- /dev/null +++ b/dev.sh @@ -0,0 +1,3 @@ +#!/bin/bash +docker-compose -f local.yml up $@ +docker-compose -f local.yml rm --stop --force diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..50164c5 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,153 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/histonets.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/histonets.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/histonets" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/histonets" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." 
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/docs/__init__.py b/docs/__init__.py new file mode 100644 index 0000000..8772c82 --- /dev/null +++ b/docs/__init__.py @@ -0,0 +1 @@ +# Included so that Django's startproject comment runs against the docs directory diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..2cae42b --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,243 @@ +# Histonets documentation build configuration file, created by +# sphinx-quickstart. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. 
+project = 'Histonets' +copyright = """2018, Center for Interdisciplinary Digital Research (CIDR)""" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.1' +# The full version, including alpha/beta/rc tags. +release = '0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. 
+# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'histonetsdoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # 'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', + 'histonets.tex', + 'Histonets Documentation', + """Center for Interdisciplinary Digital Research (CIDR)""", 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'histonets', 'Histonets Documentation', + ["""Center for Interdisciplinary Digital Research (CIDR)"""], 1) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'histonets', 'Histonets Documentation', + """Center for Interdisciplinary Digital Research (CIDR)""", 'Histonets', + """From scanned map images to graph data""", 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' diff --git a/docs/deploy.rst b/docs/deploy.rst new file mode 100644 index 0000000..1e642c7 --- /dev/null +++ b/docs/deploy.rst @@ -0,0 +1,4 @@ +Deploy +======== + +This is where you describe how the project is deployed in production. 
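Back in `docs/conf.py`, the `version` and `release` strings are hardcoded to `0.1`. Since `histonets/__init__.py` later in this patch defines `__version__`, one possible refinement (a sketch only, assuming the package is importable when Sphinx runs, for example by extending `sys.path` at the top of `conf.py`) would be to derive them from the package:

.. code-block:: python

    # Sketch: keep the Sphinx version strings in sync with the package metadata
    # instead of hardcoding '0.1'. Assumes `histonets` can be imported at build time.
    import histonets

    release = histonets.__version__               # full version, e.g. '0.1.0'
    version = '.'.join(release.split('.')[:2])    # short X.Y form, e.g. '0.1'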
diff --git a/docs/docker_ec2.rst b/docs/docker_ec2.rst new file mode 100644 index 0000000..606d295 --- /dev/null +++ b/docs/docker_ec2.rst @@ -0,0 +1,186 @@ +Developing with Docker +====================== + +You can develop your application in a `Docker`_ container for simpler deployment onto bare Linux machines later. These instructions assume an `Amazon Web Services`_ EC2 instance, but they should work on any machine with Docker > 1.3 and `Docker compose`_ installed. + +.. _Docker: https://www.docker.com/ +.. _Amazon Web Services: http://aws.amazon.com/ +.. _Docker compose: https://docs.docker.com/compose/ + +Setting up +^^^^^^^^^^ + +Docker encourages running one container for each process. This might mean one container for your web server, one for your Django application and a third for your database. Once you're happy composing containers in this way you can easily add more, such as a `Redis`_ cache. + +.. _Redis: http://redis.io/ + +The Docker compose tool (previously known as `fig`_) makes linking these containers easy. An example setup for your Cookiecutter Django project might look like this: + +.. _fig: http://www.fig.sh/ + +:: + + webapp/ # Your cookiecutter project would be in here + Dockerfile + ... + database/ + Dockerfile + ... + webserver/ + Dockerfile + ... + production.yml + +Each component of your application would get its own `Dockerfile`_. The rest of this example assumes you are using the `base postgres image`_ for your database. Your database settings in `config/settings/base.py` might then look something like: + +.. _Dockerfile: https://docs.docker.com/reference/builder/ +.. _base postgres image: https://registry.hub.docker.com/_/postgres/ + +.. code-block:: python + + DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'NAME': 'postgres', + 'USER': 'postgres', + 'HOST': 'database', + 'PORT': 5432, + } + } + +The `Docker compose documentation`_ explains in detail what you can accomplish in the `production.yml` file, but an example configuration might look like this: + +.. _Docker compose documentation: https://docs.docker.com/compose/#compose-documentation + +.. code-block:: yaml + + database: + build: database + webapp: + build: webapp + command: /usr/bin/python3.6 manage.py runserver 0.0.0.0:8000 # dev setting + # command: gunicorn -b 0.0.0.0:8000 wsgi:application # production setting + volumes: + - webapp/your_project_name:/path/to/container/workdir/ + links: + - database + webserver: + build: webserver + ports: + - "80:80" + - "443:443" + links: + - webapp + +We'll ignore the webserver for now (you'll want to comment that part out while we do).
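As an aside, the `config/settings/base.py` snippet above hardcodes the connection values for the sake of the example. A minimal sketch of the same settings read from environment variables instead (the `POSTGRES_*` names below are assumptions for illustration, not something this patch defines) could look like:

.. code-block:: python

    # Sketch only: pull the database credentials from the container environment
    # rather than hardcoding them. The variable names are illustrative.
    import os

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': os.environ.get('POSTGRES_DB', 'postgres'),
            'USER': os.environ.get('POSTGRES_USER', 'postgres'),
            'PASSWORD': os.environ.get('POSTGRES_PASSWORD', ''),
            'HOST': os.environ.get('POSTGRES_HOST', 'database'),
            'PORT': int(os.environ.get('POSTGRES_PORT', '5432')),
        }
    }

The compose file would then pass those variables to the webapp container (for example via an `environment:` or `env_file:` entry), keeping credentials out of version control.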
A working Dockerfile to run your cookiecutter application might look like this: + +:: + + FROM ubuntu:14.04 + ENV REFRESHED_AT 2015-01-13 + + # update packages and prepare to build software + RUN ["apt-get", "update"] + RUN ["apt-get", "-y", "install", "build-essential", "vim", "git", "curl"] + RUN ["locale-gen", "en_GB.UTF-8"] + + # install latest python + RUN ["apt-get", "-y", "build-dep", "python3-dev", "python3-imaging"] + RUN ["apt-get", "-y", "install", "python3-dev", "python3-imaging", "python3-pip"] + + # prepare postgreSQL support + RUN ["apt-get", "-y", "build-dep", "python3-psycopg2"] + + # move into our working directory + # ADD must be after chown see http://stackoverflow.com/a/26145444/1281947 + RUN ["groupadd", "python"] + RUN ["useradd", "python", "-s", "/bin/bash", "-m", "-g", "python", "-G", "python"] + ENV HOME /home/python + WORKDIR /home/python + RUN ["chown", "-R", "python:python", "/home/python"] + ADD ./ /home/python + + # manage requirements + ENV REQUIREMENTS_REFRESHED_AT 2015-02-25 + RUN ["pip3", "install", "-r", "requirements.txt"] + + # uncomment the line below to use container as a non-root user + USER python:python + +Running `sudo docker-compose -f production.yml build` will follow the instructions in your `production.yml` file and build the database container, then your webapp, before mounting your cookiecutter project files as a volume in the webapp container and linking to the database. Our example yaml file runs in development mode but changing it to production mode is as simple as commenting out the line using `runserver` and uncommenting the line using `gunicorn`. + +Both are set to run on port `0.0.0.0:8000`, which is where the Docker daemon will discover it. You can now run `sudo docker-compose -f production.yml up` and browse to `localhost:8000` to see your application running. + +Deployment +^^^^^^^^^^ + +You'll need a webserver container for deployment. An example setup for `Nginx`_ might look like this: + +.. _Nginx: http://wiki.nginx.org/Main + +:: + + FROM ubuntu:14.04 + ENV REFRESHED_AT 2015-02-11 + + # get the nginx package and set it up + RUN ["apt-get", "update"] + RUN ["apt-get", "-y", "install", "nginx"] + + # forward request and error logs to docker log collector + RUN ln -sf /dev/stdout /var/log/nginx/access.log + RUN ln -sf /dev/stderr /var/log/nginx/error.log + VOLUME ["/var/cache/nginx"] + EXPOSE 80 443 + + # load nginx conf + ADD ./site.conf /etc/nginx/sites-available/your_cookiecutter_project + RUN ["ln", "-s", "/etc/nginx/sites-available/your_cookiecutter_project", "/etc/nginx/sites-enabled/your_cookiecutter_project"] + RUN ["rm", "-rf", "/etc/nginx/sites-available/default"] + + #start the server + CMD ["nginx", "-g", "daemon off;"] + +That Dockerfile assumes you have an Nginx conf file named `site.conf` in the same directory as the webserver Dockerfile. A very basic example, which forwards traffic onto the development server or gunicorn for processing, would look like this: + +:: + + # see http://serverfault.com/questions/577370/how-can-i-use-environment-variables-in-nginx-conf#comment730384_577370 + upstream localhost { + server webapp_1:8000; + } + server { + location / { + proxy_pass http://localhost; + } + } + +Running `sudo docker-compose -f production.yml build webserver` will build your server container. Running `sudo docker-compose -f production.yml up` will now expose your application directly on `localhost` (no need to specify the port number). 
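For a quick sanity check that the whole stack is actually answering requests, a small throwaway script along these lines can help (a sketch only; the URL and retry counts are assumptions, adjust them to your setup):

.. code-block:: python

    # Sketch: poll the freshly started stack until it responds, or give up.
    import time
    import urllib.error
    import urllib.request

    URL = 'http://localhost/'  # assumed address once the webserver container is up

    for attempt in range(10):
        try:
            with urllib.request.urlopen(URL, timeout=5) as response:
                print('Got HTTP', response.status)
                break
        except (urllib.error.URLError, OSError) as exc:
            print('Not reachable yet:', exc)
            time.sleep(3)
    else:
        raise SystemExit('The application never came up; check `docker-compose logs`.')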
+ +Building and running your app on EC2 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All you now need to do to run your app in production is: + +* Create an empty EC2 Linux instance (any Linux machine should do). + +* Install your preferred source control solution, Docker and Docker compose on the new instance. + +* Pull in your code from source control. The root directory should be the one with your `production.yml` file in it. + +* Run `sudo docker-compose -f production.yml build` and `sudo docker-compose -f production.yml up`. + +* Assign an `Elastic IP address`_ to your new machine. + +.. _Elastic IP address: https://aws.amazon.com/articles/1346 + +* Point your domain name to the elastic IP. + +**Be careful with Elastic IPs** because, on the AWS free tier, if you assign one and then stop the machine you will incur charges while the machine is down (presumably because you're preventing them from allocating the IP to someone else). + +Security advisory +^^^^^^^^^^^^^^^^^ + +The setup described in these instructions will get you up and running but it hasn't been audited for security. If you are running your own setup like this it is always advisable to, at a minimum, examine your application with a tool like `OWASP ZAP`_ to see what security holes you might be leaving open. + +.. _OWASP ZAP: https://www.owasp.org/index.php/OWASP_Zed_Attack_Proxy_Project diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..17f18b8 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,26 @@ +.. Histonets documentation master file, created by + sphinx-quickstart. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Histonets's documentation! +==================================================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + install + deploy + docker_ec2 + tests + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/install.rst b/docs/install.rst new file mode 100644 index 0000000..1bc0333 --- /dev/null +++ b/docs/install.rst @@ -0,0 +1,4 @@ +Install +========= + +This is where you write how to get a new laptop to run this project. diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..a742e13 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,190 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^<target^>` where ^<target^> is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo.
changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\histonets.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\histonets.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. 
+ goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +:end diff --git a/histonets/__init__.py b/histonets/__init__.py new file mode 100644 index 0000000..76f97b1 --- /dev/null +++ b/histonets/__init__.py @@ -0,0 +1,2 @@ +__version__ = '0.1.0' +__version_info__ = tuple([int(num) if num.isdigit() else num for num in __version__.replace('-', '.', 1).split('.')]) diff --git a/histonets/contrib/__init__.py b/histonets/contrib/__init__.py new file mode 100644 index 0000000..1c7ecc8 --- /dev/null +++ b/histonets/contrib/__init__.py @@ -0,0 +1,5 @@ +""" +To understand why this file is here, please read: + +http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django +""" diff --git a/histonets/contrib/sites/__init__.py b/histonets/contrib/sites/__init__.py new file mode 100644 index 0000000..1c7ecc8 --- /dev/null +++ b/histonets/contrib/sites/__init__.py @@ -0,0 +1,5 @@ +""" +To understand why this file is here, please read: + +http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django +""" diff --git a/histonets/contrib/sites/migrations/0001_initial.py b/histonets/contrib/sites/migrations/0001_initial.py new file mode 100644 index 0000000..a763986 --- /dev/null +++ b/histonets/contrib/sites/migrations/0001_initial.py @@ -0,0 +1,31 @@ +import django.contrib.sites.models +from django.contrib.sites.models import _simple_domain_name_validator +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [] + + operations = [ + migrations.CreateModel( + name='Site', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('domain', models.CharField( + max_length=100, verbose_name='domain name', validators=[_simple_domain_name_validator] + )), + ('name', models.CharField(max_length=50, verbose_name='display name')), + ], + options={ + 'ordering': ('domain',), + 'db_table': 'django_site', + 'verbose_name': 'site', + 'verbose_name_plural': 'sites', + }, + bases=(models.Model,), + managers=[ + ('objects', django.contrib.sites.models.SiteManager()), + ], + ), + ] diff --git a/histonets/contrib/sites/migrations/0002_alter_domain_unique.py b/histonets/contrib/sites/migrations/0002_alter_domain_unique.py new file mode 100644 index 0000000..6a26ebc --- /dev/null +++ b/histonets/contrib/sites/migrations/0002_alter_domain_unique.py @@ -0,0 +1,20 @@ +import django.contrib.sites.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('sites', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='site', + name='domain', + field=models.CharField( + max_length=100, unique=True, validators=[django.contrib.sites.models._simple_domain_name_validator], + verbose_name='domain name' + ), + ), + ] diff --git a/histonets/contrib/sites/migrations/0003_set_site_domain_and_name.py b/histonets/contrib/sites/migrations/0003_set_site_domain_and_name.py 
new file mode 100644 index 0000000..f669654 --- /dev/null +++ b/histonets/contrib/sites/migrations/0003_set_site_domain_and_name.py @@ -0,0 +1,42 @@ +""" +To understand why this file is here, please read: + +http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django +""" +from django.conf import settings +from django.db import migrations + + +def update_site_forward(apps, schema_editor): + """Set site domain and name.""" + Site = apps.get_model('sites', 'Site') + Site.objects.update_or_create( + id=settings.SITE_ID, + defaults={ + 'domain': 'histonets.stanford.edu', + 'name': 'Histonets' + } + ) + + +def update_site_backward(apps, schema_editor): + """Revert site domain and name to default.""" + Site = apps.get_model('sites', 'Site') + Site.objects.update_or_create( + id=settings.SITE_ID, + defaults={ + 'domain': 'example.com', + 'name': 'example.com' + } + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ('sites', '0002_alter_domain_unique'), + ] + + operations = [ + migrations.RunPython(update_site_forward, update_site_backward), + ] diff --git a/histonets/contrib/sites/migrations/__init__.py b/histonets/contrib/sites/migrations/__init__.py new file mode 100644 index 0000000..1c7ecc8 --- /dev/null +++ b/histonets/contrib/sites/migrations/__init__.py @@ -0,0 +1,5 @@ +""" +To understand why this file is here, please read: + +http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django +""" diff --git a/histonets/static/css/project.css b/histonets/static/css/project.css new file mode 100644 index 0000000..f1d543d --- /dev/null +++ b/histonets/static/css/project.css @@ -0,0 +1,13 @@ +/* These styles are generated from project.scss. */ + +.alert-debug { + color: black; + background-color: white; + border-color: #d6e9c6; +} + +.alert-error { + color: #b94a48; + background-color: #f2dede; + border-color: #eed3d7; +} diff --git a/histonets/static/fonts/.gitkeep b/histonets/static/fonts/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/histonets/static/images/favicon.ico b/histonets/static/images/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..e1c1dd1a32a3a077c41a21e52bc7fb5ac90d3afb GIT binary patch literal 8348 zcmeHLX-gGh6rQ3V&`92%;lcmGu`Jl^WK@OV`^XKh08PZoaH%l?rdiiWq>kJ2@6vM zhAH8L6=kTRD1!xR`-2o^hS&}loN!U1#gF+=YxEq~uqf5#iBw%p0;w;5ehm+6a!sSu zh;X+$+}oF$X1Q6DwS~>Y_Hpw^(&QvJO<5ZCPrn5laNp%s{B3x1hf%*?eW>oS{<{4s^u_yG zpDyG!_iV#~G=q<~slG@0Sx47XXQ%O;HY7ILVf}B-)R$6GV^A78)t0tN9`tu3r9Z+xM?NgUd8geu=c`0?r;-KR&IQjKs zSB#VCpg8CPW&M}$sth@H=VS%t;23!!j};F)bb;W3EkA!4QmCsY_p81^TK0{;$g>e1Hl998^0M+r0-7ZSN+l_cMSRuEALTE@|d6+3{GMP^;_|<yhubb{FF1IPgH|0>SH%pC!c)uFI)H z?jv4y0uO{P5WE>~I<$r!RFo0lN4r{xm;Jy4p$h~b3S*a#)$Z*nI}&Kc_C+*zZHz1v zIR8TBVH7Y?Mp~ zofpsr+SP@>EX4e*bQ{nAUYtKrQ&-5d4j;FF{^-^Dt2^5I`HSb!|22PN26n3>hKPOy zW2D*A*v+c{bFKCwozbB{dObp~e&1Nxrj<4Ih4~w)M`nl08o}Wq2 z#Jt(k+M>+}JY(=2gm(gdUq)^@e%tX(F)RBtotnB2^y>YKz-5eg_qO&n%lJ1nuQmTY zxmyE1NWjl1EGvD?Z|n;neT;sa?Q;Ef-#%$BYxXAhD4xF+@M>*qrR!x^sPN98|BX4; z!$NJcKF`^C7f(<_vlp%b>`pxL^8Y<&?KFx{n_!5C9VqLA*CP@zhXs2t#dmrAKu?d_ y^&_r5_e@uesG}CO*g!3o?-kB+I^cA`>44J#rvpw0oDMi0a5~_0!0Eu>4*Uj$LD0AW literal 0 HcmV?d00001 diff --git a/histonets/static/js/project.js b/histonets/static/js/project.js new file mode 100644 index 0000000..91ab9e2 --- /dev/null +++ b/histonets/static/js/project.js @@ -0,0 +1,21 @@ +/* Project specific Javascript goes here. 
*/ + +/* +Formatting hack to get around crispy-forms unfortunate hardcoding +in helpers.FormHelper: + + if template_pack == 'bootstrap4': + grid_colum_matcher = re.compile('\w*col-(xs|sm|md|lg|xl)-\d+\w*') + using_grid_layout = (grid_colum_matcher.match(self.label_class) or + grid_colum_matcher.match(self.field_class)) + if using_grid_layout: + items['using_grid_layout'] = True + +Issues with the above approach: + +1. Fragile: Assumes Bootstrap 4's API doesn't change (it does) +2. Unforgiving: Doesn't allow for any variation in template design +3. Really Unforgiving: No way to override this behavior +4. Undocumented: No mention in the documentation, or it's too hard for me to find +*/ +$('.form-group').removeClass('row'); diff --git a/histonets/static/sass/custom_bootstrap_vars.scss b/histonets/static/sass/custom_bootstrap_vars.scss new file mode 100644 index 0000000..e69de29 diff --git a/histonets/static/sass/project.scss b/histonets/static/sass/project.scss new file mode 100644 index 0000000..3c8f261 --- /dev/null +++ b/histonets/static/sass/project.scss @@ -0,0 +1,37 @@ + + + + +// project specific CSS goes here + +//////////////////////////////// + //Variables// +//////////////////////////////// + +// Alert colors + +$white: #fff; +$mint-green: #d6e9c6; +$black: #000; +$pink: #f2dede; +$dark-pink: #eed3d7; +$red: #b94a48; + +//////////////////////////////// + //Alerts// +//////////////////////////////// + +// bootstrap alert CSS, translated to the django-standard levels of +// debug, info, success, warning, error + +.alert-debug { + background-color: $white; + border-color: $mint-green; + color: $black; +} + +.alert-error { + background-color: $pink; + border-color: $dark-pink; + color: $red; +} diff --git a/histonets/taskapp/__init__.py b/histonets/taskapp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/histonets/taskapp/celery.py b/histonets/taskapp/celery.py new file mode 100644 index 0000000..e009394 --- /dev/null +++ b/histonets/taskapp/celery.py @@ -0,0 +1,34 @@ + +import os +from celery import Celery +from django.apps import apps, AppConfig +from django.conf import settings + + +if not settings.configured: + # set the default Django settings module for the 'celery' program. + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover + + +app = Celery('histonets') + + +class CeleryConfig(AppConfig): + name = 'histonets.taskapp' + verbose_name = 'Celery Config' + + def ready(self): + # Using a string here means the worker will not have to + # pickle the object when using Windows. + app.config_from_object('django.conf:settings') + installed_apps = [app_config.name for app_config in apps.get_app_configs()] + app.autodiscover_tasks(lambda: installed_apps, force=True) + + + + + + +@app.task(bind=True) +def debug_task(self): + print(f'Request: {self.request!r}') # pragma: no cover diff --git a/histonets/templates/403_csrf.html b/histonets/templates/403_csrf.html new file mode 100644 index 0000000..77db8ae --- /dev/null +++ b/histonets/templates/403_csrf.html @@ -0,0 +1,9 @@ +{% extends "base.html" %} + +{% block title %}Forbidden (403){% endblock %} + +{% block content %} +

Forbidden (403)

+ +

CSRF verification failed. Request aborted.

+{% endblock content %} diff --git a/histonets/templates/404.html b/histonets/templates/404.html new file mode 100644 index 0000000..98327cd --- /dev/null +++ b/histonets/templates/404.html @@ -0,0 +1,9 @@ +{% extends "base.html" %} + +{% block title %}Page not found{% endblock %} + +{% block content %} +

Page not found

+ +

This is not the page you were looking for.

+{% endblock content %} diff --git a/histonets/templates/500.html b/histonets/templates/500.html new file mode 100644 index 0000000..21df606 --- /dev/null +++ b/histonets/templates/500.html @@ -0,0 +1,13 @@ +{% extends "base.html" %} + +{% block title %}Server Error{% endblock %} + +{% block content %} +

Ooops!!! 500

+ +

Looks like something went wrong!

+ +

We track these errors automatically, but if the problem persists feel free to contact us. In the meantime, try refreshing.

+{% endblock content %} + + diff --git a/histonets/templates/account/account_inactive.html b/histonets/templates/account/account_inactive.html new file mode 100644 index 0000000..17c2157 --- /dev/null +++ b/histonets/templates/account/account_inactive.html @@ -0,0 +1,12 @@ +{% extends "account/base.html" %} + +{% load i18n %} + +{% block head_title %}{% trans "Account Inactive" %}{% endblock %} + +{% block inner %} +

{% trans "Account Inactive" %}

+ +

{% trans "This account is inactive." %}

+{% endblock %} + diff --git a/histonets/templates/account/base.html b/histonets/templates/account/base.html new file mode 100644 index 0000000..8e1f260 --- /dev/null +++ b/histonets/templates/account/base.html @@ -0,0 +1,10 @@ +{% extends "base.html" %} +{% block title %}{% block head_title %}{% endblock head_title %}{% endblock title %} + +{% block content %} +
+
+ {% block inner %}{% endblock %} +
+
+{% endblock %} diff --git a/histonets/templates/account/email.html b/histonets/templates/account/email.html new file mode 100644 index 0000000..0dc8d14 --- /dev/null +++ b/histonets/templates/account/email.html @@ -0,0 +1,80 @@ + +{% extends "account/base.html" %} + +{% load i18n %} +{% load crispy_forms_tags %} + +{% block head_title %}{% trans "Account" %}{% endblock %} + +{% block inner %} +

{% trans "E-mail Addresses" %}

+ +{% if user.emailaddress_set.all %} +

{% trans 'The following e-mail addresses are associated with your account:' %}

+ + + +{% else %} +

{% trans 'Warning:' %} {% trans "You currently do not have any e-mail address set up. You should really add an e-mail address so you can receive notifications, reset your password, etc." %}

+ +{% endif %} + + +

{% trans "Add E-mail Address" %}

+ +
+ {% csrf_token %} + {{ form|crispy }} + +
+ +{% endblock %} + + +{% block javascript %} +{{ block.super }} + +{% endblock %} + diff --git a/histonets/templates/account/email_confirm.html b/histonets/templates/account/email_confirm.html new file mode 100644 index 0000000..46c7812 --- /dev/null +++ b/histonets/templates/account/email_confirm.html @@ -0,0 +1,32 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load account %} + +{% block head_title %}{% trans "Confirm E-mail Address" %}{% endblock %} + + +{% block inner %} +

{% trans "Confirm E-mail Address" %}

+ +{% if confirmation %} + +{% user_display confirmation.email_address.user as user_display %} + +

{% blocktrans with confirmation.email_address.email as email %}Please confirm that {{ email }} is an e-mail address for user {{ user_display }}.{% endblocktrans %}

+ +
+{% csrf_token %} + +
+ +{% else %} + +{% url 'account_email' as email_url %} + +

{% blocktrans %}This e-mail confirmation link expired or is invalid. Please issue a new e-mail confirmation request.{% endblocktrans %}

+ +{% endif %} + +{% endblock %} + diff --git a/histonets/templates/account/login.html b/histonets/templates/account/login.html new file mode 100644 index 0000000..2cadea6 --- /dev/null +++ b/histonets/templates/account/login.html @@ -0,0 +1,48 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load account socialaccount %} +{% load crispy_forms_tags %} + +{% block head_title %}{% trans "Sign In" %}{% endblock %} + +{% block inner %} + +

{% trans "Sign In" %}

+ +{% get_providers as socialaccount_providers %} + +{% if socialaccount_providers %} +

{% blocktrans with site.name as site_name %}Please sign in with one +of your existing third party accounts. Or, sign up +for a {{ site_name }} account and sign in below:{% endblocktrans %}

+ +
+ +
    + {% include "socialaccount/snippets/provider_list.html" with process="login" %} +
+ + + +
+ +{% include "socialaccount/snippets/login_extra.html" %} + +{% else %} +

{% blocktrans %}If you have not created an account yet, then please +sign up first.{% endblocktrans %}

+{% endif %} + + + +{% endblock %} + diff --git a/histonets/templates/account/logout.html b/histonets/templates/account/logout.html new file mode 100644 index 0000000..8e2e675 --- /dev/null +++ b/histonets/templates/account/logout.html @@ -0,0 +1,22 @@ +{% extends "account/base.html" %} + +{% load i18n %} + +{% block head_title %}{% trans "Sign Out" %}{% endblock %} + +{% block inner %} +

{% trans "Sign Out" %}

+ +

{% trans 'Are you sure you want to sign out?' %}

+ +
+ {% csrf_token %} + {% if redirect_field_value %} + + {% endif %} + +
+ + +{% endblock %} + diff --git a/histonets/templates/account/password_change.html b/histonets/templates/account/password_change.html new file mode 100644 index 0000000..b72ca06 --- /dev/null +++ b/histonets/templates/account/password_change.html @@ -0,0 +1,17 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load crispy_forms_tags %} + +{% block head_title %}{% trans "Change Password" %}{% endblock %} + +{% block inner %} +

{% trans "Change Password" %}

+ +
+ {% csrf_token %} + {{ form|crispy }} + +
+{% endblock %} + diff --git a/histonets/templates/account/password_reset.html b/histonets/templates/account/password_reset.html new file mode 100644 index 0000000..845bbda --- /dev/null +++ b/histonets/templates/account/password_reset.html @@ -0,0 +1,26 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load account %} +{% load crispy_forms_tags %} + +{% block head_title %}{% trans "Password Reset" %}{% endblock %} + +{% block inner %} + +

{% trans "Password Reset" %}

+ {% if user.is_authenticated %} + {% include "account/snippets/already_logged_in.html" %} + {% endif %} + +

{% trans "Forgotten your password? Enter your e-mail address below, and we'll send you an e-mail allowing you to reset it." %}

+ +
+ {% csrf_token %} + {{ form|crispy }} + +
+ +

{% blocktrans %}Please contact us if you have any trouble resetting your password.{% endblocktrans %}

+{% endblock %} + diff --git a/histonets/templates/account/password_reset_done.html b/histonets/templates/account/password_reset_done.html new file mode 100644 index 0000000..c59534a --- /dev/null +++ b/histonets/templates/account/password_reset_done.html @@ -0,0 +1,17 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load account %} + +{% block head_title %}{% trans "Password Reset" %}{% endblock %} + +{% block inner %} +

{% trans "Password Reset" %}

+ + {% if user.is_authenticated %} + {% include "account/snippets/already_logged_in.html" %} + {% endif %} + +

{% blocktrans %}We have sent you an e-mail. Please contact us if you do not receive it within a few minutes.{% endblocktrans %}

+{% endblock %} + diff --git a/histonets/templates/account/password_reset_from_key.html b/histonets/templates/account/password_reset_from_key.html new file mode 100644 index 0000000..0bd60d6 --- /dev/null +++ b/histonets/templates/account/password_reset_from_key.html @@ -0,0 +1,25 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load crispy_forms_tags %} +{% block head_title %}{% trans "Change Password" %}{% endblock %} + +{% block inner %} +

{% if token_fail %}{% trans "Bad Token" %}{% else %}{% trans "Change Password" %}{% endif %}

+ + {% if token_fail %} + {% url 'account_reset_password' as passwd_reset_url %} +

{% blocktrans %}The password reset link was invalid, possibly because it has already been used. Please request a new password reset.{% endblocktrans %}

+ {% else %} + {% if form %} +
+ {% csrf_token %} + {{ form|crispy }} + +
+ {% else %} +

{% trans 'Your password is now changed.' %}

+ {% endif %} + {% endif %} +{% endblock %} + diff --git a/histonets/templates/account/password_reset_from_key_done.html b/histonets/templates/account/password_reset_from_key_done.html new file mode 100644 index 0000000..89be086 --- /dev/null +++ b/histonets/templates/account/password_reset_from_key_done.html @@ -0,0 +1,10 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% block head_title %}{% trans "Change Password" %}{% endblock %} + +{% block inner %} +

{% trans "Change Password" %}

+

{% trans 'Your password is now changed.' %}

+{% endblock %} + diff --git a/histonets/templates/account/password_set.html b/histonets/templates/account/password_set.html new file mode 100644 index 0000000..7786e9e --- /dev/null +++ b/histonets/templates/account/password_set.html @@ -0,0 +1,17 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load crispy_forms_tags %} + +{% block head_title %}{% trans "Set Password" %}{% endblock %} + +{% block inner %} +

{% trans "Set Password" %}

+ +
+ {% csrf_token %} + {{ form|crispy }} + +
+{% endblock %} + diff --git a/histonets/templates/account/signup.html b/histonets/templates/account/signup.html new file mode 100644 index 0000000..6a2954e --- /dev/null +++ b/histonets/templates/account/signup.html @@ -0,0 +1,23 @@ +{% extends "account/base.html" %} + +{% load i18n %} +{% load crispy_forms_tags %} + +{% block head_title %}{% trans "Signup" %}{% endblock %} + +{% block inner %} +

{% trans "Sign Up" %}

+ +

{% blocktrans %}Already have an account? Then please sign in.{% endblocktrans %}

+ + + +{% endblock %} + diff --git a/histonets/templates/account/signup_closed.html b/histonets/templates/account/signup_closed.html new file mode 100644 index 0000000..2322f17 --- /dev/null +++ b/histonets/templates/account/signup_closed.html @@ -0,0 +1,12 @@ +{% extends "account/base.html" %} + +{% load i18n %} + +{% block head_title %}{% trans "Sign Up Closed" %}{% endblock %} + +{% block inner %} +

{% trans "Sign Up Closed" %}

+ +

{% trans "We are sorry, but the sign up is currently closed." %}

+{% endblock %} + diff --git a/histonets/templates/account/verification_sent.html b/histonets/templates/account/verification_sent.html new file mode 100644 index 0000000..ad093fd --- /dev/null +++ b/histonets/templates/account/verification_sent.html @@ -0,0 +1,13 @@ +{% extends "account/base.html" %} + +{% load i18n %} + +{% block head_title %}{% trans "Verify Your E-mail Address" %}{% endblock %} + +{% block inner %} +

{% trans "Verify Your E-mail Address" %}

+ +

{% blocktrans %}We have sent an e-mail to you for verification. Follow the link provided to finalize the signup process. Please contact us if you do not receive it within a few minutes.{% endblocktrans %}

+ +{% endblock %} + diff --git a/histonets/templates/account/verified_email_required.html b/histonets/templates/account/verified_email_required.html new file mode 100644 index 0000000..09d4fde --- /dev/null +++ b/histonets/templates/account/verified_email_required.html @@ -0,0 +1,24 @@ +{% extends "account/base.html" %} + +{% load i18n %} + +{% block head_title %}{% trans "Verify Your E-mail Address" %}{% endblock %} + +{% block inner %} +

{% trans "Verify Your E-mail Address" %}

+ +{% url 'account_email' as email_url %} + +

{% blocktrans %}This part of the site requires us to verify that +you are who you claim to be. For this purpose, we require that you +verify ownership of your e-mail address. {% endblocktrans %}

+ +

{% blocktrans %}We have sent an e-mail to you for +verification. Please click on the link inside this e-mail. Please +contact us if you do not receive it within a few minutes.{% endblocktrans %}

+ +

{% blocktrans %}Note: you can still change your e-mail address.{% endblocktrans %}

+ + +{% endblock %} + diff --git a/histonets/templates/base.html b/histonets/templates/base.html new file mode 100644 index 0000000..387fe85 --- /dev/null +++ b/histonets/templates/base.html @@ -0,0 +1,112 @@ +{% load static i18n compress%} + + + + + {% block title %}Histonets{% endblock title %} + + + + + + + + {% block css %} + + + + + + + {% compress css %} + + + + + {% endcompress %} + {% endblock %} + + + + + +
+ + +
+ +
+ + {% if messages %} + {% for message in messages %} +
{{ message }}
+ {% endfor %} + {% endif %} + + {% block content %} +

Use this document as a way to quick-start any new project.

+ {% endblock content %} + +
+ + {% block modal %}{% endblock modal %} + + + + {% block javascript %} + + + + + + + + + + + {% compress js %} + + {% endcompress %} + + {% endblock javascript %} + + + diff --git a/histonets/templates/pages/about.html b/histonets/templates/pages/about.html new file mode 100644 index 0000000..63913c1 --- /dev/null +++ b/histonets/templates/pages/about.html @@ -0,0 +1 @@ +{% extends "base.html" %} \ No newline at end of file diff --git a/histonets/templates/pages/home.html b/histonets/templates/pages/home.html new file mode 100644 index 0000000..63913c1 --- /dev/null +++ b/histonets/templates/pages/home.html @@ -0,0 +1 @@ +{% extends "base.html" %} \ No newline at end of file diff --git a/histonets/templates/users/user_detail.html b/histonets/templates/users/user_detail.html new file mode 100644 index 0000000..e86eda1 --- /dev/null +++ b/histonets/templates/users/user_detail.html @@ -0,0 +1,36 @@ +{% extends "base.html" %} +{% load static %} + +{% block title %}User: {{ object.username }}{% endblock %} + +{% block content %} +
+ +
+
+ +

{{ object.username }}

+ {% if object.name %} +

{{ object.name }}

+ {% endif %} +
+
+ +{% if object == request.user %} + +
+ +
+ My Info + E-Mail + +
+ +
+ +{% endif %} + + +
+{% endblock content %} + diff --git a/histonets/templates/users/user_form.html b/histonets/templates/users/user_form.html new file mode 100644 index 0000000..a054047 --- /dev/null +++ b/histonets/templates/users/user_form.html @@ -0,0 +1,17 @@ +{% extends "base.html" %} +{% load crispy_forms_tags %} + +{% block title %}{{ user.username }}{% endblock %} + +{% block content %} +

{{ user.username }}

+
+ {% csrf_token %} + {{ form|crispy }} +
+
+ +
+
+
+{% endblock %} diff --git a/histonets/templates/users/user_list.html b/histonets/templates/users/user_list.html new file mode 100644 index 0000000..47d3f85 --- /dev/null +++ b/histonets/templates/users/user_list.html @@ -0,0 +1,17 @@ +{% extends "base.html" %} +{% load static i18n %} +{% block title %}Members{% endblock %} + +{% block content %} +
+

Users

+ +
+ {% for user in user_list %} + +

{{ user.username }}

+
+ {% endfor %} +
+
+{% endblock content %} diff --git a/histonets/users/__init__.py b/histonets/users/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/histonets/users/adapters.py b/histonets/users/adapters.py new file mode 100644 index 0000000..b31450a --- /dev/null +++ b/histonets/users/adapters.py @@ -0,0 +1,13 @@ +from django.conf import settings +from allauth.account.adapter import DefaultAccountAdapter +from allauth.socialaccount.adapter import DefaultSocialAccountAdapter + + +class AccountAdapter(DefaultAccountAdapter): + def is_open_for_signup(self, request): + return getattr(settings, 'ACCOUNT_ALLOW_REGISTRATION', True) + + +class SocialAccountAdapter(DefaultSocialAccountAdapter): + def is_open_for_signup(self, request, sociallogin): + return getattr(settings, 'ACCOUNT_ALLOW_REGISTRATION', True) diff --git a/histonets/users/admin.py b/histonets/users/admin.py new file mode 100644 index 0000000..9b61512 --- /dev/null +++ b/histonets/users/admin.py @@ -0,0 +1,39 @@ +from django import forms +from django.contrib import admin +from django.contrib.auth.admin import UserAdmin as AuthUserAdmin +from django.contrib.auth.forms import UserChangeForm, UserCreationForm +from .models import User + + +class MyUserChangeForm(UserChangeForm): + class Meta(UserChangeForm.Meta): + model = User + + +class MyUserCreationForm(UserCreationForm): + + error_message = UserCreationForm.error_messages.update({ + 'duplicate_username': 'This username has already been taken.' + }) + + class Meta(UserCreationForm.Meta): + model = User + + def clean_username(self): + username = self.cleaned_data["username"] + try: + User.objects.get(username=username) + except User.DoesNotExist: + return username + raise forms.ValidationError(self.error_messages['duplicate_username']) + + +@admin.register(User) +class MyUserAdmin(AuthUserAdmin): + form = MyUserChangeForm + add_form = MyUserCreationForm + fieldsets = ( + ('User Profile', {'fields': ('name',)}), + ) + AuthUserAdmin.fieldsets + list_display = ('username', 'name', 'is_superuser') + search_fields = ['name'] diff --git a/histonets/users/apps.py b/histonets/users/apps.py new file mode 100644 index 0000000..11885ae --- /dev/null +++ b/histonets/users/apps.py @@ -0,0 +1,16 @@ +from django.apps import AppConfig + + +class UsersConfig(AppConfig): + name = 'histonets.users' + verbose_name = "Users" + + def ready(self): + """Override this to put in: + Users system checks + Users signal registration + """ + try: + import users.signals # noqa F401 + except ImportError: + pass diff --git a/histonets/users/migrations/0001_initial.py b/histonets/users/migrations/0001_initial.py new file mode 100644 index 0000000..7cc96f6 --- /dev/null +++ b/histonets/users/migrations/0001_initial.py @@ -0,0 +1,43 @@ +import django.contrib.auth.models +import django.contrib.auth.validators +from django.db import migrations, models +import django.utils.timezone + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0008_alter_user_username_max_length'), + ] + + operations = [ + migrations.CreateModel( + name='User', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('password', models.CharField(max_length=128, verbose_name='password')), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser 
status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ('name', models.CharField(blank=True, max_length=255, verbose_name='Name of User')), + ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), + ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), + ], + options={ + 'verbose_name_plural': 'users', + 'verbose_name': 'user', + 'abstract': False, + }, + managers=[ + ('objects', django.contrib.auth.models.UserManager()), + ], + ), + ] diff --git a/histonets/users/migrations/__init__.py b/histonets/users/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/histonets/users/models.py b/histonets/users/models.py new file mode 100644 index 0000000..4b1a10d --- /dev/null +++ b/histonets/users/models.py @@ -0,0 +1,17 @@ +from django.contrib.auth.models import AbstractUser +from django.db import models +from django.urls import reverse +from django.utils.translation import ugettext_lazy as _ + + +class User(AbstractUser): + + # First Name and Last Name do not cover name patterns + # around the globe. 
+ name = models.CharField(_('Name of User'), blank=True, max_length=255) + + def __str__(self): + return self.username + + def get_absolute_url(self): + return reverse('users:detail', kwargs={'username': self.username}) diff --git a/histonets/users/tests/__init__.py b/histonets/users/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/histonets/users/tests/factories.py b/histonets/users/tests/factories.py new file mode 100644 index 0000000..a777ae9 --- /dev/null +++ b/histonets/users/tests/factories.py @@ -0,0 +1,11 @@ +import factory + + +class UserFactory(factory.django.DjangoModelFactory): + username = factory.Sequence(lambda n: f'user-{n}') + email = factory.Sequence(lambda n: f'user-{n}@example.com') + password = factory.PostGenerationMethodCall('set_password', 'password') + + class Meta: + model = 'users.User' + django_get_or_create = ('username', ) diff --git a/histonets/users/tests/test_admin.py b/histonets/users/tests/test_admin.py new file mode 100644 index 0000000..a1ff0b8 --- /dev/null +++ b/histonets/users/tests/test_admin.py @@ -0,0 +1,40 @@ +from test_plus.test import TestCase + +from ..admin import MyUserCreationForm + + +class TestMyUserCreationForm(TestCase): + + def setUp(self): + self.user = self.make_user('notalamode', 'notalamodespassword') + + def test_clean_username_success(self): + # Instantiate the form with a new username + form = MyUserCreationForm({ + 'username': 'alamode', + 'password1': '7jefB#f@Cc7YJB]2v', + 'password2': '7jefB#f@Cc7YJB]2v', + }) + # Run is_valid() to trigger the validation + valid = form.is_valid() + self.assertTrue(valid) + + # Run the actual clean_username method + username = form.clean_username() + self.assertEqual('alamode', username) + + def test_clean_username_false(self): + # Instantiate the form with the same username as self.user + form = MyUserCreationForm({ + 'username': self.user.username, + 'password1': 'notalamodespassword', + 'password2': 'notalamodespassword', + }) + # Run is_valid() to trigger the validation, which is going to fail + # because the username is already taken + valid = form.is_valid() + self.assertFalse(valid) + + # The form.errors dict should contain a single error called 'username' + self.assertTrue(len(form.errors) == 1) + self.assertTrue('username' in form.errors) diff --git a/histonets/users/tests/test_models.py b/histonets/users/tests/test_models.py new file mode 100644 index 0000000..894ed18 --- /dev/null +++ b/histonets/users/tests/test_models.py @@ -0,0 +1,19 @@ +from test_plus.test import TestCase + + +class TestUser(TestCase): + + def setUp(self): + self.user = self.make_user() + + def test__str__(self): + self.assertEqual( + self.user.__str__(), + 'testuser' # This is the default username for self.make_user() + ) + + def test_get_absolute_url(self): + self.assertEqual( + self.user.get_absolute_url(), + '/users/testuser/' + ) diff --git a/histonets/users/tests/test_urls.py b/histonets/users/tests/test_urls.py new file mode 100644 index 0000000..4935b0f --- /dev/null +++ b/histonets/users/tests/test_urls.py @@ -0,0 +1,51 @@ +from django.urls import reverse, resolve + +from test_plus.test import TestCase + + +class TestUserURLs(TestCase): + """Test URL patterns for users app.""" + + def setUp(self): + self.user = self.make_user() + + def test_list_reverse(self): + """users:list should reverse to /users/.""" + self.assertEqual(reverse('users:list'), '/users/') + + def test_list_resolve(self): + """/users/ should resolve to users:list.""" + 
self.assertEqual(resolve('/users/').view_name, 'users:list') + + def test_redirect_reverse(self): + """users:redirect should reverse to /users/~redirect/.""" + self.assertEqual(reverse('users:redirect'), '/users/~redirect/') + + def test_redirect_resolve(self): + """/users/~redirect/ should resolve to users:redirect.""" + self.assertEqual( + resolve('/users/~redirect/').view_name, + 'users:redirect' + ) + + def test_detail_reverse(self): + """users:detail should reverse to /users/testuser/.""" + self.assertEqual( + reverse('users:detail', kwargs={'username': 'testuser'}), + '/users/testuser/' + ) + + def test_detail_resolve(self): + """/users/testuser/ should resolve to users:detail.""" + self.assertEqual(resolve('/users/testuser/').view_name, 'users:detail') + + def test_update_reverse(self): + """users:update should reverse to /users/~update/.""" + self.assertEqual(reverse('users:update'), '/users/~update/') + + def test_update_resolve(self): + """/users/~update/ should resolve to users:update.""" + self.assertEqual( + resolve('/users/~update/').view_name, + 'users:update' + ) diff --git a/histonets/users/tests/test_views.py b/histonets/users/tests/test_views.py new file mode 100644 index 0000000..23f30f0 --- /dev/null +++ b/histonets/users/tests/test_views.py @@ -0,0 +1,64 @@ +from django.test import RequestFactory + +from test_plus.test import TestCase + +from ..views import ( + UserRedirectView, + UserUpdateView +) + + +class BaseUserTestCase(TestCase): + + def setUp(self): + self.user = self.make_user() + self.factory = RequestFactory() + + +class TestUserRedirectView(BaseUserTestCase): + + def test_get_redirect_url(self): + # Instantiate the view directly. Never do this outside a test! + view = UserRedirectView() + # Generate a fake request + request = self.factory.get('/fake-url') + # Attach the user to the request + request.user = self.user + # Attach the request to the view + view.request = request + # Expect: '/users/testuser/', as that is the default username for + # self.make_user() + self.assertEqual( + view.get_redirect_url(), + '/users/testuser/' + ) + + +class TestUserUpdateView(BaseUserTestCase): + + def setUp(self): + # call BaseUserTestCase.setUp() + super(TestUserUpdateView, self).setUp() + # Instantiate the view directly. Never do this outside a test! + self.view = UserUpdateView() + # Generate a fake request + request = self.factory.get('/fake-url') + # Attach the user to the request + request.user = self.user + # Attach the request to the view + self.view.request = request + + def test_get_success_url(self): + # Expect: '/users/testuser/', as that is the default username for + # self.make_user() + self.assertEqual( + self.view.get_success_url(), + '/users/testuser/' + ) + + def test_get_object(self): + # Expect: self.user, as that is the request's user object + self.assertEqual( + self.view.get_object(), + self.user + ) diff --git a/histonets/users/urls.py b/histonets/users/urls.py new file mode 100644 index 0000000..1e16183 --- /dev/null +++ b/histonets/users/urls.py @@ -0,0 +1,27 @@ +from django.conf.urls import url + +from . 
import views + +app_name = 'users' +urlpatterns = [ + url( + regex=r'^$', + view=views.UserListView.as_view(), + name='list' + ), + url( + regex=r'^~redirect/$', + view=views.UserRedirectView.as_view(), + name='redirect' + ), + url( + regex=r'^~update/$', + view=views.UserUpdateView.as_view(), + name='update' + ), + url( + regex=r'^(?P<username>[\w.@+-]+)/$', + view=views.UserDetailView.as_view(), + name='detail' + ), +] diff --git a/histonets/users/views.py b/histonets/users/views.py new file mode 100644 index 0000000..acde4a8 --- /dev/null +++ b/histonets/users/views.py @@ -0,0 +1,44 @@ +from django.contrib.auth.mixins import LoginRequiredMixin +from django.urls import reverse +from django.views.generic import DetailView, ListView, RedirectView, UpdateView + +from .models import User + + +class UserDetailView(LoginRequiredMixin, DetailView): + model = User + # These next two lines tell the view to index lookups by username + slug_field = 'username' + slug_url_kwarg = 'username' + + +class UserRedirectView(LoginRequiredMixin, RedirectView): + permanent = False + + def get_redirect_url(self): + return reverse('users:detail', + kwargs={'username': self.request.user.username}) + + +class UserUpdateView(LoginRequiredMixin, UpdateView): + + fields = ['name', ] + + # we already imported User in the view code above, remember? + model = User + + # send the user back to their own page after a successful update + def get_success_url(self): + return reverse('users:detail', + kwargs={'username': self.request.user.username}) + + def get_object(self): + # Only get the User record for the user making the request + return User.objects.get(username=self.request.user.username) + + +class UserListView(LoginRequiredMixin, ListView): + model = User + # These next two lines tell the view to index lookups by username + slug_field = 'username' + slug_url_kwarg = 'username' diff --git a/local.yml b/local.yml new file mode 100644 index 0000000..0cf68b3 --- /dev/null +++ b/local.yml @@ -0,0 +1,74 @@ +version: '2' + +volumes: + postgres_data_local: {} + postgres_backup_local: {} + +services: + django: &django + build: + context: . + dockerfile: ./compose/local/django/Dockerfile + depends_on: + - postgres + - mailhog + volumes: + - .:/app + env_file: + - ./.envs/.local/.django + - ./.envs/.local/.postgres + - ./.envs/.local/.celery + ports: + - "8000:8000" + command: /start.sh + + postgres: + build: + context: . + dockerfile: ./compose/production/postgres/Dockerfile + volumes: + - postgres_data_local:/var/lib/postgresql/data + - postgres_backup_local:/backups + env_file: + - ./.envs/.local/.postgres + + mailhog: + image: mailhog/mailhog:v1.0.0 + ports: + - "8025:8025" + + redis: + image: redis:3.0 + + cantaloupe: + build: + context: .
+ dockerfile: ./compose/local/cantaloupe/Dockerfile + volumes: + - ./histonets/media/iiif:/var/lib/cantaloupe/images + ports: + - "8182:8182" + + celeryworker: + <<: *django + depends_on: + - redis + - postgres + - mailhog + env_file: + - ./.envs/.local/.celery + - ./.envs/.local/.postgres + ports: [] + command: /start-celeryworker.sh + + celerybeat: + <<: *django + depends_on: + - redis + - postgres + - mailhog + env_file: + - ./.envs/.local/.celery + - ./.envs/.local/.postgres + ports: [] + command: /start-celerybeat.sh diff --git a/locale/README.rst b/locale/README.rst new file mode 100644 index 0000000..c2f1dcd --- /dev/null +++ b/locale/README.rst @@ -0,0 +1,6 @@ +Translations +============ + +Translations will be placed in this folder when running:: + + python manage.py makemessages diff --git a/manage.py b/manage.py new file mode 100755 index 0000000..23ed05a --- /dev/null +++ b/manage.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +import os +import sys + +if __name__ == '__main__': + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') + + try: + from django.core.management import execute_from_command_line + except ImportError: + # The above import may fail for some other reason. Ensure that the + # issue is really that Django is missing to avoid masking other + # exceptions on Python 2. + try: + import django # noqa + except ImportError: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) + raise + + # This allows easy placement of apps within the interior + # histonets directory. + current_path = os.path.dirname(os.path.abspath(__file__)) + sys.path.append(os.path.join(current_path, 'histonets')) + + execute_from_command_line(sys.argv) diff --git a/merge_production_dotenvs_in_dotenv.py b/merge_production_dotenvs_in_dotenv.py new file mode 100644 index 0000000..8bbdec0 --- /dev/null +++ b/merge_production_dotenvs_in_dotenv.py @@ -0,0 +1,70 @@ +import os +from typing import Sequence + +import pytest + +ROOT_DIR_PATH = os.path.dirname(os.path.realpath(__file__)) +PRODUCTION_DOTENVS_DIR_PATH = os.path.join(ROOT_DIR_PATH, '.envs', '.production') +PRODUCTION_DOTENV_FILE_PATHS = [ + os.path.join(PRODUCTION_DOTENVS_DIR_PATH, '.django'), + os.path.join(PRODUCTION_DOTENVS_DIR_PATH, '.postgres'), + os.path.join(PRODUCTION_DOTENVS_DIR_PATH, '.celery'), + os.path.join(PRODUCTION_DOTENVS_DIR_PATH, '.caddy'), +] +DOTENV_FILE_PATH = os.path.join(ROOT_DIR_PATH, '.env') + + +def merge(output_file_path: str, + merged_file_paths: Sequence[str], + append_linesep: bool = True) -> None: + with open(output_file_path, 'w') as output_file: + for merged_file_path in merged_file_paths: + with open(merged_file_path, 'r') as merged_file: + merged_file_content = merged_file.read() + output_file.write(merged_file_content) + if append_linesep: + output_file.write(os.linesep) + + +def main(): + merge(DOTENV_FILE_PATH, PRODUCTION_DOTENV_FILE_PATHS) + + +@pytest.mark.parametrize('merged_file_count', range(3)) +@pytest.mark.parametrize('append_linesep', [True, False]) +def test_merge(tmpdir_factory, + merged_file_count: int, + append_linesep: bool): + tmp_dir_path = str(tmpdir_factory.getbasetemp()) + + output_file_path = os.path.join(tmp_dir_path, '.env') + + expected_output_file_content = '' + merged_file_paths = [] + for i in range(merged_file_count): + merged_file_ord = i + 1 + + merged_filename = '.service{}'.format(merged_file_ord) + merged_file_path = 
os.path.join(tmp_dir_path, merged_filename) + + merged_file_content = merged_filename * merged_file_ord + + with open(merged_file_path, 'w+') as file: + file.write(merged_file_content) + + expected_output_file_content += merged_file_content + if append_linesep: + expected_output_file_content += os.linesep + + merged_file_paths.append(merged_file_path) + + merge(output_file_path, merged_file_paths, append_linesep) + + with open(output_file_path, 'r') as output_file: + actual_output_file_content = output_file.read() + + assert actual_output_file_content == expected_output_file_content + + +if __name__ == '__main__': + main() diff --git a/production.yml b/production.yml new file mode 100644 index 0000000..73a80d8 --- /dev/null +++ b/production.yml @@ -0,0 +1,72 @@ +version: '2' + +volumes: + postgres_data: {} + postgres_backup: {} + caddy: {} + +services: + django: &django + build: + context: . + dockerfile: ./compose/production/django/Dockerfile + depends_on: + - postgres + - redis + env_file: + - ./.envs/.production/.django + - ./.envs/.production/.postgres + - ./.envs/.production/.celery + command: /gunicorn.sh + + postgres: + build: + context: . + dockerfile: ./compose/production/postgres/Dockerfile + volumes: + - postgres_data:/var/lib/postgresql/data + - postgres_backup:/backups + env_file: + - ./.envs/.production/.postgres + + caddy: + build: + context: . + dockerfile: ./compose/production/caddy/Dockerfile + depends_on: + - django + volumes: + - caddy:/root/.caddy + env_file: + - ./.envs/.production/.caddy + ports: + - "0.0.0.0:80:80" + - "0.0.0.0:443:443" + + redis: + image: redis:3.0 + + cantaloupe: + build: + context: . + dockerfile: ./compose/production/cantaloupe/Dockerfile + ports: + - "8182:8182" + + celeryworker: + <<: *django + depends_on: + - postgres + - redis + env_file: + - ./.envs/.production/.celery + command: /start-celeryworker.sh + + celerybeat: + <<: *django + depends_on: + - postgres + - redis + env_file: + - ./.envs/.production/.celery + command: /start-celerybeat.sh diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..5b4369b --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +DJANGO_SETTINGS_MODULE=config.settings.test diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..c1b500c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +# This file is expected by Heroku. 
+ +-r requirements/production.txt diff --git a/requirements/base.txt b/requirements/base.txt new file mode 100644 index 0000000..35bc681 --- /dev/null +++ b/requirements/base.txt @@ -0,0 +1,23 @@ +pytz==2018.3 # https://github.com/stub42/pytz +awesome-slugify==1.6.5 # https://github.com/dimka665/awesome-slugify +Pillow==5.0.0 # https://github.com/python-pillow/Pillow +rcssmin==1.0.6 # https://github.com/ndparker/rcssmin +argon2-cffi==18.1.0 # https://github.com/hynek/argon2_cffi +whitenoise==3.3.1 # https://github.com/evansd/whitenoise +psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2 +redis>=2.10.5 # https://github.com/antirez/redis +celery==3.1.25 # pyup: <4.0 # https://github.com/celery/celery + +# Django +# ------------------------------------------------------------------------------ +django==2.0.3 # pyup: < 2.1 # https://www.djangoproject.com/ +django-environ==0.4.4 # https://github.com/joke2k/django-environ +django-model-utils==3.1.1 # https://github.com/jazzband/django-model-utils +django-allauth==0.35.0 # https://github.com/pennersr/django-allauth +django-crispy-forms==1.7.2 # https://github.com/django-crispy-forms/django-crispy-forms +django-compressor==2.2 # https://github.com/django-compressor/django-compressor +django-redis==4.9.0 # https://github.com/niwinz/django-redis + +# Histonets +#------------------------------------------------------------------------------- +git+git://github.com/sul-cidr/histonets-cv.git#egg=histonets diff --git a/requirements/local.txt b/requirements/local.txt new file mode 100644 index 0000000..6b6323d --- /dev/null +++ b/requirements/local.txt @@ -0,0 +1,25 @@ +-r ./base.txt + +Werkzeug==0.14.1 # https://github.com/pallets/werkzeug +ipdb==0.11 # https://github.com/gotcha/ipdb +Sphinx==1.7.1 # https://github.com/sphinx-doc/sphinx + +# Testing +# ------------------------------------------------------------------------------ +pytest==3.4.2 # https://github.com/pytest-dev/pytest +pytest-sugar==0.9.1 # https://github.com/Frozenball/pytest-sugar + +# Code quality +# ------------------------------------------------------------------------------ +flake8==3.5.0 # https://github.com/PyCQA/flake8 +coverage==4.5.1 # https://github.com/nedbat/coveragepy + +# Django +# ------------------------------------------------------------------------------ +factory-boy==2.10.0 # https://github.com/FactoryBoy/factory_boy +django-test-plus==1.0.22 # https://github.com/revsys/django-test-plus + +django-debug-toolbar==1.9.1 # https://github.com/jazzband/django-debug-toolbar +django-extensions==2.0.5 # https://github.com/django-extensions/django-extensions +django-coverage-plugin==1.5.0 # https://github.com/nedbat/django_coverage_plugin +pytest-django==3.1.2 # https://github.com/pytest-dev/pytest-django diff --git a/requirements/production.txt b/requirements/production.txt new file mode 100644 index 0000000..e6e2585 --- /dev/null +++ b/requirements/production.txt @@ -0,0 +1,12 @@ +# PRECAUTION: avoid production dependencies that aren't in development + +-r base.txt + +gevent==1.2.2 +gunicorn==19.7.1 # https://github.com/benoitc/gunicorn +boto3==1.6.2 # pyup: update minor # https://github.com/boto/boto3 + +# Django +# ------------------------------------------------------------------------------ +django-storages==1.6.5 # https://github.com/jschneier/django-storages +django-anymail==2.0 # https://github.com/anymail/django-anymail diff --git a/run.sh b/run.sh new file mode 100755 index 0000000..95bda08 --- /dev/null +++ b/run.sh @@ -0,0 +1,2 @@ 
+#!/bin/bash +docker-compose -f local.yml run "$@" diff --git a/runtime.txt b/runtime.txt new file mode 100644 index 0000000..5c45380 --- /dev/null +++ b/runtime.txt @@ -0,0 +1 @@ +python-3.6.4 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..2b26ba6 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,7 @@ +[flake8] +max-line-length = 120 +exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules + +[pycodestyle] +max-line-length = 120 +exclude=.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules diff --git a/utility/install_os_dependencies.sh b/utility/install_os_dependencies.sh new file mode 100755 index 0000000..ec9372f --- /dev/null +++ b/utility/install_os_dependencies.sh @@ -0,0 +1,96 @@ +#!/bin/bash + +WORK_DIR="$(dirname "$0")" +DISTRO_NAME=$(lsb_release -sc) +OS_REQUIREMENTS_FILENAME="requirements-$DISTRO_NAME.apt" + +cd "$WORK_DIR" + +# Check if a requirements file exists for the current distribution. +if [ ! -r "$OS_REQUIREMENTS_FILENAME" ]; then + cat <<-EOF >&2 + There is no requirements file for your distribution. + You can check one of the files listed below to find the equivalent packages for your system: + $(find ./ -name "requirements-*.apt" -printf " - %f\n") + EOF + exit 1; +fi + +# Handle call with wrong command +function wrong_command() +{ + echo "${0##*/} - unknown command: '${1}'" >&2 + usage_message +} + +# Print help / script usage +function usage_message() +{ + cat <<-EOF + Usage: $WORK_DIR/${0##*/} + Available commands are: + list Print a list of all packages defined in the ${OS_REQUIREMENTS_FILENAME} file + help Print this help + + Commands that require superuser permission: + install Install the packages defined in the ${OS_REQUIREMENTS_FILENAME} file. Note: this + does not upgrade packages that are already installed, even if a new + version is available in the repository. + upgrade Same as install, but also upgrades already installed packages if a new + version is available. + EOF +} + +# Read the requirements.apt file, and remove comments and blank lines +function list_packages(){ + grep -v "#" "${OS_REQUIREMENTS_FILENAME}" | grep -v "^$"; +} + +function install_packages() +{ + list_packages | xargs apt-get --no-upgrade install -y; +} + +function upgrade_packages() +{ + list_packages | xargs apt-get install -y; +} + +function install_or_upgrade() +{ + P=${1} + PARAM=${P:-"install"} + + if [[ $EUID -ne 0 ]]; then + cat <<-EOF >&2 + You must run this script with root privileges. + Please do: + sudo $WORK_DIR/${0##*/} $PARAM + EOF + exit 1 + else + + apt-get update + + # Install the basic compilation dependencies and other required libraries of this project + if [ "$PARAM" == "install" ]; then + install_packages; + else + upgrade_packages; + fi + + # Clean downloaded packages from the apt-get cache + apt-get clean + + exit 0 + fi +} + +# Handle command argument + case "$1" in + install) install_or_upgrade;; + upgrade) install_or_upgrade "upgrade";; + list) list_packages;; + help|"") usage_message;; + *) wrong_command "$1";; +esac diff --git a/utility/install_python_dependencies.sh b/utility/install_python_dependencies.sh new file mode 100755 index 0000000..77dd95f --- /dev/null +++ b/utility/install_python_dependencies.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +WORK_DIR="$(dirname "$0")" +PROJECT_DIR="$(dirname "$WORK_DIR")" + +pip --version >/dev/null 2>&1 || { + echo >&2 -e "\npip is required but it's not installed."
+ echo >&2 -e "You can install it by running the following command:\n" + echo >&2 "wget https://bootstrap.pypa.io/get-pip.py --output-document=get-pip.py; chmod +x get-pip.py; sudo -H python3 get-pip.py" + echo >&2 -e "\n" + echo >&2 -e "\nFor more information, see pip documentation: https://pip.pypa.io/en/latest/" + exit 1; +} + +virtualenv --version >/dev/null 2>&1 || { + echo >&2 -e "\nvirtualenv is required but it's not installed." + echo >&2 -e "You can install it by running the following command:\n" + echo >&2 "sudo -H pip3 install virtualenv" + echo >&2 -e "\n" + echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/" + exit 1; +} + +if [ -z "$VIRTUAL_ENV" ]; then + echo >&2 -e "\nYou need activate a virtualenv first" + echo >&2 -e 'If you do not have a virtualenv created, run the following command to create and automatically activate a new virtualenv named "venv" on current folder:\n' + echo >&2 -e "virtualenv venv --python=\`which python3\`" + echo >&2 -e "\nTo leave/disable the currently active virtualenv, run the following command:\n" + echo >&2 "deactivate" + echo >&2 -e "\nTo activate the virtualenv again, run the following command:\n" + echo >&2 "source venv/bin/activate" + echo >&2 -e "\nFor more information, see virtualenv documentation: https://virtualenv.pypa.io/en/latest/" + echo >&2 -e "\n" + exit 1; +else + + pip install -r $PROJECT_DIR/requirements/local.txt + pip install -r $PROJECT_DIR/requirements.txt +fi diff --git a/utility/requirements-jessie.apt b/utility/requirements-jessie.apt new file mode 100644 index 0000000..5c49365 --- /dev/null +++ b/utility/requirements-jessie.apt @@ -0,0 +1,23 @@ +##basic build dependencies of various Django apps for Debian Jessie 8.x +#build-essential metapackage install: make, gcc, g++, +build-essential +#required to translate +gettext +python3-dev + +##shared dependencies of: +##Pillow, pylibmc +zlib1g-dev + +##Postgresql and psycopg2 dependencies +libpq-dev + +##Pillow dependencies +libtiff5-dev +libjpeg62-turbo-dev +libfreetype6-dev +liblcms2-dev +libwebp-dev + +##django-extensions +graphviz-dev diff --git a/utility/requirements-stretch.apt b/utility/requirements-stretch.apt new file mode 100644 index 0000000..a2b3a7e --- /dev/null +++ b/utility/requirements-stretch.apt @@ -0,0 +1,23 @@ +##basic build dependencies of various Django apps for Debian Jessie 9.x +#build-essential metapackage install: make, gcc, g++, +build-essential +#required to translate +gettext +python3-dev + +##shared dependencies of: +##Pillow, pylibmc +zlib1g-dev + +##Postgresql and psycopg2 dependencies +libpq-dev + +##Pillow dependencies +libtiff5-dev +libjpeg62-turbo-dev +libfreetype6-dev +liblcms2-dev +libwebp-dev + +##django-extensions +graphviz-dev diff --git a/utility/requirements-trusty.apt b/utility/requirements-trusty.apt new file mode 100644 index 0000000..455f1a8 --- /dev/null +++ b/utility/requirements-trusty.apt @@ -0,0 +1,23 @@ +##basic build dependencies of various Django apps for Ubuntu Trusty 14.04 +#build-essential metapackage install: make, gcc, g++, +build-essential +#required to translate +gettext +python3-dev + +##shared dependencies of: +##Pillow, pylibmc +zlib1g-dev + +##Postgresql and psycopg2 dependencies +libpq-dev + +##Pillow dependencies +libtiff4-dev +libjpeg8-dev +libfreetype6-dev +liblcms1-dev +libwebp-dev + +##django-extensions +graphviz-dev diff --git a/utility/requirements-xenial.apt b/utility/requirements-xenial.apt new file mode 100644 index 0000000..ba84ef1 --- /dev/null 
+++ b/utility/requirements-xenial.apt @@ -0,0 +1,23 @@ +##basic build dependencies of various Django apps for Ubuntu Xenial 16.04 +#build-essential metapackage install: make, gcc, g++, +build-essential +#required to translate +gettext +python3-dev + +##shared dependencies of: +##Pillow, pylibmc +zlib1g-dev + +##Postgresql and psycopg2 dependencies +libpq-dev + +##Pillow dependencies +libtiff5-dev +libjpeg8-dev +libfreetype6-dev +liblcms2-dev +libwebp-dev + +##django-extensions +graphviz-dev
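
A minimal sketch of how this stack might be exercised locally (not part of the patch itself): assuming the local.yml compose file, the run.sh wrapper and pytest.ini introduced above are used as-is, and that the local Django image installs requirements/local.txt (which provides pytest), a first run could look like::

    docker-compose -f local.yml build                                 # build the django, postgres and cantaloupe images
    docker-compose -f local.yml run django python manage.py migrate   # apply the initial migrations (auth, sites, users)
    docker-compose -f local.yml up                                    # Django on :8000, MailHog UI on :8025, Cantaloupe IIIF on :8182
    ./run.sh django pytest                                            # shorthand for: docker-compose -f local.yml run django pytest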