Skip to content

Commit

Permalink
Merge d944c62 into b044d30
Browse files Browse the repository at this point in the history
  • Loading branch information
ElenaHenderson committed Apr 8, 2021
2 parents b044d30 + d944c62 commit 6f21cd5
Show file tree
Hide file tree
Showing 15 changed files with 508 additions and 4 deletions.
14 changes: 13 additions & 1 deletion .buildkite/conbench-deploy/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,21 @@ steps:
- aws ecr get-login-password --region us-east-2 | docker login --username AWS --password-stdin ${DOCKER_REGISTRY}
- docker push ${DOCKER_REGISTRY}/${FLASK_APP}:${BUILDKITE_COMMIT}

- label: "Run Migrates"
key: "run-migrates"
depends_on: "build-and-push"
if: build.branch == 'main'
concurrency: 1
concurrency_group: "conbench-deploy"
command:
- set -x
- aws eks --region us-east-2 update-kubeconfig --name ${EKS_CLUSTER}
- cat migration-job.yml | sed "s/{{BUILDKITE_COMMIT}}/${BUILDKITE_COMMIT}/g; s/{{APPLICATION_NAME}}/${APPLICATION_NAME}/g; s/{{DB_HOST}}/${DB_HOST}/g; s/{{DB_PORT}}/${DB_PORT}/g; s/{{FLASK_APP}}/${FLASK_APP}/g; s/{{DOCKER_REGISTRY}}/${DOCKER_REGISTRY}/g; s/{{CERTIFICATE_ARN}}/${CERTIFICATE_ARN}/g; s/{{EKS_CLUSTER}}/${EKS_CLUSTER}/g" | kubectl delete --ignore-not-found=true -f -
- cat migration-job.yml | sed "s/{{BUILDKITE_COMMIT}}/${BUILDKITE_COMMIT}/g; s/{{APPLICATION_NAME}}/${APPLICATION_NAME}/g; s/{{DB_HOST}}/${DB_HOST}/g; s/{{DB_PORT}}/${DB_PORT}/g; s/{{FLASK_APP}}/${FLASK_APP}/g; s/{{DOCKER_REGISTRY}}/${DOCKER_REGISTRY}/g; s/{{CERTIFICATE_ARN}}/${CERTIFICATE_ARN}/g; s/{{EKS_CLUSTER}}/${EKS_CLUSTER}/g" | kubectl apply -f -

- label: "Deploy"
key: "deploy"
depends_on: "build-and-push"
depends_on: "run-migrates"
if: build.branch == 'main'
concurrency: 1
concurrency_group: "conbench-deploy"
Expand Down
9 changes: 8 additions & 1 deletion .buildkite/conbench-test/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,18 @@ steps:
depends_on: "build"
command: docker-compose run app pytest -vv conbench/tests/

- label: "Test Migrates"
key: "test-migrates"
depends_on: "test"
command:
- docker-compose down
- docker-compose run migration

# This makes sure that deploys are triggered in the same order as the
# test builds, no matter which test builds finish first.
- label: "Concurrency Gate"
key: "concurrency-gate"
depends_on: "test"
depends_on: "test-migrates"
command: "exit 0"
concurrency: 1
concurrency_group: "conbench-deploy"
Expand Down
1 change: 0 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,3 @@ RUN pip install -r /tmp/requirements-test.txt

WORKDIR /app
ADD . /app
CMD ["gunicorn", "-b", "0.0.0.0:5000", "-w", "5", "conbench:application", "--access-logfile=-", "--error-logfile=-", "--preload"]
27 changes: 27 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,8 @@ repository, and the results are hosted on the
(conbench) $ pip install -r requirements-cli.txt
(conbench) $ python setup.py develop

### Start the database
$ brew services start postgres

### Create the databases

Expand Down Expand Up @@ -118,3 +120,28 @@ repository, and the results are hosted on the
(conbench) $ cd ~/workspace/conbench/
(conbench) $ coverage run --source conbench -m pytest conbench/tests/
(conbench) $ coverage report -m

### Test migrations with the database running via brew
(conbench) $ cd ~/workspace/conbench/
(conbench) $ brew services start postgres
(conbench) $ dropdb conbench_prod
(conbench) $ createdb conbench_prod
(conbench) $ alembic upgrade head

### Test migrations with the database running as a Docker container
(conbench) $ cd ~/workspace/conbench/
(conbench) $ brew services stop postgres
(conbench) $ docker-compose down
(conbench) $ docker-compose build
(conbench) $ docker-compose run migration

### To autogenerate a migration
(conbench) $ cd ~/workspace/conbench/
(conbench) $ brew services start postgres
(conbench) $ dropdb conbench_prod
(conbench) $ createdb conbench_prod
(conbench) $ git checkout main && git pull
(conbench) $ alembic upgrade head
(conbench) $ git checkout your-branch
(conbench) $ alembic revision --autogenerate -m "new"

88 changes: 88 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrates

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to migrates/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrates/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
7 changes: 6 additions & 1 deletion conbench/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,12 @@ def _init_application(application):
config={"app_name": Config.APPLICATION_NAME},
)
configure_engine(application.config["SQLALCHEMY_DATABASE_URI"])
create_all()

# Do not create all tables when running alembic migrates in production (CREATE_ALL_TABLES=false)
# using k8s migration job
if Config.CREATE_ALL_TABLES:
create_all()

application.register_blueprint(app, url_prefix="/")
application.register_blueprint(api, url_prefix="/api")
application.register_blueprint(api_docs, url_prefix="/api/docs")
Expand Down
1 change: 1 addition & 0 deletions conbench/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ class Config:
SQLALCHEMY_DATABASE_URI = (
f"postgresql://{DB_USERNAME}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
)
CREATE_ALL_TABLES = os.environ.get("CREATE_ALL_TABLES", "true") == "true"


class TestConfig(Config):
Expand Down
1 change: 1 addition & 0 deletions deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ spec:
containers:
- name: conbench
image: "{{DOCKER_REGISTRY}}/{{FLASK_APP}}:{{BUILDKITE_COMMIT}}"
command: ["gunicorn", "-b", "0.0.0.0:5000", "-w", "5", "conbench:application", "--access-logfile=-", "--error-logfile=-", "--preload"]
imagePullPolicy: "Always"
ports:
- containerPort: 5000
Expand Down
23 changes: 23 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@

version: '3.5'
services:
app:
build:
context: .
dockerfile: Dockerfile
command: ["gunicorn", "-b", "0.0.0.0:5000", "-w", "5", "conbench:application", "--access-logfile=-", "--error-logfile=-", "--preload"]
environment:
APPLICATION_NAME: "Conbench"
DB_USERNAME: "postgres"
Expand All @@ -30,3 +32,24 @@ services:
interval: 10s
timeout: 5s
retries: 5

migration:
build:
context: .
dockerfile: Dockerfile
command: ["alembic", "upgrade", "head"]
environment:
APPLICATION_NAME: "Conbench"
CREATE_ALL_TABLES: "false"
DB_USERNAME: "postgres"
DB_HOST: "db"
DB_PASSWORD: "postgres"
DB_NAME: "postgres"
DB_PORT: "5432"
FLASK_APP: "conbench"
FLASK_ENV: "development"
REGISTRATION_KEY: "code"
SECRET_KEY: "Person, woman, man, camera, TV"
depends_on:
db:
condition: service_healthy
1 change: 1 addition & 0 deletions migrates/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
94 changes: 94 additions & 0 deletions migrates/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

from conbench.config import Config

# Imported for their side effects only: importing each entities module
# registers its tables on Base.metadata so alembic autogenerate sees them.
# "context" is aliased to "_" so it does not shadow alembic's context.
from conbench.entities import (
    case,
    commit,
    context as _,
    data,
    machine,
    run,
    summary,
    time,
    user,
)
from conbench.entities._entity import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Point alembic at the application's database rather than the
# placeholder URL hard-coded in alembic.ini.
# (NOTE: Config was previously imported twice; the duplicate import
# has been removed.)
config.set_main_option("sqlalchemy.url", Config.SQLALCHEMY_DATABASE_URI)


# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the alembic context with just a database URL rather
    than an Engine, so no DBAPI needs to be available. Calls to
    context.execute() emit the given string to the script output
    instead of executing against a live connection.
    """
    offline_options = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] section of the config file and
    runs the migrations over a real database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as db_connection:
        context.configure(
            connection=db_connection,
            target_metadata=target_metadata,
        )

        with context.begin_transaction():
            context.run_migrations()


# Dispatch on how alembic was invoked: offline mode renders SQL to the
# script output; online mode executes migrations against the database.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24 changes: 24 additions & 0 deletions migrates/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}

0 comments on commit 6f21cd5

Please sign in to comment.