Merge pull request #102 from xsnippet/alembic
Use alembic for schema migrations
malor committed Jul 26, 2020
2 parents 4a7b6e1 + 6323459 commit 4ff54e4
Showing 8 changed files with 257 additions and 65 deletions.
39 changes: 36 additions & 3 deletions .github/workflows/tests.yml
@@ -65,13 +65,46 @@ jobs:
          toolchain: ${{ matrix.rust-version }}
          override: true

      - name: Install libpq (Windows)
        if: matrix.os == 'windows-latest'

      - name: Start PostgreSQL and create a database (Linux)
        if: runner.os == 'Linux'
        run: |
          sudo service postgresql start
          sudo -u postgres createuser -s devel
          sudo -u postgres createdb -O devel devel
          sudo -u postgres psql -c "ALTER USER devel PASSWORD 'devel';" devel
          echo '::set-env name=DATABASE_URL::postgres://devel:devel@localhost/devel'

      - name: Start PostgreSQL and create a database (MacOS)
        if: runner.os == 'macOS'
        run: |
          /usr/local/opt/postgres/bin/pg_ctl -D /usr/local/var/postgres start
          /usr/local/opt/postgres/bin/createuser -s devel
          /usr/local/opt/postgres/bin/createdb -O devel devel
          /usr/local/opt/postgres/bin/psql -c "ALTER USER devel PASSWORD 'devel';" devel
          echo '::set-env name=DATABASE_URL::postgres://devel:devel@localhost/devel'

      - name: Start PostgreSQL and create a database (Windows)
        if: runner.os == 'Windows'
        shell: bash
        run: |
          choco install postgresql12 --force --params '/Password:root'
          echo '::add-path::C:\Program Files\PostgreSQL\12\bin'
          echo '::add-path::C:\Program Files\PostgreSQL\12\lib'
          echo '::set-env name=PQ_LIB_DIR::C:\Program Files\PostgreSQL\12\lib'
          sc config postgresql-x64-12 start= demand
          net start postgresql-x64-12
          "C:\Program Files\PostgreSQL\12\bin\createuser" -s devel
          "C:\Program Files\PostgreSQL\12\bin\createdb" -O devel devel
          "C:\Program Files\PostgreSQL\12\bin\psql" -c "ALTER USER devel PASSWORD 'devel';" devel
          echo '::set-env name=DATABASE_URL::postgres://devel:devel@localhost/devel'

      - name: Install Alembic and psycopg2
        run: |
          python -m pip install --upgrade wheel
          python -m pip install --upgrade alembic psycopg2

      - name: Run database migrations
        run: |
          python -m alembic.config upgrade head

      - uses: actions-rs/cargo@v1
        with:
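The new "Run database migrations" step invokes Alembic through its module entry point (python -m alembic.config), so it works even when the alembic console script is not on PATH. A minimal sketch of the equivalent call from Python, assuming it runs from the repository root (where alembic.ini lives) and that DATABASE_URL is already exported, since env.py reads the connection URL from the environment:

# Sketch: equivalent of the CI step `python -m alembic.config upgrade head`.
# Assumes the working directory contains alembic.ini and DATABASE_URL points
# at a reachable PostgreSQL instance.
from alembic.config import main

main(argv=["upgrade", "head"])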
1 change: 1 addition & 0 deletions .gitignore
@@ -1 +1,2 @@
/target
__pycache__
36 changes: 36 additions & 0 deletions alembic.ini
@@ -0,0 +1,36 @@
[alembic]
script_location = src/storage/sql/migrations/

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = INFO
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
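Note that the committed alembic.ini contains no database URL: script_location points at the migrations directory, and env.py (below) takes the connection string from the DATABASE_URL environment variable instead. A sketch, not part of this change, of driving the same configuration programmatically through Alembic's command API; the URL here is only illustrative and mirrors the one the CI jobs export:

import os

from alembic import command
from alembic.config import Config

# Hypothetical URL matching the CI setup; env.py reads it from the environment.
os.environ.setdefault('DATABASE_URL', 'postgres://devel:devel@localhost/devel')

config = Config('alembic.ini')   # picks up script_location and the logging setup
command.upgrade(config, 'head')  # same effect as `alembic upgrade head`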

This file was deleted.

This file was deleted.

53 changes: 53 additions & 0 deletions src/storage/sql/migrations/env.py
@@ -0,0 +1,53 @@
import logging.config
import os

from alembic import context
from sqlalchemy import create_engine, pool


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """

    context.configure(
        url=os.environ['DATABASE_URL'],
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    connectable = create_engine(
        os.environ['DATABASE_URL'],
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection)

        with context.begin_transaction():
            context.run_migrations()


logging.config.fileConfig(context.config.config_file_name)

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
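When Alembic is invoked with the --sql flag (e.g. python -m alembic.config upgrade head --sql), context.is_offline_mode() is true and run_migrations_offline() emits the DDL as a SQL script on stdout instead of executing it, which is why that path needs no DBAPI. A sketch exercising both paths through the command API, under the same DATABASE_URL assumption as above:

import os

from alembic import command
from alembic.config import Config

os.environ.setdefault('DATABASE_URL', 'postgres://devel:devel@localhost/devel')
config = Config('alembic.ini')

# Offline: print the migration SQL without touching the database.
command.upgrade(config, 'head', sql=True)

# Online: connect via create_engine() in env.py and apply the migrations.
command.upgrade(config, 'head')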
24 changes: 24 additions & 0 deletions src/storage/sql/migrations/script.py.mako
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
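This Mako template is what alembic revision renders into a new file under versions/: the placeholders become the docstring, the revision identifiers, and empty upgrade()/downgrade() stubs. A sketch of generating a revision from it, with a purely illustrative message:

from alembic import command
from alembic.config import Config

# Renders script.py.mako into something like
# src/storage/sql/migrations/versions/<revision>_add_an_example_table.py
command.revision(Config('alembic.ini'), message='add an example table')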
107 changes: 107 additions & 0 deletions src/storage/sql/migrations/versions/1bf4e4e7b24f_initial_schema.py
@@ -0,0 +1,107 @@
"""Initial schema
Revision ID: 1bf4e4e7b24f
Revises:
Create Date: 2020-07-26 08:46:28.752972
"""

from alembic import op
import sqlalchemy as sa


revision = '1bf4e4e7b24f'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
metadata = sa.MetaData()

sa.Table(
'snippets', metadata,

# an internal autoincrementing identifier; only used in foreign keys
sa.Column('id', sa.Integer, primary_key=True),
# a short unique snippet identifier visible to users
sa.Column('slug', sa.String(32), nullable=False),

sa.Column('title', sa.Text),
sa.Column('syntax', sa.Text),
sa.Column('created_at', sa.DateTime(timezone=True),
server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True),
server_default=sa.func.now(), nullable=False),

# slugs must be unique (this will also automatically create a unique index)
sa.UniqueConstraint('slug', name='uq_slug'),

# will be used for pagination; slug guarantees uniqueness of the sorting key
sa.Index('snippets_created_at_slug', 'created_at', 'slug'),
sa.Index('snippets_updated_at_slug', 'updated_at', 'slug'),
)

sa.Table(
'changesets', metadata,

# an internal autoincrementing identifier; only used in foreign keys
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('snippet_id', sa.Integer, nullable=False),

# numeric index used to determine the ordering of changesets for a given snippet
sa.Column('version', sa.Integer,
server_default=sa.text('0'), nullable=False),
sa.Column('content', sa.Text, nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True),
server_default=sa.func.now(), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True),
server_default=sa.func.now(), nullable=False),

# there can be multiple changesets per snippet; changesets should be
# deleted when the parent snippet is deleted
sa.ForeignKeyConstraint(['snippet_id'], ['snippets.id'],
ondelete='CASCADE', name='fk_snippet',
use_alter=False),
# each changeset is supposed to have a unique version number
sa.UniqueConstraint('snippet_id', 'version',
name='uq_version'),
# sanity check: do not allow empty changesets
sa.CheckConstraint('LENGTH(content) > 0',
name='check_not_empty'),
# sanity check: version numbers are non-negative integers
sa.CheckConstraint('version >= 0',
name='check_non_negative_version')

)

# tags could have been associated with snippets as M:M via an auxiliary table,
# but Diesel only supports child-parent associations, so let's do that instead
sa.Table(
'tags', metadata,

# an internal autoincrementing identifier; only used in foreign keys
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('snippet_id', sa.Integer, nullable=False),

sa.Column('value', sa.Text, nullable=False),

# there can be multiple tags per snippet; tags should be deleted when
# the parent snippet is deleted
sa.ForeignKeyConstraint(['snippet_id'], ['snippets.id'],
ondelete='CASCADE',
name='fk_snippet'),

# do not allow to abuse the tags for storing too much data
sa.CheckConstraint('LENGTH(value) < 128', name='check_length'),
# do not allow repeated tags per snippet
sa.UniqueConstraint('snippet_id', 'value', name='uq_snippet_tag'),
)

metadata.create_all(op.get_bind())


def downgrade():
op.drop_table('tags')
op.drop_table('changesets')
op.drop_table('snippets')
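The schema encodes its invariants as database constraints, so invalid writes fail no matter which application code performs them. A sketch demonstrating one of the sanity checks, assuming the devel database from the CI jobs is running and already migrated to head:

import os

import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError

engine = sa.create_engine(os.environ['DATABASE_URL'])

with engine.connect() as conn:
    # Parent row first: changesets.snippet_id is a NOT NULL foreign key.
    snippet_id = conn.execute(
        sa.text("INSERT INTO snippets (slug) VALUES ('example') RETURNING id")
    ).scalar()

    try:
        # Violates check_not_empty: changeset content must be non-empty.
        conn.execute(
            sa.text("INSERT INTO changesets (snippet_id, content) "
                    "VALUES (:id, '')"),
            {'id': snippet_id},
        )
    except IntegrityError as exc:
        print('rejected as expected:', exc.orig)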
