diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d2206399e6..64de137c78 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: git@github.com:Yelp/detect-secrets - rev: v1.1.0 + rev: v1.2.0 hooks: - id: detect-secrets args: ['--baseline', '.secrets.baseline'] @@ -13,6 +13,6 @@ repos: - id: no-commit-to-branch args: [--branch, develop, --branch, master, --pattern, release/.*] - repo: https://github.com/psf/black - rev: 21.5b2 + rev: 22.3.0 hooks: - id: black diff --git a/.secrets.baseline b/.secrets.baseline index 560dd80acc..02fb8d434e 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -1,5 +1,5 @@ { - "version": "1.1.0", + "version": "1.2.0", "plugins_used": [ { "name": "ArtifactoryDetector" @@ -20,9 +20,12 @@ { "name": "CloudantDetector" }, + { + "name": "GitHubTokenDetector" + }, { "name": "HexHighEntropyString", - "limit": 3 + "limit": 3.0 }, { "name": "IbmCloudIamDetector" @@ -46,6 +49,9 @@ { "name": "PrivateKeyDetector" }, + { + "name": "SendGridDetector" + }, { "name": "SlackDetector" }, @@ -115,8 +121,7 @@ "filename": "deployment/scripts/postgresql/postgresql_init.sql", "hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3", "is_verified": false, - "line_number": 7, - "is_secret": false + "line_number": 7 } ], "fence/blueprints/storage_creds/google.py": [ @@ -125,8 +130,7 @@ "filename": "fence/blueprints/storage_creds/google.py", "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", "is_verified": false, - "line_number": 139, - "is_secret": false + "line_number": 139 } ], "fence/blueprints/storage_creds/other.py": [ @@ -135,16 +139,14 @@ "filename": "fence/blueprints/storage_creds/other.py", "hashed_secret": "98c144f5ecbb4dbe575147a39698b6be1a5649dd", "is_verified": false, - "line_number": 66, - "is_secret": false + "line_number": 66 }, { "type": "Secret Keyword", "filename": "fence/blueprints/storage_creds/other.py", "hashed_secret": "98c144f5ecbb4dbe575147a39698b6be1a5649dd", "is_verified": false, - "line_number": 66, - "is_secret": false + "line_number": 66 } ], "fence/config-default.yaml": [ @@ -153,8 +155,7 @@ "filename": "fence/config-default.yaml", "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", "is_verified": false, - "line_number": 31, - "is_secret": false + "line_number": 31 } ], "fence/local_settings.example.py": [ @@ -163,16 +164,14 @@ "filename": "fence/local_settings.example.py", "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", "is_verified": false, - "line_number": 6, - "is_secret": false + "line_number": 6 }, { "type": "Secret Keyword", "filename": "fence/local_settings.example.py", "hashed_secret": "5d07e1b80e448a213b392049888111e1779a52db", "is_verified": false, - "line_number": 63, - "is_secret": false + "line_number": 63 } ], "fence/resources/google/utils.py": [ @@ -181,7 +180,7 @@ "filename": "fence/resources/google/utils.py", "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", "is_verified": false, - "line_number": 129 + "line_number": 132 } ], "fence/utils.py": [ @@ -190,8 +189,16 @@ "filename": "fence/utils.py", "hashed_secret": "8318df9ecda039deac9868adf1944a29a95c7114", "is_verified": false, - "line_number": 105, - "is_secret": false + "line_number": 105 + } + ], + "migrations/versions/e4c7b0ab68d3_create_tables.py": [ + { + "type": "Hex High Entropy String", + "filename": "migrations/versions/e4c7b0ab68d3_create_tables.py", + "hashed_secret": "adb1fcd33b07abf9b6a064745759accea5cb341f", + "is_verified": false, + "line_number": 21 } ], 
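The `.secrets.baseline` hunks here are mechanical: re-scanning with the upgraded detect-secrets bumps the `version` field, picks up the newly available `GitHubTokenDetector` and `SendGridDetector` plugins, refreshes line numbers that shifted, and drops the `is_secret` audit labels (consistent with a re-scan that was not re-audited). A sketch of the usual refresh workflow, assuming the standard detect-secrets CLI and pinning the same version as the pre-commit hook:

```bash
pip install detect-secrets==1.2.0                 # match the rev in .pre-commit-config.yaml
detect-secrets scan --baseline .secrets.baseline  # refresh the existing baseline in place
detect-secrets audit .secrets.baseline            # optional: interactively re-label findings
```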
"tests/conftest.py": [ @@ -200,14 +207,14 @@ "filename": "tests/conftest.py", "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", "is_verified": false, - "line_number": 1513 + "line_number": 1516 }, { "type": "Base64 High Entropy String", "filename": "tests/conftest.py", "hashed_secret": "227dea087477346785aefd575f91dd13ab86c108", "is_verified": false, - "line_number": 1536 + "line_number": 1539 } ], "tests/credentials/google/test_credentials.py": [ @@ -216,24 +223,30 @@ "filename": "tests/credentials/google/test_credentials.py", "hashed_secret": "a06bdb09c0106ab559bd6acab2f1935e19f7e939", "is_verified": false, - "line_number": 381, - "is_secret": false + "line_number": 381 }, { "type": "Secret Keyword", "filename": "tests/credentials/google/test_credentials.py", "hashed_secret": "93aa43c580f5347782e17fba5091f944767b15f0", "is_verified": false, - "line_number": 474, - "is_secret": false + "line_number": 474 }, { "type": "Secret Keyword", "filename": "tests/credentials/google/test_credentials.py", "hashed_secret": "768b7fe00de4fd233c0c72375d12f87ce9670144", "is_verified": false, - "line_number": 476, - "is_secret": false + "line_number": 476 + } + ], + "tests/data/test_indexed_file.py": [ + { + "type": "Secret Keyword", + "filename": "tests/data/test_indexed_file.py", + "hashed_secret": "a62f2225bf70bfaccbc7f1ef2a397836717377de", + "is_verified": false, + "line_number": 410 } ], "tests/keys/2018-05-01T21:29:02Z/jwt_private_key.pem": [ @@ -242,8 +255,7 @@ "filename": "tests/keys/2018-05-01T21:29:02Z/jwt_private_key.pem", "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", "is_verified": false, - "line_number": 1, - "is_secret": false + "line_number": 1 } ], "tests/login/test_fence_login.py": [ @@ -252,8 +264,7 @@ "filename": "tests/login/test_fence_login.py", "hashed_secret": "d300421e208bfd0d432294de15169fd9b8975def", "is_verified": false, - "line_number": 48, - "is_secret": false + "line_number": 48 } ], "tests/ras/test_ras.py": [ @@ -272,15 +283,8 @@ "hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3", "is_verified": false, "line_number": 31 - }, - { - "type": "Secret Keyword", - "filename": "tests/test-fence-config.yaml", - "hashed_secret": "1627df13b5cd8b3521d02bd8eb2ca31334b3aef2", - "is_verified": false, - "line_number": 491 } ] }, - "generated_at": "2022-06-27T19:35:11Z" + "generated_at": "2022-07-05T22:07:53Z" } diff --git a/.travis.yml b/.travis.yml index 2d2ef20ddf..98314724b7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,7 +25,7 @@ before_script: - cd - script: - - poetry run pytest -vv --cov=fence --cov-report xml tests + - poetry run pytest -vv --cov=fence --cov=migrations/versions --cov-report xml tests after_script: - python-codacy-coverage -r coverage.xml diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000000..0556a589ae --- /dev/null +++ b/alembic.ini @@ -0,0 +1,100 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. 
+# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/bin/old_migration_script.py b/bin/old_migration_script.py new file mode 100644 index 0000000000..e89c7d8ce7 --- /dev/null +++ b/bin/old_migration_script.py @@ -0,0 +1,850 @@ +""" +This script contains the database migrations written BEFORE switching to +Alembic for migrations. We need to keep it around to migrate databases from a +pre-Alembic version to a post-Alembic version. + +DO NOT ADD NEW MIGRATIONS TO THIS SCRIPT. +Create a new Alembic version instead. 
+""" + + +from sqlalchemy import ( + Integer, + BigInteger, + DateTime, + String, + Column, + Text, + MetaData, + Table, + text, +) +from sqlalchemy.dialects.postgresql import ARRAY, JSONB +from sqlalchemy import exc as sa_exc, func +import warnings + +from fence.config import config +from fence.models import ( + AuthorizationCode, + Client, + GoogleBucketAccessGroup, + GoogleProxyGroup, + GoogleProxyGroupToGoogleBucketAccessGroup, + GoogleServiceAccount, + Project, + User, + UserRefreshToken, +) + +to_timestamp = ( + "CREATE OR REPLACE FUNCTION pc_datetime_to_timestamp(datetoconvert timestamp) " + "RETURNS BIGINT AS " + "$BODY$ " + "select extract(epoch from $1)::BIGINT " + "$BODY$ " + "LANGUAGE 'sql' IMMUTABLE STRICT;" +) + + +def migrate(driver): + if not driver.engine.dialect.supports_alter: + print( + "This engine dialect doesn't support altering so we are not migrating even if necessary!" + ) + return + + md = MetaData() + + table = Table( + UserRefreshToken.__tablename__, md, autoload=True, autoload_with=driver.engine + ) + if str(table.c.expires.type) != "BIGINT": + print("Altering table %s expires to BIGINT" % (UserRefreshToken.__tablename__)) + with driver.session as session: + session.execute(to_timestamp) + with driver.session as session: + session.execute( + "ALTER TABLE {} ALTER COLUMN expires TYPE BIGINT USING pc_datetime_to_timestamp(expires);".format( + UserRefreshToken.__tablename__ + ) + ) + + # username limit migration + + table = Table(User.__tablename__, md, autoload=True, autoload_with=driver.engine) + if str(table.c.username.type) != str(User.username.type): + print( + "Altering table %s column username type to %s" + % (User.__tablename__, str(User.username.type)) + ) + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" ALTER COLUMN username TYPE {};'.format( + User.__tablename__, str(User.username.type) + ) + ) + + # oidc migration + + table = Table(Client.__tablename__, md, autoload=True, autoload_with=driver.engine) + if not ("ix_name" in [constraint.name for constraint in table.constraints]): + with driver.session as session: + session.execute( + "ALTER TABLE {} ADD constraint ix_name unique (name);".format( + Client.__tablename__ + ) + ) + + if "_allowed_scopes" not in table.c: + print( + "Altering table {} to add _allowed_scopes column".format( + Client.__tablename__ + ) + ) + with driver.session as session: + session.execute( + "ALTER TABLE {} ADD COLUMN _allowed_scopes VARCHAR;".format( + Client.__tablename__ + ) + ) + for client in session.query(Client): + if not client._allowed_scopes: + client._allowed_scopes = " ".join(config["CLIENT_ALLOWED_SCOPES"]) + session.add(client) + session.commit() + session.execute( + "ALTER TABLE {} ALTER COLUMN _allowed_scopes SET NOT NULL;".format( + Client.__tablename__ + ) + ) + + add_column_if_not_exist( + table_name=GoogleProxyGroup.__tablename__, + column=Column("email", String), + driver=driver, + metadata=md, + ) + + add_column_if_not_exist( + table_name=AuthorizationCode.__tablename__, + column=Column("refresh_token_expires_in", Integer), + driver=driver, + metadata=md, + ) + + drop_foreign_key_column_if_exist( + table_name=GoogleProxyGroup.__tablename__, + column_name="user_id", + driver=driver, + metadata=md, + ) + + _add_google_project_id(driver, md) + + drop_unique_constraint_if_exist( + table_name=GoogleServiceAccount.__tablename__, + column_name="google_unique_id", + driver=driver, + metadata=md, + ) + + drop_unique_constraint_if_exist( + table_name=GoogleServiceAccount.__tablename__, + 
column_name="google_project_id", + driver=driver, + metadata=md, + ) + + add_column_if_not_exist( + table_name=GoogleBucketAccessGroup.__tablename__, + column=Column("privileges", ARRAY(String)), + driver=driver, + metadata=md, + ) + + _update_for_authlib(driver, md) + + # Delete-user migration + + # Check if at least one constraint is already migrated and if so skip + # the delete cascade migration. + user = Table(User.__tablename__, md, autoload=True, autoload_with=driver.engine) + found_user_constraint_already_migrated = False + + for fkey in list(user.foreign_key_constraints): + if ( + len(fkey.column_keys) == 1 + and "google_proxy_group_id" in fkey.column_keys + and fkey.ondelete == "SET NULL" + ): + found_user_constraint_already_migrated = True + + if not found_user_constraint_already_migrated: + # do delete user migration in one session + delete_user_session = driver.Session() + try: + # Deleting google proxy group shouldn't delete user + set_foreign_key_constraint_on_delete_setnull( + table_name=User.__tablename__, + column_name="google_proxy_group_id", + fk_table_name=GoogleProxyGroup.__tablename__, + fk_column_name="id", + driver=driver, + session=delete_user_session, + metadata=md, + ) + + _set_on_delete_cascades(driver, delete_user_session, md) + + delete_user_session.commit() + except Exception: + delete_user_session.rollback() + raise + finally: + delete_user_session.close() + + _remove_policy(driver, md) + + add_column_if_not_exist( + table_name=User.__tablename__, + column=Column( + "_last_auth", DateTime(timezone=False), server_default=func.now() + ), + driver=driver, + metadata=md, + ) + + add_column_if_not_exist( + table_name=User.__tablename__, + column=Column("additional_info", JSONB(), server_default=text("'{}'")), + driver=driver, + metadata=md, + ) + + with driver.session as session: + session.execute( + """\ +CREATE OR REPLACE FUNCTION process_user_audit() RETURNS TRIGGER AS $user_audit$ + BEGIN + IF (TG_OP = 'DELETE') THEN + INSERT INTO user_audit_logs (timestamp, operation, old_values) + SELECT now(), 'DELETE', row_to_json(OLD); + RETURN OLD; + ELSIF (TG_OP = 'UPDATE') THEN + INSERT INTO user_audit_logs (timestamp, operation, old_values, new_values) + SELECT now(), 'UPDATE', row_to_json(OLD), row_to_json(NEW); + RETURN NEW; + ELSIF (TG_OP = 'INSERT') THEN + INSERT INTO user_audit_logs (timestamp, operation, new_values) + SELECT now(), 'INSERT', row_to_json(NEW); + RETURN NEW; + END IF; + RETURN NULL; + END; +$user_audit$ LANGUAGE plpgsql;""" + ) + + exist = session.scalar( + "SELECT exists (SELECT * FROM pg_trigger WHERE tgname = 'user_audit')" + ) + session.execute( + ('DROP TRIGGER user_audit ON "User"; ' if exist else "") + + """\ +CREATE TRIGGER user_audit +AFTER INSERT OR UPDATE OR DELETE ON "User" + FOR EACH ROW EXECUTE PROCEDURE process_user_audit();""" + ) + + session.execute( + """\ +CREATE OR REPLACE FUNCTION process_cert_audit() RETURNS TRIGGER AS $cert_audit$ + BEGIN + IF (TG_OP = 'DELETE') THEN + INSERT INTO cert_audit_logs (timestamp, operation, user_id, username, old_values) + SELECT now(), 'DELETE', "User".id, "User".username, row_to_json(OLD) + FROM application INNER JOIN "User" ON application.user_id = "User".id + WHERE OLD.application_id = application.id; + RETURN OLD; + ELSIF (TG_OP = 'UPDATE') THEN + INSERT INTO cert_audit_logs (timestamp, operation, user_id, username, old_values, new_values) + SELECT now(), 'UPDATE', "User".id, "User".username, row_to_json(OLD), row_to_json(NEW) + FROM application INNER JOIN "User" ON application.user_id = 
"User".id + WHERE NEW.application_id = application.id; + RETURN NEW; + ELSIF (TG_OP = 'INSERT') THEN + INSERT INTO cert_audit_logs (timestamp, operation, user_id, username, new_values) + SELECT now(), 'INSERT', "User".id, "User".username, row_to_json(NEW) + FROM application INNER JOIN "User" ON application.user_id = "User".id + WHERE NEW.application_id = application.id; + RETURN NEW; + END IF; + RETURN NULL; + END; +$cert_audit$ LANGUAGE plpgsql;""" + ) + + exist = session.scalar( + "SELECT exists (SELECT * FROM pg_trigger WHERE tgname = 'cert_audit')" + ) + session.execute( + ("DROP TRIGGER cert_audit ON certificate; " if exist else "") + + """\ +CREATE TRIGGER cert_audit +AFTER INSERT OR UPDATE OR DELETE ON certificate + FOR EACH ROW EXECUTE PROCEDURE process_cert_audit();""" + ) + + # Google Access expiration + + add_column_if_not_exist( + table_name=GoogleProxyGroupToGoogleBucketAccessGroup.__tablename__, + column=Column("expires", BigInteger()), + driver=driver, + metadata=md, + ) + + add_column_if_not_exist( + table_name=Project.__tablename__, + column=Column("authz", String), + driver=driver, + metadata=md, + ) + + +def add_foreign_key_column_if_not_exist( + table_name, + column_name, + column_type, + fk_table_name, + fk_column_name, + driver, + metadata, +): + column = Column(column_name, column_type) + add_column_if_not_exist(table_name, column, driver, metadata) + add_foreign_key_constraint_if_not_exist( + table_name, column_name, fk_table_name, fk_column_name, driver, metadata + ) + + +def drop_foreign_key_column_if_exist(table_name, column_name, driver, metadata): + drop_foreign_key_constraint_if_exist(table_name, column_name, driver, metadata) + drop_column_if_exist(table_name, column_name, driver, metadata) + + +def add_column_if_not_exist(table_name, column, driver, metadata, default=None): + column_name = column.compile(dialect=driver.engine.dialect) + column_type = column.type.compile(driver.engine.dialect) + + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + if str(column_name) not in table.c: + with driver.session as session: + command = 'ALTER TABLE "{}" ADD COLUMN {} {}'.format( + table_name, column_name, column_type + ) + if not column.nullable: + command += " NOT NULL" + if getattr(column, "default"): + default = column.default.arg + if isinstance(default, str): + default = "'{}'".format(default) + command += " DEFAULT {}".format(default) + command += ";" + + session.execute(command) + session.commit() + + +def drop_column_if_exist(table_name, column_name, driver, metadata): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + if column_name in table.c: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" DROP COLUMN {};'.format(table_name, column_name) + ) + session.commit() + + +def add_foreign_key_constraint_if_not_exist( + table_name, column_name, fk_table_name, fk_column_name, driver, metadata +): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + foreign_key_name = "{}_{}_fkey".format(table_name.lower(), column_name) + + if column_name in table.c: + foreign_keys = [fk.name for fk in getattr(table.c, column_name).foreign_keys] + if foreign_key_name not in foreign_keys: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" ADD CONSTRAINT {} ' + 'FOREIGN KEY({}) REFERENCES "{}" ({});'.format( + table_name, + foreign_key_name, + column_name, + fk_table_name, + fk_column_name, + ) + ) + session.commit() + + +def 
set_foreign_key_constraint_on_delete_cascade( + table_name, column_name, fk_table_name, fk_column_name, driver, session, metadata +): + set_foreign_key_constraint_on_delete( + table_name, + column_name, + fk_table_name, + fk_column_name, + "CASCADE", + driver, + session, + metadata, + ) + + +def set_foreign_key_constraint_on_delete_setnull( + table_name, column_name, fk_table_name, fk_column_name, driver, session, metadata +): + set_foreign_key_constraint_on_delete( + table_name, + column_name, + fk_table_name, + fk_column_name, + "SET NULL", + driver, + session, + metadata, + ) + + +def set_foreign_key_constraint_on_delete( + table_name, + column_name, + fk_table_name, + fk_column_name, + ondelete, + driver, + session, + metadata, +): + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + message="Predicate of partial index \S+ ignored during reflection", + category=sa_exc.SAWarning, + ) + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + foreign_key_name = "{}_{}_fkey".format(table_name.lower(), column_name) + + if column_name in table.c: + session.execute( + 'ALTER TABLE ONLY "{}" DROP CONSTRAINT IF EXISTS {}, ' + 'ADD CONSTRAINT {} FOREIGN KEY ({}) REFERENCES "{}" ({}) ON DELETE {};'.format( + table_name, + foreign_key_name, + foreign_key_name, + column_name, + fk_table_name, + fk_column_name, + ondelete, + ) + ) + + +def drop_foreign_key_constraint_if_exist(table_name, column_name, driver, metadata): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + foreign_key_name = "{}_{}_fkey".format(table_name.lower(), column_name) + + if column_name in table.c: + foreign_keys = [fk.name for fk in getattr(table.c, column_name).foreign_keys] + if foreign_key_name in foreign_keys: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" DROP CONSTRAINT {};'.format( + table_name, foreign_key_name + ) + ) + session.commit() + + +def add_unique_constraint_if_not_exist(table_name, column_name, driver, metadata): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + index_name = "{}_{}_key".format(table_name, column_name) + + if column_name in table.c: + indexes = [index.name for index in table.indexes] + + if index_name not in indexes: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" ADD CONSTRAINT {} UNIQUE ({});'.format( + table_name, index_name, column_name + ) + ) + session.commit() + + +def drop_unique_constraint_if_exist(table_name, column_name, driver, metadata): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + constraint_name = "{}_{}_key".format(table_name, column_name) + + if column_name in table.c: + constraints = [ + constaint.name for constaint in getattr(table.c, column_name).constraints + ] + + unique_index = None + for index in table.indexes: + if index.name == constraint_name: + unique_index = index + + if constraint_name in constraints or unique_index: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" DROP CONSTRAINT {};'.format( + table_name, constraint_name + ) + ) + session.commit() + + +def drop_default_value(table_name, column_name, driver, metadata): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + + if column_name in table.c: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" ALTER COLUMN "{}" DROP DEFAULT;'.format( + table_name, column_name + ) + ) + session.commit() + + +def add_not_null_constraint(table_name, 
column_name, driver, metadata): + table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) + + if column_name in table.c: + with driver.session as session: + session.execute( + 'ALTER TABLE "{}" ALTER COLUMN "{}" SET NOT NULL;'.format( + table_name, column_name + ) + ) + session.commit() + + +def _remove_policy(driver, md): + with driver.session as session: + session.execute("DROP TABLE IF EXISTS users_to_policies;") + session.execute("DROP TABLE IF EXISTS policy;") + session.commit() + + +def _add_google_project_id(driver, md): + """ + Add new unique not null field to GoogleServiceAccount. + In order to do this without errors, we have to: + - add the field and allow null (for all previous rows) + - update all null entries to be unique + - at the moment this is just for dev environments since we don't + have anything in production. thus, these nonsense values will + be sufficient + - new additions of GoogleServiceAccounts will require this field + to be not null and unique + - add unique constraint + - add not null constraint + """ + # add new google_project_id column + add_column_if_not_exist( + table_name=GoogleServiceAccount.__tablename__, + column=Column("google_project_id", String), + driver=driver, + metadata=md, + ) + + # make rows have unique values for new column + with driver.session as session: + rows_to_make_unique = session.query(GoogleServiceAccount).filter( + GoogleServiceAccount.google_project_id.is_(None) + ) + count = 0 + for row in rows_to_make_unique: + row.google_project_id = count + count += 1 + session.commit() + + # add not null constraint + add_not_null_constraint( + table_name=GoogleServiceAccount.__tablename__, + column_name="google_project_id", + driver=driver, + metadata=md, + ) + + +def _update_for_authlib(driver, md): + """ + Going to authlib=0.9, the OAuth2ClientMixin from authlib, which the client model + inherits from, adds these new columns, some of which were added directly to the + client model in order to override some things like nullability. 
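The `_add_google_project_id` helper above follows the standard three-step recipe for introducing a NOT NULL column on a populated table: add it nullable, backfill, then tighten the constraint. Re-expressed as an Alembic revision body it would look roughly like this (a sketch only, not part of this PR; revision-identifier boilerplate is omitted, and the backfill expression is illustrative where the original used a Python counter):

```python
import sqlalchemy as sa
from alembic import op


def upgrade():
    # 1. add the column as nullable so existing rows remain valid
    op.add_column(
        "google_service_account",
        sa.Column("google_project_id", sa.String(), nullable=True),
    )
    # 2. backfill a unique placeholder value for every pre-existing row
    op.execute(
        "UPDATE google_service_account SET google_project_id = id::text "
        "WHERE google_project_id IS NULL"
    )
    # 3. tighten the constraint once every row has a value
    op.alter_column("google_service_account", "google_project_id", nullable=False)
```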
+ """ + CLIENT_COLUMNS_TO_ADD = [ + Column("issued_at", Integer), + Column("expires_at", Integer, nullable=False, default=0), + Column("redirect_uri", Text, nullable=False, default=""), + Column( + "token_endpoint_auth_method", + String(48), + default="client_secret_basic", + server_default="client_secret_basic", + ), + Column("grant_type", Text, nullable=False, default=""), + Column("response_type", Text, nullable=False, default=""), + Column("scope", Text, nullable=False, default=""), + Column("client_name", String(100)), + Column("client_uri", Text), + Column("logo_uri", Text), + Column("contact", Text), + Column("tos_uri", Text), + Column("policy_uri", Text), + Column("jwks_uri", Text), + Column("jwks_text", Text), + Column("i18n_metadata", Text), + Column("software_id", String(36)), + Column("software_version", String(48)), + ] + add_client_col = lambda col: add_column_if_not_exist( + Client.__tablename__, column=col, driver=driver, metadata=md + ) + list(map(add_client_col, CLIENT_COLUMNS_TO_ADD)) + CODE_COLUMNS_TO_ADD = [Column("response_type", Text, default="")] + + with driver.session as session: + for client in session.query(Client).all(): + # add redirect_uri + if not client.redirect_uri: + redirect_uris = getattr(client, "_redirect_uris") or "" + client.redirect_uri = "\n".join(redirect_uris.split()) + # add grant_type; everything prior to migration was just using code grant + if not client.grant_type: + client.grant_type = "authorization_code\nrefresh_token" + session.commit() + + add_code_col = lambda col: add_column_if_not_exist( + AuthorizationCode.__tablename__, column=col, driver=driver, metadata=md + ) + list(map(add_code_col, CODE_COLUMNS_TO_ADD)) + with driver.session as session: + session.execute("ALTER TABLE client ALTER COLUMN client_secret DROP NOT NULL") + session.commit() + + # these ones are "manual" + table = Table( + AuthorizationCode.__tablename__, md, autoload=True, autoload_with=driver.engine + ) + auth_code_columns = list(map(str, table.columns)) + tablename = AuthorizationCode.__tablename__ + # delete expires_at column + if "{}.expires_at".format(tablename) in auth_code_columns: + with driver.session as session: + session.execute("ALTER TABLE {} DROP COLUMN expires_at;".format(tablename)) + session.commit() + # add auth_time column + if "{}.auth_time".format(tablename) not in auth_code_columns: + with driver.session as session: + command = "ALTER TABLE {} ADD COLUMN auth_time Integer NOT NULL DEFAULT extract(epoch from now());".format( + tablename + ) + session.execute(command) + session.commit() + # make sure modifiers on auth_time column are correct + with driver.session as session: + session.execute( + "ALTER TABLE {} ALTER COLUMN auth_time SET NOT NULL;".format(tablename) + ) + session.commit() + session.execute( + "ALTER TABLE {} ALTER COLUMN auth_time SET DEFAULT extract(epoch from now());".format( + tablename + ) + ) + session.commit() + + +def _set_on_delete_cascades(driver, session, md): + set_foreign_key_constraint_on_delete_cascade( + "client", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "authorization_code", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "google_service_account", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "user_google_account", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "user_google_account_to_proxy_group", + 
"user_google_account_id", + "user_google_account", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "user_google_account_to_proxy_group", + "proxy_group_id", + "google_proxy_group", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "google_service_account_key", + "service_account_id", + "google_service_account", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "google_bucket_access_group", "bucket_id", "bucket", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "google_proxy_group_to_google_bucket_access_group", + "proxy_group_id", + "google_proxy_group", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "google_proxy_group_to_google_bucket_access_group", + "access_group_id", + "google_bucket_access_group", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "service_account_access_privilege", + "project_id", + "project", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "service_account_access_privilege", + "service_account_id", + "user_service_account", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "service_account_to_google_bucket_access_group", + "service_account_id", + "user_service_account", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "service_account_to_google_bucket_access_group", + "access_group_id", + "google_bucket_access_group", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "hmac_keypair", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "hmac_keypair_archive", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "user_to_group", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "user_to_group", "group_id", "Group", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "access_privilege", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "access_privilege", "group_id", "Group", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "access_privilege", "project_id", "project", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "access_privilege", + "provider_id", + "authorization_provider", + "id", + driver, + session, + md, + ) + set_foreign_key_constraint_on_delete_cascade( + "user_to_bucket", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "user_to_bucket", "bucket_id", "bucket", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "bucket", "provider_id", "cloud_provider", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "project_to_bucket", "project_id", "project", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "project_to_bucket", "bucket_id", "bucket", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "compute_access", "project_id", "project", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "compute_access", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + 
"compute_access", "group_id", "Group", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "compute_access", "provider_id", "cloud_provider", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "storage_access", "project_id", "project", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "storage_access", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "storage_access", "group_id", "Group", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "storage_access", "provider_id", "cloud_provider", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "certificate", "application_id", "application", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "s3credential", "user_id", "User", "id", driver, session, md + ) + set_foreign_key_constraint_on_delete_cascade( + "tag", "user_id", "User", "id", driver, session, md + ) diff --git a/fence/__init__.py b/fence/__init__.py index 2444b53689..729bb0e01d 100755 --- a/fence/__init__.py +++ b/fence/__init__.py @@ -34,7 +34,6 @@ from fence.blueprints.login.utils import allowed_login_redirects, domain from fence.errors import UserError from fence.jwt import keys -from fence.models import migrate, IdentityProvider from fence.oidc.client import query_client from fence.oidc.server import server from fence.resources.audit.client import AuditServiceClient @@ -106,16 +105,12 @@ def app_init( def app_sessions(app): app.url_map.strict_slashes = False - app.db = SQLAlchemyDriver(config["DB"]) - # TODO: we will make a more robust migration system external from the application - # initialization soon - if config["ENABLE_DB_MIGRATION"]: - logger.info("Running database migration...") - migrate(app.db) - logger.info("Done running database migration.") - else: - logger.info("NOT running database migration.") + # override userdatamodel's `setup_db` function which creates tables + # and runs database migrations, because Alembic handles that now. 
+ # TODO move userdatamodel code to Fence and remove dependencies to it + SQLAlchemyDriver.setup_db = lambda _: None + app.db = SQLAlchemyDriver(config["DB"]) session = flask_scoped_session(app.db.Session, app) # noqa app.session_interface = UserSessionInterface() diff --git a/fence/models.py b/fence/models.py index d911e7cb49..1122e14cf2 100644 --- a/fence/models.py +++ b/fence/models.py @@ -14,20 +14,15 @@ from sqlalchemy import ( Integer, BigInteger, - DateTime, String, Column, Boolean, Text, - MetaData, - Table, text, event, ) from sqlalchemy.dialects.postgresql import ARRAY, JSONB from sqlalchemy.orm import relationship, backref -from sqlalchemy.sql import func -from sqlalchemy import exc as sa_exc from sqlalchemy import func from sqlalchemy.schema import ForeignKey from userdatamodel import Base @@ -53,7 +48,6 @@ UserToBucket, UserToGroup, ) -import warnings from fence import logger from fence.config import config @@ -744,817 +738,3 @@ def populate_iss_sub_pair_to_user_table(target, connection, **kw): else: transaction.commit() logger.info("Population was successful") - - -to_timestamp = ( - "CREATE OR REPLACE FUNCTION pc_datetime_to_timestamp(datetoconvert timestamp) " - "RETURNS BIGINT AS " - "$BODY$ " - "select extract(epoch from $1)::BIGINT " - "$BODY$ " - "LANGUAGE 'sql' IMMUTABLE STRICT;" -) - - -def migrate(driver): - if not driver.engine.dialect.supports_alter: - print( - "This engine dialect doesn't support altering so we are not migrating even if necessary!" - ) - return - - md = MetaData() - - table = Table( - UserRefreshToken.__tablename__, md, autoload=True, autoload_with=driver.engine - ) - if str(table.c.expires.type) != "BIGINT": - print("Altering table %s expires to BIGINT" % (UserRefreshToken.__tablename__)) - with driver.session as session: - session.execute(to_timestamp) - with driver.session as session: - session.execute( - "ALTER TABLE {} ALTER COLUMN expires TYPE BIGINT USING pc_datetime_to_timestamp(expires);".format( - UserRefreshToken.__tablename__ - ) - ) - - # username limit migration - - table = Table(User.__tablename__, md, autoload=True, autoload_with=driver.engine) - if str(table.c.username.type) != str(User.username.type): - print( - "Altering table %s column username type to %s" - % (User.__tablename__, str(User.username.type)) - ) - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" ALTER COLUMN username TYPE {};'.format( - User.__tablename__, str(User.username.type) - ) - ) - - # oidc migration - - table = Table(Client.__tablename__, md, autoload=True, autoload_with=driver.engine) - if not ("ix_name" in [constraint.name for constraint in table.constraints]): - with driver.session as session: - session.execute( - "ALTER TABLE {} ADD constraint ix_name unique (name);".format( - Client.__tablename__ - ) - ) - - if "_allowed_scopes" not in table.c: - print( - "Altering table {} to add _allowed_scopes column".format( - Client.__tablename__ - ) - ) - with driver.session as session: - session.execute( - "ALTER TABLE {} ADD COLUMN _allowed_scopes VARCHAR;".format( - Client.__tablename__ - ) - ) - for client in session.query(Client): - if not client._allowed_scopes: - client._allowed_scopes = " ".join(config["CLIENT_ALLOWED_SCOPES"]) - session.add(client) - session.commit() - session.execute( - "ALTER TABLE {} ALTER COLUMN _allowed_scopes SET NOT NULL;".format( - Client.__tablename__ - ) - ) - - add_column_if_not_exist( - table_name=GoogleProxyGroup.__tablename__, - column=Column("email", String), - driver=driver, - metadata=md, - ) - - 
add_column_if_not_exist( - table_name=AuthorizationCode.__tablename__, - column=Column("refresh_token_expires_in", Integer), - driver=driver, - metadata=md, - ) - - drop_foreign_key_column_if_exist( - table_name=GoogleProxyGroup.__tablename__, - column_name="user_id", - driver=driver, - metadata=md, - ) - - _add_google_project_id(driver, md) - - drop_unique_constraint_if_exist( - table_name=GoogleServiceAccount.__tablename__, - column_name="google_unique_id", - driver=driver, - metadata=md, - ) - - drop_unique_constraint_if_exist( - table_name=GoogleServiceAccount.__tablename__, - column_name="google_project_id", - driver=driver, - metadata=md, - ) - - add_column_if_not_exist( - table_name=GoogleBucketAccessGroup.__tablename__, - column=Column("privileges", ARRAY(String)), - driver=driver, - metadata=md, - ) - - _update_for_authlib(driver, md) - - # Delete-user migration - - # Check if at least one constraint is already migrated and if so skip - # the delete cascade migration. - user = Table(User.__tablename__, md, autoload=True, autoload_with=driver.engine) - found_user_constraint_already_migrated = False - - for fkey in list(user.foreign_key_constraints): - if ( - len(fkey.column_keys) == 1 - and "google_proxy_group_id" in fkey.column_keys - and fkey.ondelete == "SET NULL" - ): - found_user_constraint_already_migrated = True - - if not found_user_constraint_already_migrated: - # do delete user migration in one session - delete_user_session = driver.Session() - try: - # Deleting google proxy group shouldn't delete user - set_foreign_key_constraint_on_delete_setnull( - table_name=User.__tablename__, - column_name="google_proxy_group_id", - fk_table_name=GoogleProxyGroup.__tablename__, - fk_column_name="id", - driver=driver, - session=delete_user_session, - metadata=md, - ) - - _set_on_delete_cascades(driver, delete_user_session, md) - - delete_user_session.commit() - except Exception: - delete_user_session.rollback() - raise - finally: - delete_user_session.close() - - _remove_policy(driver, md) - - add_column_if_not_exist( - table_name=User.__tablename__, - column=Column( - "_last_auth", DateTime(timezone=False), server_default=func.now() - ), - driver=driver, - metadata=md, - ) - - add_column_if_not_exist( - table_name=User.__tablename__, - column=Column("additional_info", JSONB(), server_default=text("'{}'")), - driver=driver, - metadata=md, - ) - - with driver.session as session: - session.execute( - """\ -CREATE OR REPLACE FUNCTION process_user_audit() RETURNS TRIGGER AS $user_audit$ - BEGIN - IF (TG_OP = 'DELETE') THEN - INSERT INTO user_audit_logs (timestamp, operation, old_values) - SELECT now(), 'DELETE', row_to_json(OLD); - RETURN OLD; - ELSIF (TG_OP = 'UPDATE') THEN - INSERT INTO user_audit_logs (timestamp, operation, old_values, new_values) - SELECT now(), 'UPDATE', row_to_json(OLD), row_to_json(NEW); - RETURN NEW; - ELSIF (TG_OP = 'INSERT') THEN - INSERT INTO user_audit_logs (timestamp, operation, new_values) - SELECT now(), 'INSERT', row_to_json(NEW); - RETURN NEW; - END IF; - RETURN NULL; - END; -$user_audit$ LANGUAGE plpgsql;""" - ) - - exist = session.scalar( - "SELECT exists (SELECT * FROM pg_trigger WHERE tgname = 'user_audit')" - ) - session.execute( - ('DROP TRIGGER user_audit ON "User"; ' if exist else "") - + """\ -CREATE TRIGGER user_audit -AFTER INSERT OR UPDATE OR DELETE ON "User" - FOR EACH ROW EXECUTE PROCEDURE process_user_audit();""" - ) - - session.execute( - """\ -CREATE OR REPLACE FUNCTION process_cert_audit() RETURNS TRIGGER AS $cert_audit$ - BEGIN - IF 
(TG_OP = 'DELETE') THEN - INSERT INTO cert_audit_logs (timestamp, operation, user_id, username, old_values) - SELECT now(), 'DELETE', "User".id, "User".username, row_to_json(OLD) - FROM application INNER JOIN "User" ON application.user_id = "User".id - WHERE OLD.application_id = application.id; - RETURN OLD; - ELSIF (TG_OP = 'UPDATE') THEN - INSERT INTO cert_audit_logs (timestamp, operation, user_id, username, old_values, new_values) - SELECT now(), 'UPDATE', "User".id, "User".username, row_to_json(OLD), row_to_json(NEW) - FROM application INNER JOIN "User" ON application.user_id = "User".id - WHERE NEW.application_id = application.id; - RETURN NEW; - ELSIF (TG_OP = 'INSERT') THEN - INSERT INTO cert_audit_logs (timestamp, operation, user_id, username, new_values) - SELECT now(), 'INSERT', "User".id, "User".username, row_to_json(NEW) - FROM application INNER JOIN "User" ON application.user_id = "User".id - WHERE NEW.application_id = application.id; - RETURN NEW; - END IF; - RETURN NULL; - END; -$cert_audit$ LANGUAGE plpgsql;""" - ) - - exist = session.scalar( - "SELECT exists (SELECT * FROM pg_trigger WHERE tgname = 'cert_audit')" - ) - session.execute( - ("DROP TRIGGER cert_audit ON certificate; " if exist else "") - + """\ -CREATE TRIGGER cert_audit -AFTER INSERT OR UPDATE OR DELETE ON certificate - FOR EACH ROW EXECUTE PROCEDURE process_cert_audit();""" - ) - - # Google Access expiration - - add_column_if_not_exist( - table_name=GoogleProxyGroupToGoogleBucketAccessGroup.__tablename__, - column=Column("expires", BigInteger()), - driver=driver, - metadata=md, - ) - - add_column_if_not_exist( - table_name=Project.__tablename__, - column=Column("authz", String), - driver=driver, - metadata=md, - ) - - -def add_foreign_key_column_if_not_exist( - table_name, - column_name, - column_type, - fk_table_name, - fk_column_name, - driver, - metadata, -): - column = Column(column_name, column_type) - add_column_if_not_exist(table_name, column, driver, metadata) - add_foreign_key_constraint_if_not_exist( - table_name, column_name, fk_table_name, fk_column_name, driver, metadata - ) - - -def drop_foreign_key_column_if_exist(table_name, column_name, driver, metadata): - drop_foreign_key_constraint_if_exist(table_name, column_name, driver, metadata) - drop_column_if_exist(table_name, column_name, driver, metadata) - - -def add_column_if_not_exist(table_name, column, driver, metadata, default=None): - column_name = column.compile(dialect=driver.engine.dialect) - column_type = column.type.compile(driver.engine.dialect) - - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - if str(column_name) not in table.c: - with driver.session as session: - command = 'ALTER TABLE "{}" ADD COLUMN {} {}'.format( - table_name, column_name, column_type - ) - if not column.nullable: - command += " NOT NULL" - if getattr(column, "default"): - default = column.default.arg - if isinstance(default, str): - default = "'{}'".format(default) - command += " DEFAULT {}".format(default) - command += ";" - - session.execute(command) - session.commit() - - -def drop_column_if_exist(table_name, column_name, driver, metadata): - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - if column_name in table.c: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" DROP COLUMN {};'.format(table_name, column_name) - ) - session.commit() - - -def add_foreign_key_constraint_if_not_exist( - table_name, column_name, fk_table_name, fk_column_name, driver, metadata -): - 
table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - foreign_key_name = "{}_{}_fkey".format(table_name.lower(), column_name) - - if column_name in table.c: - foreign_keys = [fk.name for fk in getattr(table.c, column_name).foreign_keys] - if foreign_key_name not in foreign_keys: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" ADD CONSTRAINT {} ' - 'FOREIGN KEY({}) REFERENCES "{}" ({});'.format( - table_name, - foreign_key_name, - column_name, - fk_table_name, - fk_column_name, - ) - ) - session.commit() - - -def set_foreign_key_constraint_on_delete_cascade( - table_name, column_name, fk_table_name, fk_column_name, driver, session, metadata -): - set_foreign_key_constraint_on_delete( - table_name, - column_name, - fk_table_name, - fk_column_name, - "CASCADE", - driver, - session, - metadata, - ) - - -def set_foreign_key_constraint_on_delete_setnull( - table_name, column_name, fk_table_name, fk_column_name, driver, session, metadata -): - set_foreign_key_constraint_on_delete( - table_name, - column_name, - fk_table_name, - fk_column_name, - "SET NULL", - driver, - session, - metadata, - ) - - -def set_foreign_key_constraint_on_delete( - table_name, - column_name, - fk_table_name, - fk_column_name, - ondelete, - driver, - session, - metadata, -): - with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", - message="Predicate of partial index \S+ ignored during reflection", - category=sa_exc.SAWarning, - ) - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - foreign_key_name = "{}_{}_fkey".format(table_name.lower(), column_name) - - if column_name in table.c: - session.execute( - 'ALTER TABLE ONLY "{}" DROP CONSTRAINT IF EXISTS {}, ' - 'ADD CONSTRAINT {} FOREIGN KEY ({}) REFERENCES "{}" ({}) ON DELETE {};'.format( - table_name, - foreign_key_name, - foreign_key_name, - column_name, - fk_table_name, - fk_column_name, - ondelete, - ) - ) - - -def drop_foreign_key_constraint_if_exist(table_name, column_name, driver, metadata): - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - foreign_key_name = "{}_{}_fkey".format(table_name.lower(), column_name) - - if column_name in table.c: - foreign_keys = [fk.name for fk in getattr(table.c, column_name).foreign_keys] - if foreign_key_name in foreign_keys: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" DROP CONSTRAINT {};'.format( - table_name, foreign_key_name - ) - ) - session.commit() - - -def add_unique_constraint_if_not_exist(table_name, column_name, driver, metadata): - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - index_name = "{}_{}_key".format(table_name, column_name) - - if column_name in table.c: - indexes = [index.name for index in table.indexes] - - if index_name not in indexes: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" ADD CONSTRAINT {} UNIQUE ({});'.format( - table_name, index_name, column_name - ) - ) - session.commit() - - -def drop_unique_constraint_if_exist(table_name, column_name, driver, metadata): - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - constraint_name = "{}_{}_key".format(table_name, column_name) - - if column_name in table.c: - constraints = [ - constaint.name for constaint in getattr(table.c, column_name).constraints - ] - - unique_index = None - for index in table.indexes: - if index.name == constraint_name: - unique_index = index - - if constraint_name in constraints 
or unique_index: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" DROP CONSTRAINT {};'.format( - table_name, constraint_name - ) - ) - session.commit() - - -def drop_default_value(table_name, column_name, driver, metadata): - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - - if column_name in table.c: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" ALTER COLUMN "{}" DROP DEFAULT;'.format( - table_name, column_name - ) - ) - session.commit() - - -def add_not_null_constraint(table_name, column_name, driver, metadata): - table = Table(table_name, metadata, autoload=True, autoload_with=driver.engine) - - if column_name in table.c: - with driver.session as session: - session.execute( - 'ALTER TABLE "{}" ALTER COLUMN "{}" SET NOT NULL;'.format( - table_name, column_name - ) - ) - session.commit() - - -def _remove_policy(driver, md): - with driver.session as session: - session.execute("DROP TABLE IF EXISTS users_to_policies;") - session.execute("DROP TABLE IF EXISTS policy;") - session.commit() - - -def _add_google_project_id(driver, md): - """ - Add new unique not null field to GoogleServiceAccount. - In order to do this without errors, we have to: - - add the field and allow null (for all previous rows) - - update all null entries to be unique - - at the moment this is just for dev environments since we don't - have anything in production. thus, these nonsense values will - be sufficient - - new additions of GoogleServiceAccounts will require this field - to be not null and unique - - add unique constraint - - add not null constraint - """ - # add new google_project_id column - add_column_if_not_exist( - table_name=GoogleServiceAccount.__tablename__, - column=Column("google_project_id", String), - driver=driver, - metadata=md, - ) - - # make rows have unique values for new column - with driver.session as session: - rows_to_make_unique = session.query(GoogleServiceAccount).filter( - GoogleServiceAccount.google_project_id.is_(None) - ) - count = 0 - for row in rows_to_make_unique: - row.google_project_id = count - count += 1 - session.commit() - - # add not null constraint - add_not_null_constraint( - table_name=GoogleServiceAccount.__tablename__, - column_name="google_project_id", - driver=driver, - metadata=md, - ) - - -def _update_for_authlib(driver, md): - """ - Going to authlib=0.9, the OAuth2ClientMixin from authlib, which the client model - inherits from, adds these new columns, some of which were added directly to the - client model in order to override some things like nullability. 
- """ - CLIENT_COLUMNS_TO_ADD = [ - Column("issued_at", Integer), - Column("expires_at", Integer, nullable=False, default=0), - Column("redirect_uri", Text, nullable=False, default=""), - Column( - "token_endpoint_auth_method", - String(48), - default="client_secret_basic", - server_default="client_secret_basic", - ), - Column("grant_type", Text, nullable=False, default=""), - Column("response_type", Text, nullable=False, default=""), - Column("scope", Text, nullable=False, default=""), - Column("client_name", String(100)), - Column("client_uri", Text), - Column("logo_uri", Text), - Column("contact", Text), - Column("tos_uri", Text), - Column("policy_uri", Text), - Column("jwks_uri", Text), - Column("jwks_text", Text), - Column("i18n_metadata", Text), - Column("software_id", String(36)), - Column("software_version", String(48)), - ] - add_client_col = lambda col: add_column_if_not_exist( - Client.__tablename__, column=col, driver=driver, metadata=md - ) - list(map(add_client_col, CLIENT_COLUMNS_TO_ADD)) - CODE_COLUMNS_TO_ADD = [Column("response_type", Text, default="")] - - with driver.session as session: - for client in session.query(Client).all(): - # add redirect_uri - if not client.redirect_uri: - redirect_uris = getattr(client, "_redirect_uris") or "" - client.redirect_uri = "\n".join(redirect_uris.split()) - # add grant_type; everything prior to migration was just using code grant - if not client.grant_type: - client.grant_type = "authorization_code\nrefresh_token" - session.commit() - - add_code_col = lambda col: add_column_if_not_exist( - AuthorizationCode.__tablename__, column=col, driver=driver, metadata=md - ) - list(map(add_code_col, CODE_COLUMNS_TO_ADD)) - with driver.session as session: - session.execute("ALTER TABLE client ALTER COLUMN client_secret DROP NOT NULL") - session.commit() - - # these ones are "manual" - table = Table( - AuthorizationCode.__tablename__, md, autoload=True, autoload_with=driver.engine - ) - auth_code_columns = list(map(str, table.columns)) - tablename = AuthorizationCode.__tablename__ - # delete expires_at column - if "{}.expires_at".format(tablename) in auth_code_columns: - with driver.session as session: - session.execute("ALTER TABLE {} DROP COLUMN expires_at;".format(tablename)) - session.commit() - # add auth_time column - if "{}.auth_time".format(tablename) not in auth_code_columns: - with driver.session as session: - command = "ALTER TABLE {} ADD COLUMN auth_time Integer NOT NULL DEFAULT extract(epoch from now());".format( - tablename - ) - session.execute(command) - session.commit() - # make sure modifiers on auth_time column are correct - with driver.session as session: - session.execute( - "ALTER TABLE {} ALTER COLUMN auth_time SET NOT NULL;".format(tablename) - ) - session.commit() - session.execute( - "ALTER TABLE {} ALTER COLUMN auth_time SET DEFAULT extract(epoch from now());".format( - tablename - ) - ) - session.commit() - - -def _set_on_delete_cascades(driver, session, md): - set_foreign_key_constraint_on_delete_cascade( - "client", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "authorization_code", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "google_service_account", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "user_google_account", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "user_google_account_to_proxy_group", - 
"user_google_account_id", - "user_google_account", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "user_google_account_to_proxy_group", - "proxy_group_id", - "google_proxy_group", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "google_service_account_key", - "service_account_id", - "google_service_account", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "google_bucket_access_group", "bucket_id", "bucket", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "google_proxy_group_to_google_bucket_access_group", - "proxy_group_id", - "google_proxy_group", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "google_proxy_group_to_google_bucket_access_group", - "access_group_id", - "google_bucket_access_group", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "service_account_access_privilege", - "project_id", - "project", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "service_account_access_privilege", - "service_account_id", - "user_service_account", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "service_account_to_google_bucket_access_group", - "service_account_id", - "user_service_account", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "service_account_to_google_bucket_access_group", - "access_group_id", - "google_bucket_access_group", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "hmac_keypair", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "hmac_keypair_archive", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "user_to_group", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "user_to_group", "group_id", "Group", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "access_privilege", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "access_privilege", "group_id", "Group", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "access_privilege", "project_id", "project", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "access_privilege", - "provider_id", - "authorization_provider", - "id", - driver, - session, - md, - ) - set_foreign_key_constraint_on_delete_cascade( - "user_to_bucket", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "user_to_bucket", "bucket_id", "bucket", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "bucket", "provider_id", "cloud_provider", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "project_to_bucket", "project_id", "project", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "project_to_bucket", "bucket_id", "bucket", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "compute_access", "project_id", "project", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "compute_access", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - 
"compute_access", "group_id", "Group", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "compute_access", "provider_id", "cloud_provider", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "storage_access", "project_id", "project", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "storage_access", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "storage_access", "group_id", "Group", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "storage_access", "provider_id", "cloud_provider", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "certificate", "application_id", "application", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "s3credential", "user_id", "User", "id", driver, session, md - ) - set_foreign_key_constraint_on_delete_cascade( - "tag", "user_id", "User", "id", driver, session, md - ) diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py index 23545646d5..6af10a3011 100644 --- a/fence/scripting/fence_create.py +++ b/fence/scripting/fence_create.py @@ -5,6 +5,8 @@ import json import pprint import asyncio + +from alembic.config import main as alembic_main from cirrus import GoogleCloudManager from cirrus.google_cloud.errors import GoogleAuthError from cirrus.config import config as cirrus_config @@ -47,7 +49,6 @@ UserRefreshToken, ServiceAccountToGoogleBucketAccessGroup, query_for_user, - migrate, GA4GHVisaV1, ) from fence.scripting.google_monitor import email_users_without_access, validation_check @@ -1598,7 +1599,7 @@ def notify_problem_users(db, emails, auth_ids, check_linking, google_project_id) def migrate_database(db): driver = SQLAlchemyDriver(db) - migrate(driver) + alembic_main(["--raiseerr", "upgrade", "head"]) logger.info("Done.") diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000000..756251dc2d --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,75 @@ +from alembic import context +from logging.config import fileConfig +import os +from sqlalchemy import engine_from_config, pool + +from userdatamodel import Base + +from fence.config import config as fence_config +from fence.settings import CONFIG_SEARCH_FOLDERS + + +config = context.config +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +target_metadata = Base.metadata + +fence_config.load( + config_path=os.environ.get("TEST_CONFIG_PATH"), # for tests + search_folders=CONFIG_SEARCH_FOLDERS, # for deployments +) +config.set_main_option("sqlalchemy.url", str(fence_config["DB"])) + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 0000000000..2c0156303a
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
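(Aside, not part of the diff.) script.py.mako is the template Alembic renders each time a new revision file is generated; ${up_revision}, ${down_revision} and ${create_date} are filled in by Alembic itself. Generating an empty revision against this setup might look like the sketch below (the message is made up; alembic.ini must be discoverable from the working directory):

    from alembic.config import main as alembic_main

    # Renders migrations/script.py.mako into a new file under
    # migrations/versions/, chaining down_revision to the current head.
    alembic_main(["revision", "-m", "add example table"])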
diff --git a/migrations/versions/e4c7b0ab68d3_create_tables.py b/migrations/versions/e4c7b0ab68d3_create_tables.py
new file mode 100644
index 0000000000..8cc47fce22
--- /dev/null
+++ b/migrations/versions/e4c7b0ab68d3_create_tables.py
@@ -0,0 +1,702 @@
+"""Create tables
+
+Revision ID: e4c7b0ab68d3
+Revises:
+Create Date: 2022-06-03 16:00:02.745086
+
+This migration was auto-generated by Alembic based on these versions of
+the model:
+- https://github.com/uc-cdis/userdatamodel/blob/2.4.0/userdatamodel/user.py
+- https://github.com/uc-cdis/fence/blob/2022.07/fence/models.py
+"""
+from alembic import context, op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+from userdatamodel.driver import SQLAlchemyDriver
+
+from bin.old_migration_script import migrate
+
+# revision identifiers, used by Alembic.
+revision = "e4c7b0ab68d3"
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # If this is a new instance of Fence, we can run this initial Alembic
+    # migration to create the tables. If not, the DB might have been partially
+    # migrated by the old migration script and we need to make sure all the
+    # old migrations are run, then Alembic can pick up from there and run more
+    # recent migrations.
+    # The state of the DB after the old migration script runs is the same as
+    # after this initial Alembic migration.
+    conn = op.get_bind()
+    inspector = sa.engine.reflection.Inspector.from_engine(conn)
+    tables = inspector.get_table_names()
+    if len(tables) > 0 and tables != ["alembic_version"]:
+        print(
+            "INFO: Found existing tables: this is not a new instance of Fence. Running the old migration script... Note that future migrations will be run using Alembic."
+        )
+        driver = SQLAlchemyDriver(context.config.get_main_option("sqlalchemy.url"))
+        migrate(driver)
+        return
+
+    op.create_table(
+        "Group",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "assume_role_cache",
+        sa.Column("arn", sa.String(), nullable=False),
+        sa.Column("expires_at", sa.Integer(), nullable=True),
+        sa.Column("aws_access_key_id", sa.String(), nullable=True),
+        sa.Column("aws_secret_access_key", sa.String(), nullable=True),
+        sa.Column("aws_session_token", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("arn"),
+    )
+    op.create_table(
+        "authorization_provider",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "blacklisted_token",
+        sa.Column("jti", sa.String(length=36), nullable=False),
+        sa.Column("exp", sa.BigInteger(), nullable=True),
+        sa.PrimaryKeyConstraint("jti"),
+    )
+    op.create_table(
+        "cert_audit_logs",
+        sa.Column("id", sa.BigInteger(), nullable=False),
+        sa.Column(
+            "timestamp", sa.DateTime(), server_default=sa.text("now()"), nullable=False
+        ),
+        sa.Column("operation", sa.String(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.Column("username", sa.String(length=255), nullable=False),
+        sa.Column(
+            "old_values",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'"),
+            nullable=True,
+        ),
+        sa.Column(
+            "new_values",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'"),
+            nullable=True,
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "cloud_provider",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("endpoint", sa.String(), nullable=True),
+        sa.Column("backend", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.Column("service", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("endpoint"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "event_log",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("action", sa.String(), nullable=True),
+        sa.Column(
+            "timestamp",
+            sa.DateTime(timezone=True),
+            server_default=sa.text("now()"),
+            nullable=False,
+        ),
+        sa.Column("target", sa.String(), nullable=True),
+        sa.Column("target_type", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "ga4gh_passport_cache",
+        sa.Column("passport_hash", sa.String(length=64), nullable=False),
+        sa.Column("expires_at", sa.BigInteger(), nullable=False),
+        sa.Column("user_ids", postgresql.ARRAY(sa.String(length=255)), nullable=False),
+        sa.PrimaryKeyConstraint("passport_hash"),
+    )
+    op.create_table(
+        "gcp_assume_role_cache",
+        sa.Column("gcp_proxy_group_id", sa.String(), nullable=False),
+        sa.Column("expires_at", sa.Integer(), nullable=True),
+        sa.Column("gcp_private_key", sa.String(), nullable=True),
+        sa.Column("gcp_key_db_entry", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("gcp_proxy_group_id"),
+    )
+    op.create_table(
+        "google_proxy_group",
+        sa.Column("id", sa.String(length=90), nullable=False),
+        sa.Column("email", sa.String(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("email"),
+    )
+    op.create_table(
+        "identity_provider",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "organization",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "project",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("auth_id", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.Column("parent_id", sa.Integer(), nullable=True),
+        sa.Column("authz", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["parent_id"],
+            ["project.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("auth_id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "user_audit_logs",
+        sa.Column("id", sa.BigInteger(), nullable=False),
+        sa.Column(
+            "timestamp", sa.DateTime(), server_default=sa.text("now()"), nullable=False
+        ),
+        sa.Column("operation", sa.String(), nullable=False),
+        sa.Column(
+            "old_values",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'"),
+            nullable=True,
+        ),
+        sa.Column(
+            "new_values",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'"),
+            nullable=True,
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "user_refresh_token",
+        sa.Column("jti", sa.String(), nullable=False),
+        sa.Column("userid", sa.Integer(), nullable=True),
+        sa.Column("expires", sa.BigInteger(), nullable=True),
+        sa.PrimaryKeyConstraint("jti"),
+    )
+    op.create_table(
+        "user_service_account",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("google_unique_id", sa.String(), nullable=False),
+        sa.Column("email", sa.String(), nullable=False),
+        sa.Column("google_project_id", sa.String(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "bucket",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("provider_id", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["provider_id"], ["cloud_provider.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "department",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.Column("org_id", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["org_id"],
+            ["organization.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_table(
+        "service_account_access_privilege",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=False),
+        sa.Column("service_account_id", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(
+            ["service_account_id"], ["user_service_account.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "User",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("username", sa.String(length=255), nullable=True),
+        sa.Column("id_from_idp", sa.String(), nullable=True),
+        sa.Column("display_name", sa.String(), nullable=True),
+        sa.Column("phone_number", sa.String(), nullable=True),
+        sa.Column("email", sa.String(), nullable=True),
+        sa.Column(
+            "_last_auth", sa.DateTime(), server_default=sa.text("now()"), nullable=True
+        ),
+        sa.Column("idp_id", sa.Integer(), nullable=True),
+        sa.Column("google_proxy_group_id", sa.String(), nullable=True),
+        sa.Column("department_id", sa.Integer(), nullable=True),
+        sa.Column("active", sa.Boolean(), nullable=True),
+        sa.Column("is_admin", sa.Boolean(), nullable=True),
+        sa.Column(
+            "additional_info",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'"),
+            nullable=True,
+        ),
+        sa.ForeignKeyConstraint(
+            ["department_id"],
+            ["department.id"],
+        ),
+        sa.ForeignKeyConstraint(
+            ["google_proxy_group_id"], ["google_proxy_group.id"], ondelete="SET NULL"
+        ),
+        sa.ForeignKeyConstraint(
+            ["idp_id"],
+            ["identity_provider.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("username"),
+    )
+    op.create_table(
+        "google_bucket_access_group",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("bucket_id", sa.Integer(), nullable=False),
+        sa.Column("email", sa.String(), nullable=False),
+        sa.Column("privileges", postgresql.ARRAY(sa.String()), nullable=True),
+        sa.ForeignKeyConstraint(["bucket_id"], ["bucket.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "project_to_bucket",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=True),
+        sa.Column("bucket_id", sa.Integer(), nullable=True),
+        sa.Column("privilege", postgresql.ARRAY(sa.String()), nullable=True),
+        sa.ForeignKeyConstraint(["bucket_id"], ["bucket.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "access_privilege",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("group_id", sa.Integer(), nullable=True),
+        sa.Column("project_id", sa.Integer(), nullable=True),
+        sa.Column("privilege", postgresql.ARRAY(sa.String()), nullable=True),
+        sa.Column("provider_id", sa.Integer(), nullable=True),
+        sa.CheckConstraint(
+            "user_id is NULL or group_id is NULL", name="check_access_subject"
+        ),
+        sa.ForeignKeyConstraint(["group_id"], ["Group.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(
+            ["provider_id"], ["authorization_provider.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("user_id", "group_id", "project_id", name="uniq_ap"),
+    )
+    op.create_index(
+        "unique_group_project_id",
+        "access_privilege",
+        ["group_id", "project_id"],
+        unique=True,
+        postgresql_where=sa.text("user_id is NULL"),
+    )
+    op.create_index(
+        "unique_user_group_id",
+        "access_privilege",
+        ["user_id", "group_id"],
+        unique=True,
+        postgresql_where=sa.text("project_id is NULL"),
+    )
+    op.create_index(
+        "unique_user_project_id",
+        "access_privilege",
+        ["user_id", "project_id"],
+        unique=True,
+        postgresql_where=sa.text("group_id is NULL"),
+    )
+    op.create_table(
+        "application",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("resources_granted", postgresql.ARRAY(sa.String()), nullable=True),
+        sa.Column("message", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["user_id"],
+            ["User.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "authorization_code",
+        sa.Column("code", sa.String(length=120), nullable=False),
+        sa.Column("client_id", sa.String(length=48), nullable=True),
+        sa.Column("redirect_uri", sa.Text(), nullable=True),
+        sa.Column("response_type", sa.Text(), nullable=True),
+        sa.Column("auth_time", sa.Integer(), nullable=False),
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("nonce", sa.String(), nullable=True),
+        sa.Column("refresh_token_expires_in", sa.Integer(), nullable=True),
+        sa.Column("_scope", sa.Text(), nullable=True),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("code"),
+    )
+    op.create_table(
+        "client",
+        sa.Column("issued_at", sa.Integer(), nullable=False),
+        sa.Column("expires_at", sa.Integer(), nullable=False),
+        sa.Column("redirect_uri", sa.Text(), nullable=True),
+        sa.Column("token_endpoint_auth_method", sa.String(length=48), nullable=True),
+        sa.Column("grant_type", sa.Text(), nullable=False),
+        sa.Column("response_type", sa.Text(), nullable=False),
+        sa.Column("scope", sa.Text(), nullable=False),
+        sa.Column("client_name", sa.String(length=100), nullable=True),
+        sa.Column("client_uri", sa.Text(), nullable=True),
+        sa.Column("logo_uri", sa.Text(), nullable=True),
+        sa.Column("contact", sa.Text(), nullable=True),
+        sa.Column("tos_uri", sa.Text(), nullable=True),
+        sa.Column("policy_uri", sa.Text(), nullable=True),
+        sa.Column("jwks_uri", sa.Text(), nullable=True),
+        sa.Column("jwks_text", sa.Text(), nullable=True),
+        sa.Column("i18n_metadata", sa.Text(), nullable=True),
+        sa.Column("software_id", sa.String(length=36), nullable=True),
+        sa.Column("software_version", sa.String(length=48), nullable=True),
+        sa.Column("client_id", sa.String(length=40), nullable=False),
+        sa.Column("client_secret", sa.String(length=60), nullable=True),
+        sa.Column("name", sa.String(length=40), nullable=False),
+        sa.Column("description", sa.String(length=400), nullable=True),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("auto_approve", sa.Boolean(), nullable=True),
+        sa.Column("is_confidential", sa.Boolean(), nullable=True),
+        sa.Column("_redirect_uris", sa.Text(), nullable=True),
+        sa.Column("_allowed_scopes", sa.Text(), nullable=False),
+        sa.Column("_default_scopes", sa.Text(), nullable=True),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("client_id"),
+        sa.UniqueConstraint("name"),
+    )
+    op.create_index(
+        op.f("ix_client_client_secret"), "client", ["client_secret"], unique=True
+    )
+    op.create_table(
+        "compute_access",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=True),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("group_id", sa.Integer(), nullable=True),
+        sa.Column("provider_id", sa.Integer(), nullable=True),
+        sa.Column("instances", sa.Integer(), nullable=True),
+        sa.Column("cores", sa.Integer(), nullable=True),
+        sa.Column("ram", sa.BigInteger(), nullable=True),
+        sa.Column("floating_ips", sa.Integer(), nullable=True),
+        sa.Column(
+            "additional_info", postgresql.JSONB(astext_type=sa.Text()), nullable=True
+        ),
+        sa.ForeignKeyConstraint(["group_id"], ["Group.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(
+            ["provider_id"], ["cloud_provider.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "ga4gh_visa_v1",
+        sa.Column("id", sa.BigInteger(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.Column("ga4gh_visa", sa.Text(), nullable=False),
+        sa.Column("source", sa.String(), nullable=False),
+        sa.Column("type", sa.String(), nullable=False),
+        sa.Column("asserted", sa.BigInteger(), nullable=False),
+        sa.Column("expires", sa.BigInteger(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "google_proxy_group_to_google_bucket_access_group",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("proxy_group_id", sa.String(), nullable=False),
+        sa.Column("access_group_id", sa.Integer(), nullable=False),
+        sa.Column("expires", sa.BigInteger(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["access_group_id"], ["google_bucket_access_group.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(
+            ["proxy_group_id"], ["google_proxy_group.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "google_service_account",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("google_unique_id", sa.String(), nullable=False),
+        sa.Column("client_id", sa.String(length=40), nullable=True),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.Column("google_project_id", sa.String(), nullable=False),
+        sa.Column("email", sa.String(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("email"),
+    )
+    op.create_table(
+        "hmac_keypair",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("access_key", sa.String(), nullable=True),
+        sa.Column("secret_key", sa.String(), nullable=True),
+        sa.Column("timestamp", sa.DateTime(), nullable=False),
+        sa.Column("expire", sa.Integer(), nullable=True),
+        sa.Column("active", sa.Boolean(), nullable=True),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "hmac_keypair_archive",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("access_key", sa.String(), nullable=True),
+        sa.Column("secret_key", sa.String(), nullable=True),
+        sa.Column("timestamp", sa.DateTime(), nullable=False),
+        sa.Column("expire", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "iss_sub_pair_to_user",
+        sa.Column("iss", sa.String(), nullable=False),
+        sa.Column("sub", sa.String(), nullable=False),
+        sa.Column("fk_to_User", sa.Integer(), nullable=False),
+        sa.Column(
+            "extra_info",
+            postgresql.JSONB(astext_type=sa.Text()),
+            server_default=sa.text("'{}'"),
+            nullable=True,
+        ),
+        sa.ForeignKeyConstraint(["fk_to_User"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("iss", "sub"),
+    )
+    op.create_table(
+        "s3credential",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("access_key", sa.String(), nullable=True),
+        sa.Column("timestamp", sa.DateTime(), nullable=False),
+        sa.Column("expire", sa.Integer(), nullable=True),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "service_account_to_google_bucket_access_group",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("service_account_id", sa.Integer(), nullable=False),
+        sa.Column("expires", sa.BigInteger(), nullable=True),
+        sa.Column("access_group_id", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ["access_group_id"], ["google_bucket_access_group.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(
+            ["service_account_id"], ["user_service_account.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "storage_access",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("project_id", sa.Integer(), nullable=True),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("group_id", sa.Integer(), nullable=True),
+        sa.Column("provider_id", sa.Integer(), nullable=True),
+        sa.Column("max_objects", sa.BigInteger(), nullable=True),
+        sa.Column("max_size", sa.BigInteger(), nullable=True),
+        sa.Column("max_buckets", sa.Integer(), nullable=True),
+        sa.Column(
+            "additional_info", postgresql.JSONB(astext_type=sa.Text()), nullable=True
+        ),
+        sa.CheckConstraint(
+            "user_id is NULL or group_id is NULL or project_id is NULL",
+            name="check_storage_subject",
+        ),
+        sa.ForeignKeyConstraint(["group_id"], ["Group.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(
+            ["provider_id"], ["cloud_provider.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "tag",
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.Column("key", sa.String(), nullable=False),
+        sa.Column("value", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("user_id", "key"),
+    )
+    op.create_table(
+        "upstream_refresh_token",
+        sa.Column("id", sa.BigInteger(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.Column("refresh_token", sa.Text(), nullable=False),
+        sa.Column("expires", sa.BigInteger(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "user_google_account",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("email", sa.String(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("email"),
+    )
+    op.create_table(
+        "user_to_bucket",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=True),
+        sa.Column("bucket_id", sa.Integer(), nullable=True),
+        sa.Column("privilege", postgresql.ARRAY(sa.String()), nullable=True),
+        sa.ForeignKeyConstraint(["bucket_id"], ["bucket.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "user_to_group",
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.Column("group_id", sa.Integer(), nullable=False),
+        sa.Column("roles", postgresql.ARRAY(sa.String()), nullable=True),
+        sa.ForeignKeyConstraint(["group_id"], ["Group.id"], ondelete="CASCADE"),
+        sa.ForeignKeyConstraint(["user_id"], ["User.id"], ondelete="CASCADE"),
+        sa.PrimaryKeyConstraint("user_id", "group_id"),
+    )
+    op.create_table(
+        "certificate",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("application_id", sa.Integer(), nullable=True),
+        sa.Column("name", sa.String(length=40), nullable=True),
+        sa.Column("extension", sa.String(), nullable=True),
+        sa.Column("data", sa.LargeBinary(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["application_id"], ["application.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "google_service_account_key",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("key_id", sa.String(), nullable=False),
+        sa.Column("service_account_id", sa.Integer(), nullable=False),
+        sa.Column("expires", sa.BigInteger(), nullable=True),
+        sa.Column("private_key", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["service_account_id"], ["google_service_account.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "user_google_account_to_proxy_group",
+        sa.Column("user_google_account_id", sa.Integer(), nullable=False),
+        sa.Column("proxy_group_id", sa.String(), nullable=False),
+        sa.Column("expires", sa.BigInteger(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ["proxy_group_id"], ["google_proxy_group.id"], ondelete="CASCADE"
+        ),
+        sa.ForeignKeyConstraint(
+            ["user_google_account_id"], ["user_google_account.id"], ondelete="CASCADE"
+        ),
+        sa.PrimaryKeyConstraint("user_google_account_id", "proxy_group_id"),
+    )
+
+
+def downgrade():
+    op.drop_table("user_google_account_to_proxy_group")
+    op.drop_table("google_service_account_key")
+    op.drop_table("certificate")
+    op.drop_table("user_to_group")
+    op.drop_table("user_to_bucket")
+    op.drop_table("user_google_account")
+    op.drop_table("upstream_refresh_token")
+    op.drop_table("tag")
+    op.drop_table("storage_access")
+    op.drop_table("service_account_to_google_bucket_access_group")
+    op.drop_table("s3credential")
+    op.drop_table("iss_sub_pair_to_user")
+    op.drop_table("hmac_keypair_archive")
+    op.drop_table("hmac_keypair")
+    op.drop_table("google_service_account")
+    op.drop_table("google_proxy_group_to_google_bucket_access_group")
+    op.drop_table("ga4gh_visa_v1")
+    op.drop_table("compute_access")
+    op.drop_index(op.f("ix_client_client_secret"), table_name="client")
+    op.drop_table("client")
+    op.drop_table("authorization_code")
+    op.drop_table("application")
+    op.drop_index(
+        "unique_user_project_id",
+        table_name="access_privilege",
+        postgresql_where=sa.text("group_id is NULL"),
+    )
+    op.drop_index(
+        "unique_user_group_id",
+        table_name="access_privilege",
+        postgresql_where=sa.text("project_id is NULL"),
+    )
+    op.drop_index(
+        "unique_group_project_id",
+        table_name="access_privilege",
+        postgresql_where=sa.text("user_id is NULL"),
+    )
+    op.drop_table("access_privilege")
+    op.drop_table("project_to_bucket")
+    op.drop_table("google_bucket_access_group")
+    op.drop_table("User")
+    op.drop_table("service_account_access_privilege")
+    op.drop_table("department")
+    op.drop_table("bucket")
+    op.drop_table("user_service_account")
+    op.drop_table("user_refresh_token")
+    op.drop_table("user_audit_logs")
+    op.drop_table("project")
+    op.drop_table("organization")
+    op.drop_table("identity_provider")
+    op.drop_table("google_proxy_group")
+    op.drop_table("gcp_assume_role_cache")
+    op.drop_table("ga4gh_passport_cache")
+    op.drop_table("event_log")
+    op.drop_table("cloud_provider")
+    op.drop_table("cert_audit_logs")
+    op.drop_table("blacklisted_token")
+    op.drop_table("authorization_provider")
+    op.drop_table("assume_role_cache")
+    op.drop_table("Group")
op.drop_table("Group") diff --git a/poetry.lock b/poetry.lock index 53de10e81e..275b44c6e8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6,6 +6,23 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "alembic" +version = "1.7.7" +description = "A database migration tool for SQLAlchemy." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +Mako = "*" +SQLAlchemy = ">=1.3.0" + +[package.extras] +tz = ["python-dateutil"] + [[package]] name = "aniso8601" version = "9.0.1" @@ -808,6 +825,21 @@ docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + [[package]] name = "isodate" version = "0.6.1" @@ -849,6 +881,21 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "mako" +version = "1.1.6" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["babel"] +lingua = ["lingua"] + [[package]] name = "markdown" version = "3.3.7" @@ -1506,13 +1553,17 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = "^3.6" -content-hash = "5c0ae6cc529d940e7f68498a5849ab702ab629c4a67a75b7043f40fed8121976" +content-hash = "070595cfae2149845b7c6442897a0d482a45c5e250b2c8d7520c575a6be16681" [metadata.files] addict = [ {file = "addict-2.4.0-py3-none-any.whl", hash = "sha256:249bb56bbfd3cdc2a004ea0ff4c2b6ddc84d53bc2194761636eb314d5cfa5dfc"}, {file = "addict-2.4.0.tar.gz", hash = "sha256:b3b2210e0e067a281f5646c8c5db92e99b7231ea8b0eb5f74dbdf9e259d4e494"}, ] +alembic = [ + {file = "alembic-1.7.7-py3-none-any.whl", hash = "sha256:29be0856ec7591c39f4e1cb10f198045d890e6e2274cf8da80cb5e721a09642b"}, + {file = "alembic-1.7.7.tar.gz", hash = "sha256:4961248173ead7ce8a21efb3de378f13b8398e6630fab0eb258dc74a8af24c58"}, +] aniso8601 = [ {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, @@ -1964,6 +2015,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, ] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, +] isodate = [ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, @@ -1980,6 +2035,10 @@ jmespath = [ {file = "jmespath-0.9.2-py2.py3-none-any.whl", hash = "sha256:3f03b90ac8e0f3ba472e8ebff083e460c89501d8d41979771535efe9a343177e"}, {file = "jmespath-0.9.2.tar.gz", hash = "sha256:54c441e2e08b23f12d7fa7d8e6761768c47c969e6aed10eead57505ba760aee9"}, ] +mako = [ + {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, + {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, +] markdown = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, diff --git a/pyproject.toml b/pyproject.toml index 79b32d32eb..53e38668a7 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,7 @@ include = [ [tool.poetry.dependencies] python = "^3.6" +alembic = "^1.7.7" authlib = "^0.11" bcrypt = "^3.1.4" boto3 = "~1.9.91" diff --git a/run.py b/run.py index 794bfd32c6..c75ed627c5 100644 --- a/run.py +++ b/run.py @@ -1,6 +1,9 @@ -from fence import app, app_init, config import argparse +from alembic.config import main as alembic_main + +from fence import app, app_init, 
config + parser = argparse.ArgumentParser() parser.add_argument( "-c", @@ -25,6 +28,9 @@ patcher = patch("fence.resources.storage.get_client", get_client) patcher.start() +if config.get("ENABLE_DB_MIGRATION"): + alembic_main(["--raiseerr", "upgrade", "head"]) + app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) app.run(debug=True, port=8000) diff --git a/tests/conftest.py b/tests/conftest.py index d1170b8f10..92b6ce782c 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,6 +17,7 @@ import string from addict import Dict +from alembic.config import main as alembic_main from authutils.testing.fixtures import ( _hazmat_rsa_private_key, _hazmat_rsa_private_key_2, @@ -33,7 +34,6 @@ import pytest import requests from sqlalchemy.ext.compiler import compiles -from sqlalchemy.schema import DropTable # Set FENCE_CONFIG_PATH *before* loading the configuration CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) @@ -55,11 +55,6 @@ from tests.utils.oauth2.client import OAuth2TestClient -@compiles(DropTable, "postgresql") -def _compile_drop_table(element, compiler, **kwargs): - return compiler.visit_drop_table(element) + " CASCADE" - - # Allow authlib to use HTTP for local testing. os.environ["AUTHLIB_INSECURE_TRANSPORT"] = "true" @@ -453,6 +448,10 @@ def app(kid, rsa_private_key, rsa_public_key): config_path=os.path.join(root_dir, "test-fence-config.yaml"), ) + # migrate the database to the latest version + os.environ["TEST_CONFIG_PATH"] = os.path.join(root_dir, "test-fence-config.yaml") + alembic_main(["--raiseerr", "upgrade", "head"]) + # We want to set up the keys so that the test application can load keys # from the test keys directory, but the default keypair used will be the # one using the fixtures. So, stick the keypair at the front of the @@ -472,6 +471,7 @@ def app(kid, rsa_private_key, rsa_public_key): yield fence.app + alembic_main(["--raiseerr", "downgrade", "base"]) mocker.unmock_functions() @@ -536,7 +536,10 @@ def db(app, request): """ def drop_all(): - models.Base.metadata.drop_all(app.db.engine) + connection = app.db.engine.connect() + connection.begin() + for table in reversed(models.Base.metadata.sorted_tables): + connection.execute(table.delete()) request.addfinalizer(drop_all)
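(Aside, not part of the diff.) The drop_all change follows from moving schema ownership to Alembic: the test schema is now created and torn down via "upgrade head" / "downgrade base", so per-test cleanup only deletes rows, keeping the tables and the alembic_version bookkeeping intact. Iterating sorted_tables in reverse deletes children before parents, which is why the old DropTable ... CASCADE compiler hack is no longer needed. The same idea standalone (hypothetical URL):

    import sqlalchemy as sa

    engine = sa.create_engine("postgresql://localhost/fence_test")  # hypothetical URL
    md = sa.MetaData()
    md.reflect(bind=engine)  # load current table definitions from the DB
    with engine.connect() as connection:
        with connection.begin():  # one transaction for all deletes
            # Reverse topological order: child tables first, so foreign keys
            # are never violated mid-cleanup.
            for table in reversed(md.sorted_tables):
                connection.execute(table.delete())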