diff --git a/resultsdb/__init__.py b/resultsdb/__init__.py index 1188a7d..b4a716e 100644 --- a/resultsdb/__init__.py +++ b/resultsdb/__init__.py @@ -42,7 +42,7 @@ # Flask App app = Flask(__name__) -app.secret_key = 'replace-me-with-something-random' +app.secret_key = "replace-me-with-something-random" # make sure app behaves when behind a proxy app.wsgi_app = proxy.ReverseProxied(app.wsgi_app) @@ -51,22 +51,23 @@ original_jsonify = flask.jsonify # Expose the __version__ variable in templates -app.jinja_env.globals['app_version'] = __version__ +app.jinja_env.globals["app_version"] = __version__ def jsonify_with_jsonp(*args, **kwargs): response = original_jsonify(*args, **kwargs) - callback = flask.request.args.get('callback', None) + callback = flask.request.args.get("callback", None) if callback: if not isinstance(callback, basestring): callback = callback[0] - response.mimetype = 'application/javascript' - response.set_data('%s(%s);' % (callback, response.get_data())) + response.mimetype = "application/javascript" + response.set_data("%s(%s);" % (callback, response.get_data())) return response + flask.jsonify = jsonify_with_jsonp # Checks for env variable OPENSHIFT_PROD to trigger OpenShift codepath on init @@ -74,47 +75,47 @@ def jsonify_with_jsonp(*args, **kwargs): # Possible values are: # "1" - OpenShift production deployment # "0" - OpenShift testing deployment -openshift = os.getenv('OPENSHIFT_PROD') +openshift = os.getenv("OPENSHIFT_PROD") # Load default config, then override that with a config file -if os.getenv('DEV') == 'true': - default_config_obj = 'resultsdb.config.DevelopmentConfig' - default_config_file = os.getcwd() + '/conf/settings.py' -elif os.getenv('TEST') == 'true' or openshift == "0": - default_config_obj = 'resultsdb.config.TestingConfig' - default_config_file = '' +if os.getenv("DEV") == "true": + default_config_obj = "resultsdb.config.DevelopmentConfig" + default_config_file = os.getcwd() + "/conf/settings.py" +elif os.getenv("TEST") == "true" or openshift == "0": + default_config_obj = "resultsdb.config.TestingConfig" + default_config_file = "" else: - default_config_obj = 'resultsdb.config.ProductionConfig' - default_config_file = '/etc/resultsdb/settings.py' + default_config_obj = "resultsdb.config.ProductionConfig" + default_config_file = "/etc/resultsdb/settings.py" app.config.from_object(default_config_obj) if openshift: config.openshift_config(app.config, openshift) -config_file = os.environ.get('RESULTSDB_CONFIG', default_config_file) +config_file = os.environ.get("RESULTSDB_CONFIG", default_config_file) if os.path.exists(config_file): app.config.from_pyfile(config_file) -if app.config['PRODUCTION']: - if app.secret_key == 'replace-me-with-something-random': +if app.config["PRODUCTION"]: + if app.secret_key == "replace-me-with-something-random": raise Warning("You need to change the app.secret_key value for production") def setup_logging(): # Use LOGGING if defined instead of the old options - log_config = app.config.get('LOGGING') + log_config = app.config.get("LOGGING") if log_config: logging_config.dictConfig(log_config) return - fmt = '[%(filename)s:%(lineno)d] ' if app.debug else '%(module)-12s ' - fmt += '%(asctime)s %(levelname)-7s %(message)s' - datefmt = '%Y-%m-%d %H:%M:%S' + fmt = "[%(filename)s:%(lineno)d] " if app.debug else "%(module)-12s " + fmt += "%(asctime)s %(levelname)-7s %(message)s" + datefmt = "%Y-%m-%d %H:%M:%S" loglevel = logging.DEBUG if app.debug else logging.INFO formatter = logging.Formatter(fmt=fmt, datefmt=datefmt) - 
root_logger = logging.getLogger('') + root_logger = logging.getLogger("") root_logger.setLevel(logging.DEBUG) # Keep the old way to setup logging in settings.py or config.py, example: @@ -122,7 +123,7 @@ def setup_logging(): # FILE_LOGGING = False # SYSLOG_LOGGING = False # STREAM_LOGGING = True - if app.config['STREAM_LOGGING']: + if app.config["STREAM_LOGGING"]: print("doing stream logging") stream_handler = logging.StreamHandler() stream_handler.setLevel(loglevel) @@ -130,19 +131,21 @@ def setup_logging(): root_logger.addHandler(stream_handler) app.logger.addHandler(stream_handler) - if app.config['SYSLOG_LOGGING']: + if app.config["SYSLOG_LOGGING"]: print("doing syslog logging") - syslog_handler = logging.handlers.SysLogHandler(address='/dev/log', - facility=logging.handlers.SysLogHandler.LOG_LOCAL4) + syslog_handler = logging.handlers.SysLogHandler( + address="/dev/log", facility=logging.handlers.SysLogHandler.LOG_LOCAL4 + ) syslog_handler.setLevel(loglevel) syslog_handler.setFormatter(formatter) root_logger.addHandler(syslog_handler) app.logger.addHandler(syslog_handler) - if app.config['FILE_LOGGING'] and app.config['LOGFILE']: - print("doing file logging to %s" % app.config['LOGFILE']) + if app.config["FILE_LOGGING"] and app.config["LOGFILE"]: + print("doing file logging to %s" % app.config["LOGFILE"]) file_handler = logging.handlers.RotatingFileHandler( - app.config['LOGFILE'], maxBytes=500000, backupCount=5) + app.config["LOGFILE"], maxBytes=500000, backupCount=5 + ) file_handler.setLevel(loglevel) file_handler.setFormatter(formatter) root_logger.addHandler(file_handler) @@ -151,33 +154,37 @@ def setup_logging(): setup_logging() -if app.config['SHOW_DB_URI']: - app.logger.debug('using DBURI: %s' % app.config['SQLALCHEMY_DATABASE_URI']) +if app.config["SHOW_DB_URI"]: + app.logger.debug("using DBURI: %s" % app.config["SQLALCHEMY_DATABASE_URI"]) db = SQLAlchemy(app) -from resultsdb.controllers.main import main +from resultsdb.controllers.main import main # noqa: E402 + app.register_blueprint(main) -from resultsdb.controllers.api_v2 import api as api_v2 +from resultsdb.controllers.api_v2 import api as api_v2 # noqa: E402 + app.register_blueprint(api_v2, url_prefix="/api/v2.0") -from resultsdb.controllers.api_v3 import api as api_v3, oidc +from resultsdb.controllers.api_v3 import api as api_v3, oidc # noqa: E402 + app.register_blueprint(api_v3, url_prefix="/api/v3") -if app.config['AUTH_MODULE'] == 'oidc': +if app.config["AUTH_MODULE"] == "oidc": + @app.route("/auth/oidclogin") @oidc.require_login def login(): return { - 'username': oidc.user_getfield(app.config["OIDC_USERNAME_FIELD"]), - 'token': oidc.get_access_token(), + "username": oidc.user_getfield(app.config["OIDC_USERNAME_FIELD"]), + "token": oidc.get_access_token(), } oidc.init_app(app) app.oidc = oidc - app.logger.info('OpenIDConnect authentication is enabled') + app.logger.info("OpenIDConnect authentication is enabled") else: - app.logger.info('OpenIDConnect authentication is disabled') + app.logger.info("OpenIDConnect authentication is disabled") app.logger.debug("Finished ResultsDB initialization") diff --git a/resultsdb/alembic/env.py b/resultsdb/alembic/env.py index 02d1cac..1fd3cc5 100644 --- a/resultsdb/alembic/env.py +++ b/resultsdb/alembic/env.py @@ -5,7 +5,8 @@ # add '.' to the pythonpath to support migration inside development env import sys -sys.path.append('.') + +sys.path.append(".") # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
@@ -13,13 +14,14 @@ # Interpret the config file for Python logging. # This line sets up loggers basically. -#fileConfig(config.config_file_name) +# fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support from resultsdb import db + target_metadata = db.metadata -#target_metadata = None +# target_metadata = None # other values from the config, defined by the needs of env.py, # can be acquired: @@ -56,18 +58,13 @@ def run_migrations_online(): alembic_config = config.get_section(config.config_ini_section) from resultsdb import app - alembic_config['sqlalchemy.url'] = app.config['SQLALCHEMY_DATABASE_URI'] - engine = engine_from_config( - alembic_config, - prefix='sqlalchemy.', - poolclass=pool.NullPool) + alembic_config["sqlalchemy.url"] = app.config["SQLALCHEMY_DATABASE_URI"] + + engine = engine_from_config(alembic_config, prefix="sqlalchemy.", poolclass=pool.NullPool) connection = engine.connect() - context.configure( - connection=connection, - target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) try: with context.begin_transaction(): @@ -75,6 +72,7 @@ def run_migrations_online(): finally: connection.close() + if context.is_offline_mode(): run_migrations_offline() else: diff --git a/resultsdb/alembic/versions/153c416322c2_create_indexes_on_foreign_keys.py b/resultsdb/alembic/versions/153c416322c2_create_indexes_on_foreign_keys.py index a9e8ba0..b983c0a 100644 --- a/resultsdb/alembic/versions/153c416322c2_create_indexes_on_foreign_keys.py +++ b/resultsdb/alembic/versions/153c416322c2_create_indexes_on_foreign_keys.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '153c416322c2' -down_revision = '17ec41bd6e9a' +revision = "153c416322c2" +down_revision = "17ec41bd6e9a" branch_labels = None depends_on = None @@ -18,15 +18,15 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.create_index('result_fk_job_id', 'result', ['job_id'], unique=False) - op.create_index('result_fk_testcase_id', 'result', ['testcase_id'], unique=False) - op.create_index('result_data_fk_result_id', 'result_data', ['result_id'], unique=False) + op.create_index("result_fk_job_id", "result", ["job_id"], unique=False) + op.create_index("result_fk_testcase_id", "result", ["testcase_id"], unique=False) + op.create_index("result_data_fk_result_id", "result_data", ["result_id"], unique=False) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_index('result_data_fk_result_id', table_name='result_data') - op.drop_index('result_fk_testcase_id', table_name='result') - op.drop_index('result_fk_job_id', table_name='result') + op.drop_index("result_data_fk_result_id", table_name="result_data") + op.drop_index("result_fk_testcase_id", table_name="result") + op.drop_index("result_fk_job_id", table_name="result") ### end Alembic commands ### diff --git a/resultsdb/alembic/versions/15f5eeb9f635_initial_revision.py b/resultsdb/alembic/versions/15f5eeb9f635_initial_revision.py index 6995830..92dfb98 100644 --- a/resultsdb/alembic/versions/15f5eeb9f635_initial_revision.py +++ b/resultsdb/alembic/versions/15f5eeb9f635_initial_revision.py @@ -7,7 +7,7 @@ """ # revision identifiers, used by Alembic. 
-revision = '15f5eeb9f635' +revision = "15f5eeb9f635" down_revision = None branch_labels = None depends_on = None diff --git a/resultsdb/alembic/versions/17ec41bd6e9a_added_uuid_column_to_the_job_table.py b/resultsdb/alembic/versions/17ec41bd6e9a_added_uuid_column_to_the_job_table.py index 24a7a29..ffe0f3e 100644 --- a/resultsdb/alembic/versions/17ec41bd6e9a_added_uuid_column_to_the_job_table.py +++ b/resultsdb/alembic/versions/17ec41bd6e9a_added_uuid_column_to_the_job_table.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '17ec41bd6e9a' -down_revision = '433d0b5b3b96' +revision = "17ec41bd6e9a" +down_revision = "433d0b5b3b96" branch_labels = None depends_on = None @@ -18,11 +18,11 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.add_column('job', sa.Column('uuid', sa.String(length=36), nullable=True)) + op.add_column("job", sa.Column("uuid", sa.String(length=36), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_column('job', 'uuid') + op.drop_column("job", "uuid") ### end Alembic commands ### diff --git a/resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py b/resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py index bfffd65..c7c60e0 100644 --- a/resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py +++ b/resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py @@ -7,32 +7,36 @@ """ # revision identifiers, used by Alembic. -revision = '34760e10040b' -down_revision = '4ace44a44bf' +revision = "34760e10040b" +down_revision = "4ace44a44bf" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa -old_values = ('PASSED', 'INFO', 'FAILED', 'ERROR', 'WAIVED', 'NEEDS_INSPECTION') -new_values = ('PASSED', 'INFO', 'FAILED', 'ERROR', 'WAIVED', 'NEEDS_INSPECTION', 'ABORTED') +old_values = ("PASSED", "INFO", "FAILED", "ERROR", "WAIVED", "NEEDS_INSPECTION") +new_values = ("PASSED", "INFO", "FAILED", "ERROR", "WAIVED", "NEEDS_INSPECTION", "ABORTED") -old_enum = sa.Enum(*old_values, name='resultoutcome') -tmp_enum = sa.Enum(*new_values, name='resultoutcome_tmp') -new_enum = sa.Enum(*new_values, name='resultoutcome') +old_enum = sa.Enum(*old_values, name="resultoutcome") +tmp_enum = sa.Enum(*new_values, name="resultoutcome_tmp") +new_enum = sa.Enum(*new_values, name="resultoutcome") def upgrade(): # this migration is postgresql specific and fails on sqlite if op.get_bind().engine.url.drivername.startswith("postgresql"): tmp_enum.create(op.get_bind(), checkfirst=False) - op.execute('ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp ' - ' USING outcome::text::resultoutcome_tmp') + op.execute( + "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp " + " USING outcome::text::resultoutcome_tmp" + ) old_enum.drop(op.get_bind(), checkfirst=False) new_enum.create(op.get_bind(), checkfirst=False) - op.execute('ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome ' - ' USING outcome::text::resultoutcome') + op.execute( + "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome " + " USING outcome::text::resultoutcome" + ) tmp_enum.drop(op.get_bind(), checkfirst=False) @@ -42,10 +46,14 @@ def downgrade(): op.execute("UPDATE result SET outcome='ERROR' WHERE outcome='ABORTED'") tmp_enum.create(op.get_bind(), checkfirst=False) - op.execute('ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp ' - ' USING outcome::text::resultoutcome_tmp') + op.execute( + "ALTER TABLE 
result ALTER COLUMN outcome TYPE resultoutcome_tmp " + " USING outcome::text::resultoutcome_tmp" + ) new_enum.drop(op.get_bind(), checkfirst=False) old_enum.create(op.get_bind(), checkfirst=False) - op.execute('ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome ' - ' USING outcome::text::resultoutcome') + op.execute( + "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome " + " USING outcome::text::resultoutcome" + ) tmp_enum.drop(op.get_bind(), checkfirst=False) diff --git a/resultsdb/alembic/versions/433d0b5b3b96_added_index_on_the_keyval_store.py b/resultsdb/alembic/versions/433d0b5b3b96_added_index_on_the_keyval_store.py index 6b98244..7a024b6 100644 --- a/resultsdb/alembic/versions/433d0b5b3b96_added_index_on_the_keyval_store.py +++ b/resultsdb/alembic/versions/433d0b5b3b96_added_index_on_the_keyval_store.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '433d0b5b3b96' -down_revision = '15f5eeb9f635' +revision = "433d0b5b3b96" +down_revision = "15f5eeb9f635" branch_labels = None depends_on = None @@ -17,12 +17,17 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.create_index('rd_key_value_idx', 'result_data', [ - 'key', 'value'], unique=False, mysql_length={'value': 50, 'key': 20}) + op.create_index( + "rd_key_value_idx", + "result_data", + ["key", "value"], + unique=False, + mysql_length={"value": 50, "key": 20}, + ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_index('rd_key_value_idx', table_name='result_data') + op.drop_index("rd_key_value_idx", table_name="result_data") ### end Alembic commands ### diff --git a/resultsdb/alembic/versions/4ace44a44bf_change_index_on_result_data_so_like_can_.py b/resultsdb/alembic/versions/4ace44a44bf_change_index_on_result_data_so_like_can_.py index 744dce2..1fc504d 100644 --- a/resultsdb/alembic/versions/4ace44a44bf_change_index_on_result_data_so_like_can_.py +++ b/resultsdb/alembic/versions/4ace44a44bf_change_index_on_result_data_so_like_can_.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4ace44a44bf' -down_revision = '153c416322c2' +revision = "4ace44a44bf" +down_revision = "153c416322c2" branch_labels = None depends_on = None @@ -17,14 +17,19 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.create_index('result_data_idx_key_value', 'result_data', ['key', 'value'], unique=False, - postgresql_ops={'value': 'text_pattern_ops', 'key': 'text_pattern_ops'}) - op.drop_index('rd_key_value_idx', table_name='result_data') + op.create_index( + "result_data_idx_key_value", + "result_data", + ["key", "value"], + unique=False, + postgresql_ops={"value": "text_pattern_ops", "key": "text_pattern_ops"}, + ) + op.drop_index("rd_key_value_idx", table_name="result_data") ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.create_index('rd_key_value_idx', 'result_data', ['key', 'value'], unique=False) - op.drop_index('result_data_idx_key_value', table_name='result_data') + op.create_index("rd_key_value_idx", "result_data", ["key", "value"], unique=False) + op.drop_index("result_data_idx_key_value", table_name="result_data") ### end Alembic commands ### diff --git a/resultsdb/alembic/versions/4bf1390f06d1_added_index_on_submit_time.py b/resultsdb/alembic/versions/4bf1390f06d1_added_index_on_submit_time.py index cc132e1..9b164dc 100644 --- a/resultsdb/alembic/versions/4bf1390f06d1_added_index_on_submit_time.py +++ b/resultsdb/alembic/versions/4bf1390f06d1_added_index_on_submit_time.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '4bf1390f06d1' -down_revision = '34760e10040b' +revision = "4bf1390f06d1" +down_revision = "34760e10040b" branch_labels = None depends_on = None @@ -18,11 +18,11 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.create_index('result_submit_time', 'result', ['submit_time'], unique=False) + op.create_index("result_submit_time", "result", ["submit_time"], unique=False) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_index('result_submit_time', table_name='result') + op.drop_index("result_submit_time", table_name="result") ### end Alembic commands ### diff --git a/resultsdb/alembic/versions/4dbe714897fe_remove_the_user_model.py b/resultsdb/alembic/versions/4dbe714897fe_remove_the_user_model.py index b804388..6c8d9c7 100644 --- a/resultsdb/alembic/versions/4dbe714897fe_remove_the_user_model.py +++ b/resultsdb/alembic/versions/4dbe714897fe_remove_the_user_model.py @@ -9,22 +9,22 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '4dbe714897fe' -down_revision = 'dbfab576c81' +revision = "4dbe714897fe" +down_revision = "dbfab576c81" branch_labels = None depends_on = None def upgrade(): - op.drop_table('user') + op.drop_table("user") def downgrade(): op.create_table( - 'user', - sa.Column('id', sa.INTEGER(), nullable=False), - sa.Column('username', sa.VARCHAR(length=80), nullable=True), - sa.Column('pw_hash', sa.VARCHAR(length=120), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('username') + "user", + sa.Column("id", sa.INTEGER(), nullable=False), + sa.Column("username", sa.VARCHAR(length=80), nullable=True), + sa.Column("pw_hash", sa.VARCHAR(length=120), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), ) diff --git a/resultsdb/alembic/versions/540dbe71fa91_change_schema_to_v2_0_step_1.py b/resultsdb/alembic/versions/540dbe71fa91_change_schema_to_v2_0_step_1.py index ff217f0..c11cd9a 100644 --- a/resultsdb/alembic/versions/540dbe71fa91_change_schema_to_v2_0_step_1.py +++ b/resultsdb/alembic/versions/540dbe71fa91_change_schema_to_v2_0_step_1.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = '540dbe71fa91' -down_revision = '978007ecd2b' +revision = "540dbe71fa91" +down_revision = "978007ecd2b" branch_labels = None depends_on = None @@ -24,38 +24,46 @@ class Job(Base): - __tablename__ = 'job' + __tablename__ = "job" id = sa.Column(sa.Integer, primary_key=True) uuid = sa.Column(sa.String(36), unique=True) - results = relation('Result', backref='job') + results = relation("Result", backref="job") class Result(Base): - __tablename__ = 'result' + __tablename__ = "result" id = sa.Column(sa.Integer, primary_key=True) - job_id = sa.Column(sa.Integer, sa.ForeignKey('job.id')) + job_id = sa.Column(sa.Integer, sa.ForeignKey("job.id")) def upgrade(): # Merge duplicate Jobs - logger = logging.getLogger('alembic') + logger = logging.getLogger("alembic") connection = op.get_bind() session = Session(bind=connection) merge_targets = {} jobs_to_delete = [] - job_count_query = connection.execute("select count(*) from job where uuid in (select uuid from job group by uuid having count(uuid) > 1);") + job_count_query = connection.execute( + "select count(*) from job where uuid in (select uuid from job group by uuid having count(uuid) > 1);" + ) job_count = -1 for row in job_count_query: job_count = row[0] logger.info("Jobs marked for inspection: %s", job_count) - job_query = session.query(Job).from_statement(text( - "select id, uuid from job where uuid in (select uuid from job group by uuid having count(uuid) > 1) order by id;") - ).yield_per(100) + job_query = ( + session.query(Job) + .from_statement( + text( + "select id, uuid from job where uuid in (select uuid from job group by uuid having count(uuid) > 1) order by id;" + ) + ) + .yield_per(100) + ) j = r = 0 for job in job_query: @@ -80,79 +88,93 @@ def upgrade(): logger.info("Changing table structure") # JOB - op.rename_table('job', 'group') - op.alter_column('group', 'name', new_column_name='description') - op.drop_column(u'group', 'status') - op.drop_column(u'group', 'start_time') - op.drop_column(u'group', 'end_time') - op.create_unique_constraint(None, 'group', ['uuid']) + op.rename_table("job", "group") + op.alter_column("group", "name", new_column_name="description") + op.drop_column("group", "status") + op.drop_column("group", "start_time") + op.drop_column("group", "end_time") + op.create_unique_constraint(None, "group", ["uuid"]) op.create_index( - 'group_idx_uuid', 'group', ['uuid'], unique=False, postgresql_ops={'uuid': 'text_pattern_ops'}) + "group_idx_uuid", + "group", + ["uuid"], + unique=False, + postgresql_ops={"uuid": "text_pattern_ops"}, + ) # RESULT - op.add_column( - u'result', sa.Column('testcase_name', sa.Text(), nullable=True)) - op.alter_column('result', 'summary', new_column_name='note') - op.alter_column('result', 'log_url', new_column_name='ref_url') - op.create_index('result_fk_testcase_name', 'result', [ - 'testcase_name'], unique=False, postgresql_ops={'testcase_name': 'text_pattern_ops'}) - op.drop_index('result_fk_job_id', table_name='result') - op.drop_index('result_fk_testcase_id', table_name='result') - op.drop_constraint( - u'result_testcase_id_fkey', 'result', type_='foreignkey') - op.drop_constraint(u'result_job_id_fkey', 'result', type_='foreignkey') - op.create_foreign_key( - None, 'result', 'testcase', ['testcase_name'], ['name']) + op.add_column("result", sa.Column("testcase_name", sa.Text(), nullable=True)) + op.alter_column("result", "summary", new_column_name="note") + op.alter_column("result", "log_url", new_column_name="ref_url") + op.create_index( + "result_fk_testcase_name", + 
"result", + ["testcase_name"], + unique=False, + postgresql_ops={"testcase_name": "text_pattern_ops"}, + ) + op.drop_index("result_fk_job_id", table_name="result") + op.drop_index("result_fk_testcase_id", table_name="result") + op.drop_constraint("result_testcase_id_fkey", "result", type_="foreignkey") + op.drop_constraint("result_job_id_fkey", "result", type_="foreignkey") + op.create_foreign_key(None, "result", "testcase", ["testcase_name"], ["name"]) # TESTCASE - op.alter_column('testcase', 'url', new_column_name='ref_url') + op.alter_column("testcase", "url", new_column_name="ref_url") # MANY TO MANY - op.create_table('groups_to_results', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('group_uuid', sa.String(36), nullable=True), - sa.Column('result_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['group_uuid'], ['group.uuid'], ), - sa.ForeignKeyConstraint(['result_id'], ['result.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('gtr_fk_group_uuid', 'groups_to_results', [ - 'group_uuid'], unique=False, postgresql_ops={'uuid': 'text_pattern_ops'}) + op.create_table( + "groups_to_results", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("group_uuid", sa.String(36), nullable=True), + sa.Column("result_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["group_uuid"], + ["group.uuid"], + ), + sa.ForeignKeyConstraint( + ["result_id"], + ["result.id"], + ), + sa.PrimaryKeyConstraint("id"), + ) op.create_index( - 'gtr_fk_result_id', 'groups_to_results', ['result_id'], unique=False) + "gtr_fk_group_uuid", + "groups_to_results", + ["group_uuid"], + unique=False, + postgresql_ops={"uuid": "text_pattern_ops"}, + ) + op.create_index("gtr_fk_result_id", "groups_to_results", ["result_id"], unique=False) def downgrade(): # TESTCASE - op.alter_column('testcase', 'ref_url', new_column_name='url') + op.alter_column("testcase", "ref_url", new_column_name="url") # RESULT - op.alter_column('result', 'note', new_column_name='summary') - op.alter_column('result', 'ref_url', new_column_name='log_url') - op.drop_constraint( - 'result_testcase_name_fkey', 'result', type_='foreignkey') - op.create_index( - 'result_fk_testcase_id', 'result', ['testcase_id'], unique=False) - op.create_index('result_fk_job_id', 'result', ['job_id'], unique=False) - op.drop_index('result_fk_testcase_name', table_name='result') - op.drop_column(u'result', 'testcase_name') + op.alter_column("result", "note", new_column_name="summary") + op.alter_column("result", "ref_url", new_column_name="log_url") + op.drop_constraint("result_testcase_name_fkey", "result", type_="foreignkey") + op.create_index("result_fk_testcase_id", "result", ["testcase_id"], unique=False) + op.create_index("result_fk_job_id", "result", ["job_id"], unique=False) + op.drop_index("result_fk_testcase_name", table_name="result") + op.drop_column("result", "testcase_name") # JOB - op.rename_table('group', 'job') - op.alter_column('job', 'description', new_column_name='name') - op.add_column(u'job', sa.Column('end_time', sa.DateTime(), nullable=True)) - op.add_column( - u'job', sa.Column('start_time', sa.DateTime(), nullable=True)) - op.add_column( - u'job', sa.Column('status', sa.VARCHAR(length=16), nullable=True)) - op.drop_index('group_idx_uuid', table_name='job') + op.rename_table("group", "job") + op.alter_column("job", "description", new_column_name="name") + op.add_column("job", sa.Column("end_time", sa.DateTime(), nullable=True)) + op.add_column("job", sa.Column("start_time", sa.DateTime(), 
nullable=True)) + op.add_column("job", sa.Column("status", sa.VARCHAR(length=16), nullable=True)) + op.drop_index("group_idx_uuid", table_name="job") # MANY TO MANY - op.drop_index('gtr_fk_result_id', table_name='groups_to_results') - op.drop_index('gtr_fk_group_uuid', table_name='groups_to_results') - op.drop_table('groups_to_results') + op.drop_index("gtr_fk_result_id", table_name="groups_to_results") + op.drop_index("gtr_fk_group_uuid", table_name="groups_to_results") + op.drop_table("groups_to_results") # CONSTRAINTS - op.create_foreign_key(None, 'result', 'job', ['job_id'], ['id']) - op.create_foreign_key(None, 'result', 'testcase', ['testcase_id'], ['id']) - op.drop_constraint('group_uuid_key', 'job') + op.create_foreign_key(None, "result", "job", ["job_id"], ["id"]) + op.create_foreign_key(None, "result", "testcase", ["testcase_id"], ["id"]) + op.drop_constraint("group_uuid_key", "job") diff --git a/resultsdb/alembic/versions/978007ecd2b_changed_testcase_name_to_text.py b/resultsdb/alembic/versions/978007ecd2b_changed_testcase_name_to_text.py index a97aa94..390a545 100644 --- a/resultsdb/alembic/versions/978007ecd2b_changed_testcase_name_to_text.py +++ b/resultsdb/alembic/versions/978007ecd2b_changed_testcase_name_to_text.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = '978007ecd2b' -down_revision = '4bf1390f06d1' +revision = "978007ecd2b" +down_revision = "4bf1390f06d1" branch_labels = None depends_on = None @@ -18,14 +18,19 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.alter_column('testcase', 'name', type_=sa.Text) - op.create_index('testcase_idx_name', 'testcase', [ - 'name'], unique=False, postgresql_ops={'name': 'text_pattern_ops'}) + op.alter_column("testcase", "name", type_=sa.Text) + op.create_index( + "testcase_idx_name", + "testcase", + ["name"], + unique=False, + postgresql_ops={"name": "text_pattern_ops"}, + ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.alter_column('testcase', 'name', type_=sa.String(255)) - op.drop_index('testcase_idx_name', table_name='testcase') + op.alter_column("testcase", "name", type_=sa.String(255)) + op.drop_index("testcase_idx_name", table_name="testcase") ### end Alembic commands ### diff --git a/resultsdb/alembic/versions/cd581d0e83df_change_outcome_from_enum_to_string.py b/resultsdb/alembic/versions/cd581d0e83df_change_outcome_from_enum_to_string.py index 8c82026..7ad4ca1 100644 --- a/resultsdb/alembic/versions/cd581d0e83df_change_outcome_from_enum_to_string.py +++ b/resultsdb/alembic/versions/cd581d0e83df_change_outcome_from_enum_to_string.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. 
-revision = 'cd581d0e83df' -down_revision = '4dbe714897fe' +revision = "cd581d0e83df" +down_revision = "4dbe714897fe" branch_labels = None depends_on = None @@ -17,11 +17,18 @@ def upgrade(): - op.alter_column('result', 'outcome', type_=sa.String(32)) - op.create_index('result_idx_outcome', 'result', [ - 'outcome'], unique=False, postgresql_ops={'outcome': 'text_pattern_ops'}) + op.alter_column("result", "outcome", type_=sa.String(32)) + op.create_index( + "result_idx_outcome", + "result", + ["outcome"], + unique=False, + postgresql_ops={"outcome": "text_pattern_ops"}, + ) def downgrade(): - op.execute("ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome USING outcome::resultoutcome;") - op.drop_index('result_idx_outcome', table_name='result') + op.execute( + "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome USING outcome::resultoutcome;" + ) + op.drop_index("result_idx_outcome", table_name="result") diff --git a/resultsdb/alembic/versions/dbfab576c81_change_schema_to_v2_0_step_2.py b/resultsdb/alembic/versions/dbfab576c81_change_schema_to_v2_0_step_2.py index 4351123..9e81031 100644 --- a/resultsdb/alembic/versions/dbfab576c81_change_schema_to_v2_0_step_2.py +++ b/resultsdb/alembic/versions/dbfab576c81_change_schema_to_v2_0_step_2.py @@ -7,8 +7,8 @@ """ # revision identifiers, used by Alembic. -revision = 'dbfab576c81' -down_revision = '540dbe71fa91' +revision = "dbfab576c81" +down_revision = "540dbe71fa91" branch_labels = None depends_on = None @@ -26,27 +26,29 @@ db.relationship = relationship db.relation = relation -RESULT_OUTCOME = ('PASSED', 'INFO', 'FAILED', 'NEEDS_INSPECTION') +RESULT_OUTCOME = ("PASSED", "INFO", "FAILED", "NEEDS_INSPECTION") JOB_STATUS = [] class GroupsToResults(Base): - __tablename__ = 'groups_to_results' + __tablename__ = "groups_to_results" id = db.Column(db.Integer, primary_key=True) - group_uuid = db.Column(db.String(36), db.ForeignKey('group.uuid')) - result_id = db.Column(db.Integer, db.ForeignKey('result.id')) + group_uuid = db.Column(db.String(36), db.ForeignKey("group.uuid")) + result_id = db.Column(db.Integer, db.ForeignKey("result.id")) class Group(Base): - __tablename__ = 'group' + __tablename__ = "group" id = db.Column(db.Integer, primary_key=True) uuid = db.Column(db.String(36), unique=True) + + # results = db.relationship("Result", secondary = 'groups_to_results', backref="groups") class Testcase(Base): - __tablename__ = 'testcase' + __tablename__ = "testcase" id = db.Column(db.Integer, primary_key=True) name = db.Column(db.Text, unique=True) @@ -54,18 +56,18 @@ class Testcase(Base): def upgrade(): class Result(Base): - __tablename__ = 'result' + __tablename__ = "result" id = db.Column(db.Integer, primary_key=True) - job_id = db.Column(db.Integer, db.ForeignKey('group.id')) - testcase_id = db.Column(db.Integer, db.ForeignKey('testcase.id')) + job_id = db.Column(db.Integer, db.ForeignKey("group.id")) + testcase_id = db.Column(db.Integer, db.ForeignKey("testcase.id")) testcase_name = db.Column(db.Text) - groups = db.relationship("Group", secondary='groups_to_results', backref="results") - job = db.relation('Group') # , lazy = False) - testcase = db.relation('Testcase', backref='results') # , lazy = False) + groups = db.relationship("Group", secondary="groups_to_results", backref="results") + job = db.relation("Group") # , lazy = False) + testcase = db.relation("Testcase", backref="results") # , lazy = False) - logger = logging.getLogger('alembic') + logger = logging.getLogger("alembic") connection = op.get_bind() session = 
Session(bind=connection) i = 0 @@ -92,26 +94,27 @@ class Result(Base): logger.info("Final result commit") session.commit() logger.info("Removing the columns") - op.drop_column('result', 'testcase_id') - op.drop_column('result', 'job_id') + op.drop_column("result", "testcase_id") + op.drop_column("result", "job_id") def downgrade(): class Result(Base): - __tablename__ = 'result' + __tablename__ = "result" id = db.Column(db.Integer, primary_key=True) - job_id = db.Column(db.Integer, db.ForeignKey('group.id')) + job_id = db.Column(db.Integer, db.ForeignKey("group.id")) testcase_id = db.Column(db.Integer) - testcase_name = db.Column(db.Text, db.ForeignKey('testcase.name')) + testcase_name = db.Column(db.Text, db.ForeignKey("testcase.name")) - groups = db.relationship("Group", secondary='groups_to_results', backref="results") - job = db.relation('Group') # , lazy = False) - testcase = db.relation('Testcase', backref='results') # , lazy = False) + groups = db.relationship("Group", secondary="groups_to_results", backref="results") + job = db.relation("Group") # , lazy = False) + testcase = db.relation("Testcase", backref="results") # , lazy = False) - op.add_column('result', db.Column('job_id', db.INTEGER(), autoincrement=False, nullable=True)) - op.add_column('result', db.Column( - 'testcase_id', db.INTEGER(), autoincrement=False, nullable=True)) + op.add_column("result", db.Column("job_id", db.INTEGER(), autoincrement=False, nullable=True)) + op.add_column( + "result", db.Column("testcase_id", db.INTEGER(), autoincrement=False, nullable=True) + ) connection = op.get_bind() session = Session(bind=connection) diff --git a/resultsdb/authorization.py b/resultsdb/authorization.py index 5ded411..c52d934 100644 --- a/resultsdb/authorization.py +++ b/resultsdb/authorization.py @@ -36,8 +36,7 @@ def match_testcase_permissions(testcase, permissions): for permission in permissions: if "testcases" in permission: testcase_match = any( - fnmatch(testcase, testcase_pattern) - for testcase_pattern in permission["testcases"] + fnmatch(testcase, testcase_pattern) for testcase_pattern in permission["testcases"] ) elif "_testcase_regex_pattern" in permission: testcase_match = re.search(permission["_testcase_regex_pattern"], testcase) @@ -51,8 +50,7 @@ def match_testcase_permissions(testcase, permissions): def verify_authorization(user, testcase, permissions, ldap_host, ldap_searches): if not (ldap_host and ldap_searches): raise InternalServerError( - "LDAP_HOST and LDAP_SEARCHES also need to be defined " - "if PERMISSIONS is defined." + "LDAP_HOST and LDAP_SEARCHES also need to be defined " "if PERMISSIONS is defined." ) allowed_groups = [] @@ -64,9 +62,7 @@ def verify_authorization(user, testcase, permissions, ldap_host, ldap_searches): try: import ldap except ImportError: - raise InternalServerError( - "If PERMISSIONS is defined, python-ldap needs to be installed." 
- ) + raise InternalServerError("If PERMISSIONS is defined, python-ldap needs to be installed.") try: con = ldap.initialize(ldap_host) @@ -84,6 +80,4 @@ def verify_authorization(user, testcase, permissions, ldap_host, ldap_searches): if not any_groups_found: raise Unauthorized(f"Failed to find user {user} in LDAP") - raise Unauthorized( - f"You are not authorized to submit a result for the test case {testcase}" - ) + raise Unauthorized(f"You are not authorized to submit a result for the test case {testcase}") diff --git a/resultsdb/cli.py b/resultsdb/cli.py index 90cf658..7b26078 100644 --- a/resultsdb/cli.py +++ b/resultsdb/cli.py @@ -17,7 +17,6 @@ # Authors: # Josef Skladanka -import os import sys from optparse import OptionParser @@ -30,11 +29,12 @@ from sqlalchemy.engine import reflection + def get_alembic_config(): # the location of the alembic ini file and alembic scripts changes when # installed via package alembic_cfg = Config() - alembic_cfg.set_main_option('script_location', 'resultsdb:alembic') + alembic_cfg.set_main_option("script_location", "resultsdb:alembic") return alembic_cfg @@ -73,7 +73,7 @@ def initialize_db(destructive): # if it does, we assume that the database is empty insp = reflection.Inspector.from_engine(db.engine) table_names = insp.get_table_names() - if 'testcase' not in table_names and 'Testcase' not in table_names: + if "testcase" not in table_names and "Testcase" not in table_names: print(" - Creating tables") db.create_all() print(" - Stamping alembic's current version to 'head'") @@ -99,18 +99,15 @@ def mock_data(destructive): tc1 = Testcase(ref_url="http://example.com/depcheck", name="depcheck") tc2 = Testcase(ref_url="http://example.com/rpmlint", name="rpmlint") - j1 = Group(uuid='5b3f47b4-2ba2-11e5-a343-5254007dccf9', ref_url="http://example.com/job1") + j1 = Group(uuid="5b3f47b4-2ba2-11e5-a343-5254007dccf9", ref_url="http://example.com/job1") - j2 = Group(uuid='4e575b2c-2ba2-11e5-a343-5254007dccf9', ref_url="http://example.com/job2") + j2 = Group(uuid="4e575b2c-2ba2-11e5-a343-5254007dccf9", ref_url="http://example.com/job2") - r1 = Result(groups=[j1], testcase=tc1, outcome='PASSED', ref_url="http://example.com/r1") + r1 = Result(groups=[j1], testcase=tc1, outcome="PASSED", ref_url="http://example.com/r1") r2 = Result( - groups=[j1, j2], - testcase=tc1, - outcome='FAILED', - ref_url="http://example.com/r2" - ) - r3 = Result(groups=[j2], testcase=tc2, outcome='FAILED', ref_url="http://example.com/r2") + groups=[j1, j2], testcase=tc1, outcome="FAILED", ref_url="http://example.com/r2" + ) + r3 = Result(groups=[j2], testcase=tc2, outcome="FAILED", ref_url="http://example.com/r2") ResultData(r1, "item", "cabal-rpm-0.8.3-1.fc18") ResultData(r1, "arch", "x86_64") @@ -134,28 +131,33 @@ def mock_data(destructive): def main(): - possible_commands = ['init_db', 'mock_data', 'upgrade_db', 'init_alembic'] + possible_commands = ["init_db", "mock_data", "upgrade_db", "init_alembic"] - usage = 'usage: [DEV=true] %prog ' + "(%s)" % ' | '.join(possible_commands) + usage = "usage: [DEV=true] %prog " + "(%s)" % " | ".join(possible_commands) parser = OptionParser(usage=usage) - parser.add_option("-d", "--destructive", - action="store_true", dest="destructive", default=False, - help="Drop tables in `init_db`; Store data in `mock_data` " - "even if the tables are not empty") + parser.add_option( + "-d", + "--destructive", + action="store_true", + dest="destructive", + default=False, + help="Drop tables in `init_db`; Store data in `mock_data` " + "even if the tables are not 
empty", + ) (options, args) = parser.parse_args() if len(args) != 1 or args[0] not in possible_commands: print(usage) print - print('Please use one of the following commands: %s' % str(possible_commands)) + print("Please use one of the following commands: %s" % str(possible_commands)) sys.exit(1) command = { - 'init_db': initialize_db, - 'upgrade_db': upgrade_db, - 'mock_data': mock_data, - 'init_alembic': init_alembic, + "init_db": initialize_db, + "upgrade_db": upgrade_db, + "mock_data": mock_data, + "init_alembic": init_alembic, }[args[0]] if not options.destructive: @@ -166,5 +168,5 @@ def main(): sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/resultsdb/config.py b/resultsdb/config.py index 8557525..7032cda 100644 --- a/resultsdb/config.py +++ b/resultsdb/config.py @@ -28,42 +28,42 @@ class Config(object): DEBUG = True PRODUCTION = False - SECRET_KEY = 'replace-me-with-something-random' + SECRET_KEY = "replace-me-with-something-random" - HOST = '0.0.0.0' + HOST = "0.0.0.0" PORT = 5001 - SQLALCHEMY_DATABASE_URI = 'sqlite://' + SQLALCHEMY_DATABASE_URI = "sqlite://" SHOW_DB_URI = True LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'loggers': { - 'resultsdb': { - 'level': 'INFO', + "version": 1, + "disable_existing_loggers": False, + "loggers": { + "resultsdb": { + "level": "INFO", }, - 'dogpile': { - 'level': 'WARNING', + "dogpile": { + "level": "WARNING", }, }, - 'handlers': { - 'console': { - 'formatter': 'bare', - 'class': 'logging.StreamHandler', - 'stream': 'ext://sys.stdout', - 'level': 'INFO', + "handlers": { + "console": { + "formatter": "bare", + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + "level": "INFO", }, }, - 'formatters': { - 'bare': { - 'format': '[%(asctime)s] [%(process)d] [%(levelname)s] %(name)s: %(message)s', - 'datefmt': '%Y-%m-%d %H:%M:%S', + "formatters": { + "bare": { + "format": "[%(asctime)s] [%(process)d] [%(levelname)s] %(name)s: %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", } }, - 'root': { - 'level': 'WARNING', - 'handlers': ['console'], + "root": { + "level": "WARNING", + "handlers": ["console"], }, } @@ -75,19 +75,19 @@ class Config(object): # Supported values: "oidc" AUTH_MODULE = None - OIDC_CLIENT_SECRETS = '/etc/resultsdb/oauth2_client_secrets.json' - OIDC_REQUIRED_SCOPE = 'resultsdb_scope' - OIDC_USERNAME_FIELD = 'uid' + OIDC_CLIENT_SECRETS = "/etc/resultsdb/oauth2_client_secrets.json" + OIDC_REQUIRED_SCOPE = "resultsdb_scope" + OIDC_USERNAME_FIELD = "uid" - FEDMENU_URL = 'https://apps.fedoraproject.org/fedmenu' - FEDMENU_DATA_URL = 'https://apps.fedoraproject.org/js/data.js' + FEDMENU_URL = "https://apps.fedoraproject.org/fedmenu" + FEDMENU_DATA_URL = "https://apps.fedoraproject.org/js/data.js" # Set this to True or False to enable publishing to a message bus MESSAGE_BUS_PUBLISH = True # Name of the message bus plugin to use goes here. 'fedmsg' is installed by # default, but you could create your own. # Supported values: 'dummy', 'stomp', 'fedmsg' - MESSAGE_BUS_PLUGIN = 'dummy' + MESSAGE_BUS_PLUGIN = "dummy" # You can pass extra arguments to your message bus plugin here. For instance, # the fedmsg plugin expects an extra `modname` argument that can be used to # configure the topic, like this: @@ -95,22 +95,6 @@ class Config(object): # e.g. org.fedoraproject.prod.resultsdb.result.new MESSAGE_BUS_KWARGS = {} - ## Alternatively, you could use the 'stomp' messaging plugin. 
- #MESSAGE_BUS_PLUGIN = 'stomp' - #MESSAGE_BUS_KWARGS = { - # 'destination': 'topic://VirtualTopic.eng.resultsdb.result.new', - # 'connection': { - # 'host_and_ports': [ - # ('broker01', '61612'), - # ('broker02', '61612'), - # ], - # 'use_ssl': True, - # 'ssl_key_file': '/path/to/key/file', - # 'ssl_cert_file': '/path/to/cert/file', - # 'ssl_ca_certs': '/path/to/ca/certs', - # }, - #} - # Publish Taskotron-compatible fedmsgs on the 'taskotron' topic MESSAGE_BUS_PUBLISH_TASKOTRON = False @@ -119,33 +103,37 @@ class ProductionConfig(Config): DEBUG = False PRODUCTION = True SHOW_DB_URI = False - MESSAGE_BUS_PLUGIN = 'fedmsg' - MESSAGE_BUS_KWARGS = {'modname': 'resultsdb'} + MESSAGE_BUS_PLUGIN = "fedmsg" + MESSAGE_BUS_KWARGS = {"modname": "resultsdb"} class DevelopmentConfig(Config): TRAP_BAD_REQUEST_ERRORS = True - SQLALCHEMY_DATABASE_URI = 'sqlite:////var/tmp/resultsdb_db.sqlite' - OIDC_CLIENT_SECRETS = os.getcwd() + '/conf/oauth2_client_secrets.json.example' + SQLALCHEMY_DATABASE_URI = "sqlite:////var/tmp/resultsdb_db.sqlite" + OIDC_CLIENT_SECRETS = os.getcwd() + "/conf/oauth2_client_secrets.json.example" class TestingConfig(DevelopmentConfig): TRAP_BAD_REQUEST_ERRORS = True - FEDMENU_URL = 'https://apps.stg.fedoraproject.org/fedmenu' - FEDMENU_DATA_URL = 'https://apps.stg.fedoraproject.org/js/data.js' - ADDITIONAL_RESULT_OUTCOMES = ('AMAZING',) - MESSAGE_BUS_PLUGIN = 'dummy' + FEDMENU_URL = "https://apps.stg.fedoraproject.org/fedmenu" + FEDMENU_DATA_URL = "https://apps.stg.fedoraproject.org/js/data.js" + ADDITIONAL_RESULT_OUTCOMES = ("AMAZING",) + MESSAGE_BUS_PLUGIN = "dummy" MESSAGE_BUS_KWARGS = {} - PERMISSIONS = [{ - "users": ["testuser1"], - "testcases": ["testcase1"], - }] - AUTH_MODULE = 'oidc' - LDAP_HOST = 'ldap://ldap.example.com' - LDAP_SEARCHES = [{ - 'BASE': 'ou=Groups,dc=example,dc=com', - 'SEARCH_STRING': '(memberUid={user})', - }] + PERMISSIONS = [ + { + "users": ["testuser1"], + "testcases": ["testcase1"], + } + ] + AUTH_MODULE = "oidc" + LDAP_HOST = "ldap://ldap.example.com" + LDAP_SEARCHES = [ + { + "BASE": "ou=Groups,dc=example,dc=com", + "SEARCH_STRING": "(memberUid={user})", + } + ] def openshift_config(config_object, openshift_production): @@ -156,25 +144,30 @@ def openshift_config(config_object, openshift_production): os.environ["POSTGRESQL_PASSWORD"], os.environ["POSTGRESQL_SERVICE_HOST"], os.environ["POSTGRESQL_SERVICE_PORT"], - os.environ["POSTGRESQL_DATABASE"] + os.environ["POSTGRESQL_DATABASE"], ) config_object["SECRET_KEY"] = os.environ["SECRET_KEY"] except KeyError: - print("OpenShift mode enabled but required values couldn't be fetched. " - "Check, if you have these variables defined in you env: " - "(POSTGRESQL_[USER, PASSWORD, DATABASE, SERVICE_HOST, SERVICE_PORT], " - "SECRET_KEY)", file=sys.stderr) + print( + "OpenShift mode enabled but required values couldn't be fetched. 
" + "Check, if you have these variables defined in you env: " + "(POSTGRESQL_[USER, PASSWORD, DATABASE, SERVICE_HOST, SERVICE_PORT], " + "SECRET_KEY)", + file=sys.stderr, + ) sys.exit(1) # Nuke out messaging, we don't support this in OpenShift mode # Inject settings.py and disable OpenShift mode if you need this - config_object["MESSAGE_BUS_PLUGIN"] = 'dummy' + config_object["MESSAGE_BUS_PLUGIN"] = "dummy" config_object["MESSAGE_BUS_KWARGS"] = {} if os.getenv("MESSAGE_BUS_PLUGIN") or os.getenv("MESSAGE_BUS_KWARGS"): print("It appears you've tried to set up messaging in OpenShift mode.") - print("This is not supported, you need to inject setting.py and disable " - "OpenShift mode if you need messaging.") + print( + "This is not supported, you need to inject setting.py and disable " + "OpenShift mode if you need messaging." + ) # Danger zone, keep this False out in the wild, always config_object["SHOW_DB_URI"] = False diff --git a/resultsdb/controllers/api_v2.py b/resultsdb/controllers/api_v2.py index 1a21fe2..3524751 100644 --- a/resultsdb/controllers/api_v2.py +++ b/resultsdb/controllers/api_v2.py @@ -40,7 +40,7 @@ from resultsdb.models.results import Group, Result, Testcase, ResultData from resultsdb.models.results import RESULT_OUTCOME -api = Blueprint('api_v2', __name__) +api = Blueprint("api_v2", __name__) try: basestring @@ -63,18 +63,20 @@ def bad_request(error): def not_found(error): return jsonify({"message": "Not found"}), 404 + # ============================================================================= # GLOBAL VARIABLES # ============================================================================= RE_PAGE = re.compile(r"([?&])page=([0-9]+)") RE_CALLBACK = re.compile(r"([?&])callback=[^&]*&?") -RE_CLEAN_AMPERSANDS = re.compile(r'&+') +RE_CLEAN_AMPERSANDS = re.compile(r"&+") # ============================================================================= # GLOBAL METHODS # ============================================================================= + def pagination(q, page, limit): """ Sets the offset/limit for the DB query. 
@@ -117,7 +119,7 @@ def prev_next_urls(data, limit=QUERY_LIMIT): baseurl = RE_PAGE.sub("%spage=%s" % (flag, placeholder), request.url) baseurl = RE_CALLBACK.sub(r"\1", baseurl) - baseurl = RE_CLEAN_AMPERSANDS.sub('&', baseurl) + baseurl = RE_CLEAN_AMPERSANDS.sub("&", baseurl) if page > 0: prev = baseurl.replace(placeholder, str(page - 1)) @@ -133,20 +135,20 @@ # ============================================================================= -@api.route('/groups', methods=['GET']) +@api.route("/groups", methods=["GET"]) @validate() def get_groups(query: GroupsParams): q = db.session.query(Group).order_by(db.desc(Group.id)) desc_filters = [] if query.description: - for description in query.description.split(','): + for description in query.description.split(","): if not description.strip(): continue desc_filters.append(Group.description == description) -# desc_filters.append(Group.description.in_(query.description.split(','))) + # desc_filters.append(Group.description.in_(query.description.split(','))) elif query.description_like_: - for description in query.description_like_.split(','): + for description in query.description_like_.split(","): if not description.strip(): continue desc_filters.append(Group.description.like(description.replace("*", "%"))) @@ -155,29 +157,31 @@ # Filter by uuid if query.uuid: - q = q.filter(Group.uuid.in_(query.uuid.split(','))) + q = q.filter(Group.uuid.in_(query.uuid.split(","))) q = pagination(q, query.page, query.limit) data, prev, next = prev_next_urls(q.all(), query.limit) - return jsonify(dict( - prev=prev, - next=next, - data=[SERIALIZE(o) for o in data], - )) + return jsonify( + dict( + prev=prev, + next=next, + data=[SERIALIZE(o) for o in data], + ) + ) -@api.route('/groups/<group_id>', methods=['GET']) +@api.route("/groups/<group_id>", methods=["GET"]) def get_group(group_id): q = Group.query.filter_by(uuid=group_id) group = q.first() if not group: - return jsonify({'message': "Group not found"}), 404 + return jsonify({"message": "Group not found"}), 404 return jsonify(SERIALIZE(group)) -@api.route('/groups', methods=['POST']) +@api.route("/groups", methods=["POST"]) @validate() def create_group(body: CreateGroupParams): if body.uuid: @@ -201,19 +205,28 @@ # ============================================================================= # RESULTS # ============================================================================= -def select_results(since_start=None, since_end=None, outcomes=None, groups=None, testcases=None, testcases_like=None, result_data=None, _sort=None): +def select_results( + since_start=None, + since_end=None, + outcomes=None, + groups=None, + testcases=None, + testcases_like=None, + result_data=None, + _sort=None, +): # Checks if the sort parameter specified in the request is valid before querying. # Sorts by submit_time in a descending order if the sort parameter is absent or invalid.
q = db.session.query(Result) query_sorted = False if _sort: - sort_match = re.match(r'^(?P<order>asc|desc):(?P<column>.+)$', _sort) - if sort_match and sort_match.group('column') == 'submit_time': - sort_order = {'asc': db.asc, 'desc': db.desc}[sort_match.group('order')] - sort_column = getattr(Result, sort_match.group('column')) + sort_match = re.match(r"^(?P<order>asc|desc):(?P<column>.+)$", _sort) + if sort_match and sort_match.group("column") == "submit_time": + sort_order = {"asc": db.asc, "desc": db.desc}[sort_match.group("order")] + sort_column = getattr(Result, sort_match.group("column")) q = q.order_by(sort_order(sort_column)) query_sorted = True - if _sort and _sort == 'disable_sorting': + if _sort and _sort == "disable_sorting": query_sorted = True if not query_sorted: q = q.order_by(db.desc(Result.submit_time)) @@ -238,7 +251,7 @@ filter_by_testcase.append(Result.testcase_name.in_(testcases)) if testcases_like: for testcase in testcases_like: - testcase = testcase.replace('*', '%') + testcase = testcase.replace("*", "%") filter_by_testcase.append(Result.testcase_name.like(testcase)) if filter_by_testcase: q = q.filter(db.or_(*filter_by_testcase)) @@ -247,11 +260,11 @@ if result_data is not None: for key, values in result_data.items(): try: - key, modifier = key.split(':') + key, modifier = key.split(":") except ValueError: # no : in key key, modifier = (key, None) - if modifier == 'like': + if modifier == "like": alias = db.aliased(ResultData) if len(values) > 1: # multiple values likes = [] @@ -262,7 +275,7 @@ # put it together to (key = key AND (value LIKE foo OR value LIKE bar OR ...)) q = q.join(alias).filter(db.and_(alias.key == key, db.or_(*likes))) else: - value = values[0].replace('*', '%') + value = values[0].replace("*", "%") q = q.join(alias).filter(db.and_(alias.key == key, alias.value.like(value))) else: @@ -273,15 +286,15 @@ def __get_results_parse_args(query: ResultsParams): args = { - '_sort': query.sort_, - 'limit': query.limit, - 'page': query.page, - 'testcases': query.testcases, - 'testcases:like': query.testcases_like_, - 'groups': query.groups, - '_distinct_on': query.distinct_on_, - 'outcome': query.outcome, - 'since': query.since, + "_sort": query.sort_, + "limit": query.limit, + "page": query.page, + "testcases": query.testcases, + "testcases:like": query.testcases_like_, + "groups": query.groups, + "_distinct_on": query.distinct_on_, + "outcome": query.outcome, + "since": query.since, } # find results_data with the query parameters @@ -293,61 +306,63 @@ results_data = {k: request.args.getlist(k) for k in request.args.keys() if k not in args} for param, values in results_data.items(): for i, value in enumerate(values): - results_data[param][i] = value.split(',') + results_data[param][i] = value.split(",") # flatten the list results_data[param] = [item for sublist in results_data[param] for item in sublist] return { - 'result_data': results_data if results_data else None, - 'args': args, + "result_data": results_data if results_data else None, + "args": args, } def __get_results(query: ResultsParams, group_ids=None, testcase_names=None): p = __get_results_parse_args(query) - args = p['args'] + args = p["args"] -
groups = group_ids if group_ids is not None else args['groups'] - testcases = testcase_names if testcase_names is not None else args['testcases'] + groups = group_ids if group_ids is not None else args["groups"] + testcases = testcase_names if testcase_names is not None else args["testcases"] q = select_results( - since_start=args['since']['start'], - since_end=args['since']['end'], - outcomes=args['outcome'], + since_start=args["since"]["start"], + since_end=args["since"]["end"], + outcomes=args["outcome"], groups=groups, testcases=testcases, - testcases_like=args['testcases:like'], - result_data=p['result_data'], - _sort=args['_sort'], + testcases_like=args["testcases:like"], + result_data=p["result_data"], + _sort=args["_sort"], ) - q = pagination(q, args['page'], args['limit']) - data, prev, next = prev_next_urls(q.all(), args['limit']) + q = pagination(q, args["page"], args["limit"]) + data, prev, next = prev_next_urls(q.all(), args["limit"]) - return jsonify(dict( - prev=prev, - next=next, - data=[SERIALIZE(o) for o in data], - )) + return jsonify( + dict( + prev=prev, + next=next, + data=[SERIALIZE(o) for o in data], + ) + ) -@api.route('/results', methods=['GET']) +@api.route("/results", methods=["GET"]) @validate() def get_results(query: ResultsParams): return __get_results(query) -@api.route('/results/latest', methods=['GET']) +@api.route("/results/latest", methods=["GET"]) @validate() def get_results_latest(query: ResultsParams): p = __get_results_parse_args(query) - args = p['args'] - since_start = args['since'].get('start', None) - since_end = args['since'].get('end', None) - groups = args.get('groups', None) - testcases = args.get('testcases', None) - testcases_like = args.get('testcases:like', None) - distinct_on = args.get('_distinct_on', None) + args = p["args"] + since_start = args["since"].get("start", None) + since_end = args["since"].get("end", None) + groups = args.get("groups", None) + testcases = args.get("testcases", None) + testcases_like = args.get("testcases:like", None) + distinct_on = args.get("_distinct_on", None) if not distinct_on: q = select_results( @@ -356,48 +371,67 @@ def get_results_latest(query: ResultsParams): groups=groups, testcases=testcases, testcases_like=testcases_like, - result_data=p['result_data'], + result_data=p["result_data"], ) # Produce a subquery with the same filter criteria as above *except* # test case name, which we group by and join on. 
-    sq = select_results(
-        since_start=since_start,
-        since_end=since_end,
-        groups=groups,
-        result_data=p['result_data'],
-    )\
-        .order_by(None)\
+    sq = (
+        select_results(
+            since_start=since_start,
+            since_end=since_end,
+            groups=groups,
+            result_data=p["result_data"],
+        )
+        .order_by(None)
         .with_entities(
-            Result.testcase_name.label('testcase_name'),
-            db.func.max(Result.submit_time).label('max_submit_time'))\
-        .group_by(Result.testcase_name)\
+            Result.testcase_name.label("testcase_name"),
+            db.func.max(Result.submit_time).label("max_submit_time"),
+        )
+        .group_by(Result.testcase_name)
         .subquery()
-    q = q.join(sq, db.and_(Result.testcase_name == sq.c.testcase_name,
-                           Result.submit_time == sq.c.max_submit_time))
+    )
+    q = q.join(
+        sq,
+        db.and_(
+            Result.testcase_name == sq.c.testcase_name,
+            Result.submit_time == sq.c.max_submit_time,
+        ),
+    )

     results = q.all()

-    return jsonify(dict(
-        data=[SERIALIZE(o) for o in results],
-    ))
-
+    return jsonify(
+        dict(
+            data=[SERIALIZE(o) for o in results],
+        )
+    )

-    if not any([testcases, testcases_like, since_start, since_end, groups, p['result_data']]):
-        return jsonify({'message': ("Please, provide at least one "
-                                    "filter beside '_distinct_on'")}), 400
+    if not any([testcases, testcases_like, since_start, since_end, groups, p["result_data"]]):
+        return (
+            jsonify({"message": ("Please, provide at least one " "filter beside '_distinct_on'")}),
+            400,
+        )

     q = db.session.query(Result)
-    q = select_results(since_start=since_start, since_end=since_end,
-                       groups=groups, testcases=testcases,
-                       testcases_like=testcases_like, result_data=p['result_data'], _sort="disable_sorting")
+    q = select_results(
+        since_start=since_start,
+        since_end=since_end,
+        groups=groups,
+        testcases=testcases,
+        testcases_like=testcases_like,
+        result_data=p["result_data"],
+        _sort="disable_sorting",
+    )

     values_distinct_on = [Result.testcase_name]
     for i, key in enumerate(distinct_on):
-        name = 'result_data_%s_%s' % (i, key)
-        alias = db.aliased(db.session.query(ResultData).filter(ResultData.key == key).subquery(), name=name)
+        name = "result_data_%s_%s" % (i, key)
+        alias = db.aliased(
+            db.session.query(ResultData).filter(ResultData.key == key).subquery(), name=name
+        )
         q = q.outerjoin(alias)
-        values_distinct_on.append(db.text('{}.value'.format(name)))
+        values_distinct_on.append(db.text("{}.value".format(name)))

     q = q.distinct(*values_distinct_on)
     q = q.order_by(*values_distinct_on).order_by(db.desc(Result.submit_time))
@@ -406,54 +440,54 @@ def get_results_latest(query: ResultsParams):
     results = dict(
         data=[SERIALIZE(o) for o in results],
     )
-    results['data'] = sorted(results['data'], key=lambda x: x['submit_time'], reverse=True)
+    results["data"] = sorted(results["data"], key=lambda x: x["submit_time"], reverse=True)

     return jsonify(results)


-@api.route('/groups/<group_id>/results', methods=['GET'])
+@api.route("/groups/<group_id>/results", methods=["GET"])
 @validate()
 def get_results_by_group(group_id: str, query: ResultsParams):
     group = Group.query.filter_by(uuid=group_id).first()
     if not group:
-        return jsonify({'message': "Group not found: %s" % (group_id,)}), 404
+        return jsonify({"message": "Group not found: %s" % (group_id,)}), 404
     return __get_results(query, group_ids=[group.uuid])

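# A minimal client-side sketch of the two equivalent read paths (assumed
# host/port; the uuid is the example value from the functional tests below,
# which assert the same equivalence through the Flask test client):
#
#   import requests
#
#   BASE = "http://localhost:5001/api/v2.0"  # assumed dev-server address
#   group_uuid = "3ce5f6d7-ce34-489b-ab61-325ce634eab5"
#   by_path = requests.get(f"{BASE}/groups/{group_uuid}/results").json()
#   by_param = requests.get(f"{BASE}/results", params={"groups": group_uuid}).json()
#   assert by_path["data"] == by_param["data"]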
"Testcase not found"}), 404 + return jsonify({"message": "Testcase not found"}), 404 return __get_results(query, testcase_names=[testcase.name]) -@api.route('/results/', methods=['GET']) +@api.route("/results/", methods=["GET"]) def get_result(result_id): try: result = Result.query.filter_by(id=result_id).one() except orm_exc.NoResultFound: - return jsonify({'message': "Result not found"}), 404 + return jsonify({"message": "Result not found"}), 404 return jsonify(SERIALIZE(result)) -@api.route('/results', methods=['POST']) +@api.route("/results", methods=["POST"]) @validate() def create_result(body: CreateResultParams): if body.data: - invalid_keys = [key for key in body.data.keys() if ':' in key] + invalid_keys = [key for key in body.data.keys() if ":" in key] if invalid_keys: app.logger.warning("Colon not allowed in key name: %s", invalid_keys) - return jsonify({'message': "Colon not allowed in key name: %r" % invalid_keys}), 400 + return jsonify({"message": "Colon not allowed in key name: %r" % invalid_keys}), 400 tc = body.testcase - testcase = Testcase.query.filter_by(name=tc['name']).first() + testcase = Testcase.query.filter_by(name=tc["name"]).first() if not testcase: - app.logger.debug("Testcase %s does not exist yet. Creating", tc['name']) - testcase = Testcase(name=tc['name']) - testcase.ref_url = tc.get('ref_url', testcase.ref_url) + app.logger.debug("Testcase %s does not exist yet. Creating", tc["name"]) + testcase = Testcase(name=tc["name"]) + testcase.ref_url = tc.get("ref_url", testcase.ref_url) db.session.add(testcase) # groups is a list of strings(uuid) or dicts(group object) @@ -466,14 +500,14 @@ def create_result(body: CreateResultParams): if isinstance(grp, basestring): grp = dict(uuid=grp) elif isinstance(grp, dict): - grp['uuid'] = grp.get('uuid', str(uuid.uuid1())) + grp["uuid"] = grp.get("uuid", str(uuid.uuid1())) - group = Group.query.filter_by(uuid=grp['uuid']).first() + group = Group.query.filter_by(uuid=grp["uuid"]).first() if not group: - group = Group(uuid=grp['uuid']) + group = Group(uuid=grp["uuid"]) - group.description = grp.get('description', group.description) - group.ref_url = grp.get('ref_url', group.ref_url) + group.description = grp.get("description", group.description) + group.ref_url = grp.get("ref_url", group.ref_url) db.session.add(group) groups.append(group) @@ -483,7 +517,8 @@ def create_result(body: CreateResultParams): # for each key-value pair in body.data # convert keys to unicode # if value is string: NOP - # if value is list or tuple: convert values to unicode, create key-value pair for each value from the list + # if value is list or tuple: + # convert values to unicode, create key-value pair for each value from the list # if value is something else: convert to unicode # Store all the key-value pairs if isinstance(body.data, dict): @@ -492,10 +527,10 @@ def create_result(body: CreateResultParams): if not (isinstance(key, str) or isinstance(key, unicode)): key = unicode(key) - if (isinstance(value, str) or isinstance(value, unicode)): + if isinstance(value, str) or isinstance(value, unicode): to_store.append((key, value)) - elif (isinstance(value, list) or isinstance(value, tuple)): + elif isinstance(value, list) or isinstance(value, tuple): for v in value: if not (isinstance(v, str) or isinstance(v, unicode)): v = unicode(v) @@ -514,15 +549,16 @@ def create_result(body: CreateResultParams): # TESTCASES # ============================================================================= + def select_testcases(args_name=None, 
@@ -514,15 +549,16 @@ def create_result(body: CreateResultParams):

 # TESTCASES
 # =============================================================================

+
 def select_testcases(args_name=None, args_name_like=None):
     q = db.session.query(Testcase).order_by(db.asc(Testcase.name))

     name_filters = []
     if args_name:
-        for name in [name.strip() for name in args_name.split(',') if name.strip()]:
+        for name in [name.strip() for name in args_name.split(",") if name.strip()]:
             name_filters.append(Testcase.name == name)
     elif args_name_like:
-        for name in [name.strip() for name in args_name_like.split(',') if name.strip()]:
+        for name in [name.strip() for name in args_name_like.split(",") if name.strip()]:
             name_filters.append(Testcase.name.like(name.replace("*", "%")))
     if name_filters:
         q = q.filter(db.or_(*name_filters))
@@ -530,31 +566,33 @@ def select_testcases(args_name=None, args_name_like=None):
     return q


-@api.route('/testcases', methods=['GET'])
+@api.route("/testcases", methods=["GET"])
 @validate()
 def get_testcases(query: TestcasesParams):
     q = select_testcases(query.name, query.name_like_)
     q = pagination(q, query.page, query.limit)
     data, prev, next = prev_next_urls(q.all(), query.limit)

-    return jsonify(dict(
-        prev=prev,
-        next=next,
-        data=[SERIALIZE(o) for o in data],
-    ))
+    return jsonify(
+        dict(
+            prev=prev,
+            next=next,
+            data=[SERIALIZE(o) for o in data],
+        )
+    )


-@api.route('/testcases/<testcase_name>', methods=['GET'])
+@api.route("/testcases/<testcase_name>", methods=["GET"])
 def get_testcase(testcase_name):
     try:
         testcase = Testcase.query.filter_by(name=testcase_name).one()
     except orm_exc.NoResultFound:
-        return jsonify({'message': "Testcase not found"}), 404
+        return jsonify({"message": "Testcase not found"}), 404

     return jsonify(SERIALIZE(testcase))


-@api.route('/testcases', methods=['POST'])
+@api.route("/testcases", methods=["POST"])
 @validate()
 def create_testcase(body: CreateTestcaseParams):
     testcase = Testcase.query.filter_by(name=body.name).first()
@@ -569,7 +607,7 @@ def create_testcase(body: CreateTestcaseParams):
     return jsonify(SERIALIZE(testcase)), 201


-@api.route('/healthcheck', methods=['GET'])
+@api.route("/healthcheck", methods=["GET"])
 def healthcheck():
     """
     Request handler for performing an application-level health check. This is
@@ -581,21 +619,27 @@ def healthcheck():
     try:
         db.session.execute("SELECT 1 FROM result LIMIT 0").fetchall()
     except Exception:
-        app.logger.exception('Healthcheck failed on DB query.')
-        return jsonify({"message": 'Unable to communicate with database'}), 503
+        app.logger.exception("Healthcheck failed on DB query.")
+        return jsonify({"message": "Unable to communicate with database"}), 503

-    return jsonify({"message": 'Health check OK'}), 200
+    return jsonify({"message": "Health check OK"}), 200


-@api.route('', methods=['GET'])
-@api.route('/', methods=['GET'])
+@api.route("", methods=["GET"])
+@api.route("/", methods=["GET"])
 def landing_page():
-    return jsonify({"message": "Everything is fine. But choose wisely, for while "
-                               "the true Grail will bring you life, the false "
-                               "Grail will take it from you.",
-                    "documentation": "http://docs.resultsdb20.apiary.io/",
-                    "groups": url_for('.get_groups', _external=True),
-                    "results": url_for('.get_results', _external=True),
-                    "testcases": url_for('.get_testcases', _external=True),
-                    "outcomes": RESULT_OUTCOME,
-                    }), 300
+    return (
+        jsonify(
+            {
+                "message": "Everything is fine. 
But choose wisely, for while " + "the true Grail will bring you life, the false " + "Grail will take it from you.", + "documentation": "http://docs.resultsdb20.apiary.io/", + "groups": url_for(".get_groups", _external=True), + "results": url_for(".get_results", _external=True), + "testcases": url_for(".get_testcases", _external=True), + "outcomes": RESULT_OUTCOME, + } + ), + 300, + ) diff --git a/resultsdb/controllers/common.py b/resultsdb/controllers/common.py index 75d94a0..4131403 100644 --- a/resultsdb/controllers/common.py +++ b/resultsdb/controllers/common.py @@ -36,9 +36,7 @@ def commit_result(result): plugin.publish(create_message(result)) if app.config["MESSAGE_BUS_PUBLISH_TASKOTRON"]: - app.logger.debug( - "Preparing to publish Taskotron message for result id %d", result.id - ) + app.logger.debug("Preparing to publish Taskotron message for result id %d", result.id) publish_taskotron_message(result) return jsonify(SERIALIZE(result)), 201 diff --git a/resultsdb/controllers/main.py b/resultsdb/controllers/main.py index 13f2212..7a7e43c 100644 --- a/resultsdb/controllers/main.py +++ b/resultsdb/controllers/main.py @@ -19,10 +19,10 @@ from flask import Blueprint, render_template -main = Blueprint('main', __name__) +main = Blueprint("main", __name__) -@main.route('/') -@main.route('/index') +@main.route("/") +@main.route("/index") def index(): - return render_template('index.html') + return render_template("index.html") diff --git a/resultsdb/messaging.py b/resultsdb/messaging.py index 0207387..a4849a0 100644 --- a/resultsdb/messaging.py +++ b/resultsdb/messaging.py @@ -28,15 +28,26 @@ from resultsdb.serializers.api_v2 import Serializer import logging + log = logging.getLogger(__name__) try: from fedora_messaging.api import Message, publish - from fedora_messaging.exceptions import PublishReturned, PublishTimeout, PublishForbidden, ConnectionException + from fedora_messaging.exceptions import ( + PublishReturned, + PublishTimeout, + PublishForbidden, + ConnectionException, + ) except ImportError: - if app.config.get('MESSAGE_BUS_PUBLISH_TASKOTRON') or app.config.get('MESSAGE_BUS_PLUGIN') == 'fedmsg': - log.error('fedora-messaging must be installed if "MESSAGE_BUS_PUBLISH_TASKOTRON" is ' - 'enabled or "MESSAGE_BUS_PLUGIN" is set to "fedmsg"') + if ( + app.config.get("MESSAGE_BUS_PUBLISH_TASKOTRON") + or app.config.get("MESSAGE_BUS_PLUGIN") == "fedmsg" + ): + log.error( + 'fedora-messaging must be installed if "MESSAGE_BUS_PUBLISH_TASKOTRON" is ' + 'enabled or "MESSAGE_BUS_PLUGIN" is set to "fedmsg"' + ) raise @@ -58,10 +69,11 @@ def get_prev_result(result): q = q.filter_by(testcase_name=result.testcase_name) for result_data in result.data: - if result_data.key in ['item', 'type', 'arch']: + if result_data.key in ["item", "type", "arch"]: alias = db.aliased(ResultData) q = q.join(alias).filter( - db.and_(alias.key == result_data.key, alias.value == result_data.value)) + db.and_(alias.key == result_data.key, alias.value == result_data.value) + ) q = q.order_by(db.desc(Result.submit_time)) return q.first() @@ -88,35 +100,36 @@ def publish_taskotron_message(result): task = dict( (datum.key, datum.value) for datum in result.data - if datum.key in ('item', 'type',) + if datum.key + in ( + "item", + "type", + ) ) - task['name'] = result.testcase.name + task["name"] = result.testcase.name body = { - 'task': task, - 'result': { - 'id': result.id, - 'submit_time': result.submit_time.strftime("%Y-%m-%d %H:%M:%S UTC"), - 'prev_outcome': prev_result.outcome if prev_result else None, - 'outcome': 
result.outcome, - 'log_url': result.ref_url, - } + "task": task, + "result": { + "id": result.id, + "submit_time": result.submit_time.strftime("%Y-%m-%d %H:%M:%S UTC"), + "prev_outcome": prev_result.outcome if prev_result else None, + "outcome": result.outcome, + "log_url": result.ref_url, + }, } try: - msg = Message ( - topic='taskotron.result.new', - body=body - ) + msg = Message(topic="taskotron.result.new", body=body) publish(msg) log.debug("Message published") except PublishReturned as e: - log.error('Fedora Messaging broker rejected message {}: {}'.format(msg.id, e)) + log.error("Fedora Messaging broker rejected message {}: {}".format(msg.id, e)) except PublishTimeout: - log.error('Timeout publishing message {}'.format(msg.id)) + log.error("Timeout publishing message {}".format(msg.id)) except PublishForbidden as e: - log.error('Permission error publishing message {}: {}'.format(msg.id, e)) + log.error("Permission error publishing message {}: {}".format(msg.id, e)) except ConnectionException as e: - log.error('Error sending message {}: {}'.format(msg.id, e.reason)) + log.error("Error sending message {}: {}".format(msg.id, e.reason)) def create_message(result): @@ -125,12 +138,13 @@ def create_message(result): class MessagingPlugin(object): - """ Abstract base class that messaging plugins must extend. + """Abstract base class that messaging plugins must extend. One abstract method is declared which must be implemented: - publish(message) """ + __metaclass__ = abc.ABCMeta def __init__(self, **kwargs): @@ -143,7 +157,8 @@ def publish(self, message): class DummyPlugin(MessagingPlugin): - """ A dummy plugin used for testing. Just logs the messages. """ + """A dummy plugin used for testing. Just logs the messages.""" + # A class attribute where we store all messages published. # Used by the test suite. This would cause a memory leak if used in prod. history = [] @@ -154,56 +169,53 @@ def publish(self, message): class FedmsgPlugin(MessagingPlugin): - """ A fedmsg plugin, used to publish to the fedmsg bus. 
""" + """A fedmsg plugin, used to publish to the fedmsg bus.""" def publish(self, message): try: - msg = Message( - topic='{}.result.new'.format(self.modname), - body=message - ) + msg = Message(topic="{}.result.new".format(self.modname), body=message) publish(msg) log.debug("Message published") except PublishReturned as e: - log.error('Fedora Messaging broker rejected message {}: {}'.format(msg.id, e)) + log.error("Fedora Messaging broker rejected message {}: {}".format(msg.id, e)) except PublishTimeout: - log.error('Timeout publishing message {}'.format(msg.id)) + log.error("Timeout publishing message {}".format(msg.id)) except PublishForbidden as e: - log.error('Permission error publishing message {}: {}'.format(msg.id, e)) + log.error("Permission error publishing message {}: {}".format(msg.id, e)) except ConnectionException as e: - log.error('Error sending message {}: {}'.format(msg.id, e.reason)) + log.error("Error sending message {}: {}".format(msg.id, e.reason)) class StompPlugin(MessagingPlugin): def __init__(self, **kwargs): args = kwargs.copy() - conn_args = args['connection'].copy() - if 'use_ssl' in conn_args: - use_ssl = conn_args['use_ssl'] - del conn_args['use_ssl'] + conn_args = args["connection"].copy() + if "use_ssl" in conn_args: + use_ssl = conn_args["use_ssl"] + del conn_args["use_ssl"] else: use_ssl = False - ssl_args = {'for_hosts': conn_args.get('host_and_ports', [])} - for attr in ('key_file', 'cert_file', 'ca_certs'): - conn_attr = f'ssl_{attr}' + ssl_args = {"for_hosts": conn_args.get("host_and_ports", [])} + for attr in ("key_file", "cert_file", "ca_certs"): + conn_attr = f"ssl_{attr}" if conn_attr in conn_args: ssl_args[attr] = conn_args[conn_attr] del conn_args[conn_attr] - if 'ssl_version' in conn_args: - ssl_args['ssl_version'] = conn_args['ssl_version'] - del conn_args['ssl_version'] + if "ssl_version" in conn_args: + ssl_args["ssl_version"] = conn_args["ssl_version"] + del conn_args["ssl_version"] - args['connection'] = conn_args - args['use_ssl'] = use_ssl - args['ssl_args'] = ssl_args + args["connection"] = conn_args + args["use_ssl"] = use_ssl + args["ssl_args"] = ssl_args super(StompPlugin, self).__init__(**args) # Validate that some required config is present - required = ['connection', 'destination'] + required = ["connection", "destination"] for attr in required: if getattr(self, attr, None) is None: raise ValueError("%r required for %r." % (attr, self)) @@ -226,9 +238,9 @@ def publish(self, msg): def load_messaging_plugin(name, kwargs): - """ Instantiate and return the appropriate messaging plugin. 
""" - points = pkg_resources.iter_entry_points('resultsdb.messaging.plugins') - classes = {'dummy': DummyPlugin} + """Instantiate and return the appropriate messaging plugin.""" + points = pkg_resources.iter_entry_points("resultsdb.messaging.plugins") + classes = {"dummy": DummyPlugin} classes.update(dict([(point.name, point.load()) for point in points])) log.debug("Found the following installed messaging plugin %r" % classes) diff --git a/resultsdb/models/results.py b/resultsdb/models/results.py index c17ac1d..a4b07fb 100644 --- a/resultsdb/models/results.py +++ b/resultsdb/models/results.py @@ -24,25 +24,26 @@ from resultsdb.serializers import DBSerialize -__all__ = ['Testcase', 'Group', 'Result', 'ResultData', 'GroupsToResults', 'RESULT_OUTCOME'] +__all__ = ["Testcase", "Group", "Result", "ResultData", "GroupsToResults", "RESULT_OUTCOME"] -PRESET_OUTCOMES = ('PASSED', 'INFO', 'FAILED', 'NEEDS_INSPECTION') -ADDITIONAL_RESULT_OUTCOMES = tuple(app.config.get('ADDITIONAL_RESULT_OUTCOMES', [])) +PRESET_OUTCOMES = ("PASSED", "INFO", "FAILED", "NEEDS_INSPECTION") +ADDITIONAL_RESULT_OUTCOMES = tuple(app.config.get("ADDITIONAL_RESULT_OUTCOMES", [])) RESULT_OUTCOME = PRESET_OUTCOMES + ADDITIONAL_RESULT_OUTCOMES JOB_STATUS = [] class GroupsToResults(db.Model): - __tablename__ = 'groups_to_results' + __tablename__ = "groups_to_results" id = db.Column(db.Integer, primary_key=True) - group_uuid = db.Column(db.String(36), db.ForeignKey('group.uuid')) - result_id = db.Column(db.Integer, db.ForeignKey('result.id')) + group_uuid = db.Column(db.String(36), db.ForeignKey("group.uuid")) + result_id = db.Column(db.Integer, db.ForeignKey("result.id")) __table_args__ = ( - db.Index('gtr_fk_group_uuid', 'group_uuid', postgresql_ops={'uuid': 'text_pattern_ops'}), - db.Index('gtr_fk_result_id', 'result_id'), + db.Index("gtr_fk_group_uuid", "group_uuid", postgresql_ops={"uuid": "text_pattern_ops"}), + db.Index("gtr_fk_result_id", "result_id"), ) + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
# # DO NOT FORGET TO UPDATE SERIALIZERS AFTER CHANGING STRUCTURE @@ -57,12 +58,14 @@ class Group(db.Model, DBSerialize): description = db.Column(db.Text) ref_url = db.Column(db.Text) - results = db.relationship("Result", secondary='groups_to_results', backref="groups") + results = db.relationship("Result", secondary="groups_to_results", backref="groups") __table_args__ = ( - db.Index('group_idx_uuid', 'uuid', - postgresql_ops={'uuid': 'text_pattern_ops'}, - ), + db.Index( + "group_idx_uuid", + "uuid", + postgresql_ops={"uuid": "text_pattern_ops"}, + ), ) def __init__(self, uuid=None, ref_url=None, description=None): @@ -80,9 +83,11 @@ class Testcase(db.Model, DBSerialize): ref_url = db.Column(db.Text) __table_args__ = ( - db.Index('testcase_idx_name', 'name', - postgresql_ops={'name': 'text_pattern_ops'}, - ), + db.Index( + "testcase_idx_name", + "name", + postgresql_ops={"name": "text_pattern_ops"}, + ), ) def __init__(self, name, ref_url=None): @@ -93,23 +98,28 @@ def __init__(self, name, ref_url=None): class Result(db.Model, DBSerialize): id = db.Column(db.Integer, primary_key=True) - testcase_name = db.Column(db.Text, db.ForeignKey('testcase.name')) + testcase_name = db.Column(db.Text, db.ForeignKey("testcase.name")) submit_time = db.Column(db.DateTime, default=datetime.datetime.utcnow) outcome = db.Column(db.String(32)) note = db.Column(db.Text) ref_url = db.Column(db.Text) - testcase = db.relation('Testcase', backref='results') # , lazy = False) - data = db.relation('ResultData', backref='result') # , lazy = False) + testcase = db.relation("Testcase", backref="results") # , lazy = False) + data = db.relation("ResultData", backref="result") # , lazy = False) __table_args__ = ( - db.Index('result_fk_testcase_name', 'testcase_name', - postgresql_ops={'testcase_name': 'text_pattern_ops'}), - db.Index('result_submit_time', 'submit_time'), - db.Index('result_idx_outcome', 'outcome', - postgresql_ops={'outcome': 'text_pattern_ops'}, - ), + db.Index( + "result_fk_testcase_name", + "testcase_name", + postgresql_ops={"testcase_name": "text_pattern_ops"}, + ), + db.Index("result_submit_time", "submit_time"), + db.Index( + "result_idx_outcome", + "outcome", + postgresql_ops={"outcome": "text_pattern_ops"}, + ), ) def __init__(self, testcase, outcome, groups=None, ref_url=None, note=None, submit_time=None): @@ -124,16 +134,19 @@ def __init__(self, testcase, outcome, groups=None, ref_url=None, note=None, subm class ResultData(db.Model, DBSerialize): id = db.Column(db.Integer, primary_key=True) - result_id = db.Column(db.Integer, db.ForeignKey('result.id')) + result_id = db.Column(db.Integer, db.ForeignKey("result.id")) key = db.Column(db.Text) value = db.Column(db.Text) __table_args__ = ( - db.Index('result_data_idx_key_value', 'key', 'value', - postgresql_ops={'key': 'text_pattern_ops', 'value': 'text_pattern_ops'}, - ), - db.Index('result_data_fk_result_id', 'result_id'), + db.Index( + "result_data_idx_key_value", + "key", + "value", + postgresql_ops={"key": "text_pattern_ops", "value": "text_pattern_ops"}, + ), + db.Index("result_data_fk_result_id", "result_id"), ) def __init__(self, result, key, value): diff --git a/resultsdb/parsers/api_v2.py b/resultsdb/parsers/api_v2.py index 61bdda6..38ee966 100644 --- a/resultsdb/parsers/api_v2.py +++ b/resultsdb/parsers/api_v2.py @@ -15,7 +15,7 @@ def parse_since(since): since_start = None since_end = None - s = since.split(',') + s = since.split(",") since_start = iso8601.parse_date(s[0]) try: since_start = since_start.replace(tzinfo=None) # we need to 
strip timezone info @@ -46,7 +46,7 @@ class BaseListParams(BaseModel): class GroupsParams(BaseListParams): uuid: Optional[str] description: Optional[str] - description_like_: Optional[str] = Field(alias='description:like') + description_like_: Optional[str] = Field(alias="description:like") class CreateGroupParams(BaseModel): @@ -63,34 +63,30 @@ def __get_validators__(cls): @classmethod def validate(cls, v): if isinstance(v, str): - return cls([ - x for x in (x.strip() for x in v.split(',')) if x - ]) + return cls([x for x in (x.strip() for x in v.split(",")) if x]) if isinstance(v, list) and len(v) == 1 and isinstance(v[0], str): - return cls([ - x for x in (x.strip() for x in v[0].split(',')) if x - ]) + return cls([x for x in (x.strip() for x in v[0].split(",")) if x]) return cls(v) class ResultsParams(BaseListParams): - sort_: str = Field(alias='_sort', default='') - since: dict = {'start': None, 'end': None} + sort_: str = Field(alias="_sort", default="") + since: dict = {"start": None, "end": None} outcome: Optional[QueryList] groups: Optional[QueryList] testcases: Optional[QueryList] - testcases_like_: Optional[QueryList] = Field(alias='testcases:like') - distinct_on_: Optional[QueryList] = Field(alias='_distinct_on') + testcases_like_: Optional[QueryList] = Field(alias="testcases:like") + distinct_on_: Optional[QueryList] = Field(alias="_distinct_on") - @validator('since', pre=True) + @validator("since", pre=True) def parse_since(cls, v): try: s, e = parse_since(v[0]) except iso8601.iso8601.ParseError: - raise ValueError('must be in ISO8601 format') - return {'start': s, 'end': e} + raise ValueError("must be in ISO8601 format") + return {"start": s, "end": e} - @validator('outcome') + @validator("outcome") def outcome_must_be_valid(cls, v): outcomes = [x.upper() for x in v] if any(x not in RESULT_OUTCOME for x in outcomes): @@ -107,15 +103,15 @@ class CreateResultParams(BaseModel): ref_url: Optional[str] submit_time: Any - @validator('testcase', pre=True) + @validator("testcase", pre=True) def parse_testcase(cls, v): - if not v or (isinstance(v, dict) and not v.get('name')): - raise ValueError('testcase name must be non-empty') + if not v or (isinstance(v, dict) and not v.get("name")): + raise ValueError("testcase name must be non-empty") if isinstance(v, str): - return {'name': v} + return {"name": v} return v - @validator('submit_time', pre=True) + @validator("submit_time", pre=True) def parse_submit_time(cls, v): if isinstance(v, datetime): return v @@ -124,9 +120,9 @@ def parse_submit_time(cls, v): if isinstance(v, Number): return time_from_milliseconds(v) if isinstance(v, str): - for suffix in ('Z', '', '%z', '+00'): + for suffix in ("Z", "", "%z", "+00"): try: - return datetime.strptime(v, f'%Y-%m-%dT%H:%M:%S.%f{suffix}') + return datetime.strptime(v, f"%Y-%m-%dT%H:%M:%S.%f{suffix}") except ValueError: pass @@ -140,13 +136,13 @@ def parse_submit_time(cls, v): " got %r" % v ) - @validator('testcase') + @validator("testcase") def testcase_must_be_valid(cls, v): - if isinstance(v, dict) and not v.get('name'): + if isinstance(v, dict) and not v.get("name"): raise "" return v - @validator('outcome') + @validator("outcome") def outcome_must_be_valid(cls, v): if v not in RESULT_OUTCOME: raise ValueError(f'must be one of: {", ".join(RESULT_OUTCOME)}') @@ -155,7 +151,7 @@ def outcome_must_be_valid(cls, v): class TestcasesParams(BaseListParams): name: Optional[str] - name_like_: Optional[str] = Field(alias='name:like') + name_like_: Optional[str] = Field(alias="name:like") class 
CreateTestcaseParams(BaseModel): diff --git a/resultsdb/parsers/api_v3.py b/resultsdb/parsers/api_v3.py index a5a0f25..93845ff 100644 --- a/resultsdb/parsers/api_v3.py +++ b/resultsdb/parsers/api_v3.py @@ -230,12 +230,10 @@ def outcome_must_be_valid(cls, v): @root_validator def only_available_for_error_outcome(cls, values): - if ( - values["error_reason"] is not None or values["issue_url"] is not None - ) and values.get("outcome") != "ERROR": - raise ValueError( - "error_reason and issue_url can be only set for ERROR outcome" - ) + if (values["error_reason"] is not None or values["issue_url"] is not None) and values.get( + "outcome" + ) != "ERROR": + raise ValueError("error_reason and issue_url can be only set for ERROR outcome") return values @classmethod diff --git a/resultsdb/proxy.py b/resultsdb/proxy.py index 78792b3..c28ad4d 100644 --- a/resultsdb/proxy.py +++ b/resultsdb/proxy.py @@ -2,18 +2,18 @@ # Copyright 2009-2014, Red Hat, Inc. # License: GPL-2.0+ -''' +""" Makes fedocal an application behind a reverse proxy and thus ensure the redirects are using ``https``. Original Source: http://flask.pocoo.org/snippets/35/ by Peter Hansen Source: https://github.com/fedora-infra/fedocal/blob/master/fedocal/proxy.py -''' +""" class ReverseProxied(object): - '''Wrap the application in this middleware and configure the + """Wrap the application in this middleware and configure the front-end server to add these headers, to let you quietly bind this to a URL other than / and to an HTTP scheme that is different than what is used locally. @@ -38,24 +38,25 @@ class ReverseProxied(object): } :param app: the WSGI application - ''' + """ def __init__(self, app): self.app = app def __call__(self, environ, start_response): - script_name = environ.get('HTTP_X_SCRIPT_NAME', '') + script_name = environ.get("HTTP_X_SCRIPT_NAME", "") if script_name: - environ['SCRIPT_NAME'] = script_name - path_info = environ['PATH_INFO'] + environ["SCRIPT_NAME"] = script_name + path_info = environ["PATH_INFO"] if path_info.startswith(script_name): - environ['PATH_INFO'] = path_info[len(script_name):] + prefix_len = len(script_name) + environ["PATH_INFO"] = path_info[prefix_len:] - server = environ.get('HTTP_X_FORWARDED_HOST', '') + server = environ.get("HTTP_X_FORWARDED_HOST", "") if server: - environ['HTTP_HOST'] = server + environ["HTTP_HOST"] = server - scheme = environ.get('HTTP_X_FORWARDED_SCHEME', '') + scheme = environ.get("HTTP_X_FORWARDED_SCHEME", "") if scheme: - environ['wsgi.url_scheme'] = scheme + environ["wsgi.url_scheme"] = scheme return self.app(environ, start_response) diff --git a/resultsdb/serializers/__init__.py b/resultsdb/serializers/__init__.py index c10a91e..664eb43 100644 --- a/resultsdb/serializers/__init__.py +++ b/resultsdb/serializers/__init__.py @@ -24,17 +24,17 @@ except NameError: basestring = (str, bytes) + class DBSerialize(object): pass class BaseSerializer(object): - def serialize(self, value, **kwargs): # serialize the database objects # the specific serializer needs to implement serialize_CLASSNAME methods if DBSerialize in value.__class__.__bases__: - return getattr(self, '_serialize_%s' % value.__class__.__name__)(value, **kwargs) + return getattr(self, "_serialize_%s" % value.__class__.__name__)(value, **kwargs) # convert datetimes to the right format if type(value) in (datetime, date): @@ -46,12 +46,12 @@ def serialize(self, value, **kwargs): ret[k] = self.serialize(v, **kwargs) return ret - #in py3 string-like types have __iter__ causing endless loops + # in py3 string-like types 
have __iter__ causing endless loops if isinstance(value, basestring): return value # convert iterables to list of serialized stuff - if hasattr(value, '__iter__'): + if hasattr(value, "__iter__"): ret = [] for v in value: ret.append(self.serialize(v, **kwargs)) diff --git a/resultsdb/serializers/api_v2.py b/resultsdb/serializers/api_v2.py index c47dece..615c834 100644 --- a/resultsdb/serializers/api_v2.py +++ b/resultsdb/serializers/api_v2.py @@ -22,15 +22,14 @@ class Serializer(BaseSerializer): - def _serialize_Group(self, o, **kwargs): rv = dict( uuid=o.uuid, description=o.description, ref_url=o.ref_url, - results=url_for('api_v2.get_results', groups=[o.uuid], _external=True), + results=url_for("api_v2.get_results", groups=[o.uuid], _external=True), results_count=len(o.results), - href=url_for('api_v2.get_group', group_id=o.uuid, _external=True), + href=url_for("api_v2.get_group", group_id=o.uuid, _external=True), ) return {key: self.serialize(value) for key, value in rv.items()} @@ -39,7 +38,7 @@ def _serialize_Testcase(self, o, **kwargs): rv = dict( name=o.name, ref_url=o.ref_url, - href=url_for('api_v2.get_testcase', testcase_name=o.name, _external=True), + href=url_for("api_v2.get_testcase", testcase_name=o.name, _external=True), ) return {key: self.serialize(value) for key, value in rv.items()} @@ -61,7 +60,7 @@ def _serialize_Result(self, o, **kwargs): note=o.note, ref_url=o.ref_url, data=result_data, - href=url_for('api_v2.get_result', result_id=o.id, _external=True), + href=url_for("api_v2.get_result", result_id=o.id, _external=True), ) return {key: self.serialize(value) for key, value in rv.items()} diff --git a/testing/conftest.py b/testing/conftest.py index 45d6997..195ea35 100644 --- a/testing/conftest.py +++ b/testing/conftest.py @@ -8,20 +8,21 @@ def mock_db(tmpdir_factory): postgres_port = os.getenv("POSTGRES_5432_TCP", None) if postgres_port: - dburi = ( - "postgresql+psycopg2://resultsdb:resultsdb@" - f"localhost:{postgres_port}/resultsdb" - ) + dburi = "postgresql+psycopg2://resultsdb:resultsdb@" f"localhost:{postgres_port}/resultsdb" else: dbfile = tmpdir_factory.mktemp("data").join("test_db.sqlite") dburi = f"sqlite:///{dbfile}" - with patch.dict("resultsdb.app.config", { + with patch.dict( + "resultsdb.app.config", + { "SQLALCHEMY_DATABASE_URI": dburi, "MESSAGE_BUS_PUBLISH": True, "MESSAGE_BUS_PLUGIN": "dummy", - }): + }, + ): import resultsdb + resultsdb.db.drop_all() resultsdb.db.create_all() yield @@ -33,16 +34,17 @@ def pytest_addoption(parser): should be detected and run """ - parser.addoption('-F', '--functional', action='store_true', default=False, - help='Add functional tests') + parser.addoption( + "-F", "--functional", action="store_true", default=False, help="Add functional tests" + ) def pytest_ignore_collect(path, config): """Prevents collection of any files named functest* to speed up non - integration tests""" - if path.fnmatch('*functest*'): + integration tests""" + if path.fnmatch("*functest*"): try: - is_functional = config.getvalue('functional') + is_functional = config.getvalue("functional") except KeyError: return True @@ -53,4 +55,4 @@ def pytest_configure(config): """Called after command line options have been parsed and all plugins and initial conftest files been loaded.""" - os.environ['TEST'] = 'true' + os.environ["TEST"] = "true" diff --git a/testing/functest_api_v20.py b/testing/functest_api_v20.py index 4b94b85..964d1c4 100644 --- a/testing/functest_api_v20.py +++ b/testing/functest_api_v20.py @@ -21,6 +21,7 @@ import datetime import os 
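# The functional tests below are collected only when pytest is given the
# -F/--functional flag registered in conftest.py above; a typical invocation
# (assumed, matching the conftest options) would be:
#
#   pytest -F testing/
#
# With POSTGRES_5432_TCP unset, the mock_db fixture falls back to a
# throw-away SQLite database, and require_postgres skips or fails the
# PostgreSQL-only tests accordingly.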
import copy +from unittest import TestCase import resultsdb import resultsdb.messaging @@ -32,19 +33,28 @@ class AboutTime(object): - def __eq__(self, value): start = (datetime.datetime.utcnow() - datetime.timedelta(seconds=10)).isoformat() stop = (datetime.datetime.utcnow() + datetime.timedelta(seconds=10)).isoformat() return start <= value <= stop -class TestFuncApiV20(): +class TestFuncApiV20(TestCase): + def require_postgres(self): + if os.getenv("NO_CAN_HAS_POSTGRES", None): + self.skipTest("PostgreSQL server not available (disabled with NO_CAN_HAS_POSTGRES)") + + if resultsdb.app.config["SQLALCHEMY_DATABASE_URI"].startswith("sqlite"): + raise RuntimeError( + "This test requires PostgreSQL to work properly. " + "You can disable it by setting NO_CAN_HAS_POSTGRES " + "env variable to any non-empty value" + ) @classmethod def setup_class(cls): - resultsdb.app.config['MESSAGE_BUS_PUBLISH'] = True - resultsdb.app.config['MESSAGE_BUS_PLUGIN'] = 'dummy' + resultsdb.app.config["MESSAGE_BUS_PUBLISH"] = True + resultsdb.app.config["MESSAGE_BUS_PLUGIN"] = "dummy" def setup_method(self, method): resultsdb.db.drop_all() @@ -58,48 +68,53 @@ def setup_method(self, method): "http://example.com/fedora-ci.koji-build./plans/basic.functional" ) self.ref_testcase = { - 'name': self.ref_testcase_name, - 'ref_url': self.ref_testcase_ref_url, - 'href': self.ref_url_prefix + '/testcases/' + self.ref_testcase_name + "name": self.ref_testcase_name, + "ref_url": self.ref_testcase_ref_url, + "href": self.ref_url_prefix + "/testcases/" + self.ref_testcase_name, } # Group data - self.ref_group_uuid = '3ce5f6d7-ce34-489b-ab61-325ce634eab5' - self.ref_group_description = 'Testing Group' - self.ref_group_ref_url = 'http://example.com/testing.group' + self.ref_group_uuid = "3ce5f6d7-ce34-489b-ab61-325ce634eab5" + self.ref_group_description = "Testing Group" + self.ref_group_ref_url = "http://example.com/testing.group" self.ref_group = { - 'uuid': self.ref_group_uuid, - 'description': self.ref_group_description, - 'ref_url': self.ref_group_ref_url, - 'href': self.ref_url_prefix + '/groups/' + self.ref_group_uuid, - 'results_count': 0, - 'results': self.ref_url_prefix + '/results?groups=' + self.ref_group_uuid, + "uuid": self.ref_group_uuid, + "description": self.ref_group_description, + "ref_url": self.ref_group_ref_url, + "href": self.ref_url_prefix + "/groups/" + self.ref_group_uuid, + "results_count": 0, + "results": self.ref_url_prefix + "/results?groups=" + self.ref_group_uuid, } # Result data self.ref_result_id = 1 - self.ref_result_outcome = 'PASSED' - self.ref_result_note = 'Result Note' - self.ref_result_item = 'perl-Specio-0.25-1.fc26' - self.ref_result_type = 'koji_build' - self.ref_result_arch = 'x86_64' + self.ref_result_outcome = "PASSED" + self.ref_result_note = "Result Note" + self.ref_result_item = "perl-Specio-0.25-1.fc26" + self.ref_result_type = "koji_build" + self.ref_result_arch = "x86_64" self.ref_result_data = { - 'item': self.ref_result_item, - 'type': self.ref_result_type, - 'arch': self.ref_result_arch, - 'moo': ['boo', 'woof'], + "item": self.ref_result_item, + "type": self.ref_result_type, + "arch": self.ref_result_arch, + "moo": ["boo", "woof"], } - self.ref_result_ref_url = 'http://example.com/testing.result' + self.ref_result_ref_url = "http://example.com/testing.result" self.ref_result = { - 'id': self.ref_result_id, - 'groups': [self.ref_group['uuid']], - 'testcase': self.ref_testcase, - 'submit_time': AboutTime(), - 'outcome': self.ref_result_outcome, - 'note': self.ref_result_note, - 
'ref_url': self.ref_result_ref_url, - 'data': dict(((key, [value] if isinstance(value, basestring) else value) for key, value in self.ref_result_data.items())), - 'href': self.ref_url_prefix + '/results/1', + "id": self.ref_result_id, + "groups": [self.ref_group["uuid"]], + "testcase": self.ref_testcase, + "submit_time": AboutTime(), + "outcome": self.ref_result_outcome, + "note": self.ref_result_note, + "ref_url": self.ref_result_ref_url, + "data": dict( + ( + (key, [value] if isinstance(value, basestring) else value) + for key, value in self.ref_result_data.items() + ) + ), + "href": self.ref_url_prefix + "/results/1", } def teardown_method(self, method): @@ -113,8 +128,8 @@ def helper_create_testcase(self, name=None, ref_url=None): name = self.ref_testcase_name if ref_url is None: ref_url = self.ref_testcase_ref_url - ref_data = json.dumps({'name': name, 'ref_url': ref_url}) - r = self.app.post('/api/v2.0/testcases', data=ref_data, content_type='application/json') + ref_data = json.dumps({"name": name, "ref_url": ref_url}) + r = self.app.post("/api/v2.0/testcases", data=ref_data, content_type="application/json") data = json.loads(r.data) return r, data @@ -124,33 +139,33 @@ def test_create_testcase(self): assert data == self.ref_testcase def test_create_testcase_missing_data(self): - ref_data = json.dumps({'ref_url': self.ref_testcase_ref_url}) + ref_data = json.dumps({"ref_url": self.ref_testcase_ref_url}) - r = self.app.post('/api/v2.0/testcases', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/testcases", data=ref_data, content_type="application/json") assert r.status_code == 400 assert r.json == { - 'validation_error': { - 'body_params': [{ - 'loc': ['name'], - 'msg': 'field required', - 'type': 'value_error.missing' - }] + "validation_error": { + "body_params": [ + {"loc": ["name"], "msg": "field required", "type": "value_error.missing"} + ] } } def test_create_testcase_empty_name(self): - ref_data = json.dumps({'name': ''}) + ref_data = json.dumps({"name": ""}) - r = self.app.post('/api/v2.0/testcases', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/testcases", data=ref_data, content_type="application/json") assert r.status_code == 400 assert r.json == { - 'validation_error': { - 'body_params': [{ - 'ctx': {'limit_value': 1}, - 'loc': ['name'], - 'msg': 'ensure this value has at least 1 characters', - 'type': 'value_error.any_str.min_length' - }] + "validation_error": { + "body_params": [ + { + "ctx": {"limit_value": 1}, + "loc": ["name"], + "msg": "ensure this value has at least 1 characters", + "type": "value_error.any_str.min_length", + } + ] } } @@ -158,11 +173,11 @@ def test_update_testcase(self): self.test_create_testcase() testcase = copy.copy(self.ref_testcase) - testcase['ref_url'] = 'Updated' + testcase["ref_url"] = "Updated" - ref_data = json.dumps({'name': self.ref_testcase_name, 'ref_url': testcase['ref_url']}) + ref_data = json.dumps({"name": self.ref_testcase_name, "ref_url": testcase["ref_url"]}) - r = self.app.post('/api/v2.0/testcases', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/testcases", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 201 @@ -171,7 +186,7 @@ def test_update_testcase(self): def test_get_testcase(self): self.test_create_testcase() - r = self.app.get('/api/v2.0/testcases/%s' % self.ref_testcase_name) + r = self.app.get("/api/v2.0/testcases/%s" % self.ref_testcase_name) data = json.loads(r.data) @@ 
-179,45 +194,45 @@ def test_get_testcase(self): assert data == self.ref_testcase def test_get_missing_testcase(self): - r = self.app.get('/api/v2.0/testcases/%s' % self.ref_testcase_name) + r = self.app.get("/api/v2.0/testcases/%s" % self.ref_testcase_name) data = json.loads(r.data) assert r.status_code == 404 - assert data['message'] == "Testcase not found" + assert data["message"] == "Testcase not found" def test_get_testcases(self): - r = self.app.get('/api/v2.0/testcases') + r = self.app.get("/api/v2.0/testcases") data = json.loads(r.data) assert r.status_code == 200 - assert data['data'] == [] + assert data["data"] == [] self.test_create_testcase() - r = self.app.get('/api/v2.0/testcases') + r = self.app.get("/api/v2.0/testcases") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_testcase + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_testcase def test_get_testcases_by_name(self): self.test_create_testcase() - r = self.app.get('/api/v2.0/testcases?name=%s' % self.ref_testcase_name) + r = self.app.get("/api/v2.0/testcases?name=%s" % self.ref_testcase_name) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_testcase + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_testcase - r = self.app.get('/api/v2.0/testcases?name:like=*%s*' % self.ref_testcase_name[1:-1]) + r = self.app.get("/api/v2.0/testcases?name:like=*%s*" % self.ref_testcase_name[1:-1]) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_testcase + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_testcase # =============== GROUPS ================== @@ -228,13 +243,9 @@ def helper_create_group(self, uuid=None, description=None, ref_url=None): description = self.ref_group_description if ref_url is None: ref_url = self.ref_group_ref_url - ref_data = json.dumps({ - 'uuid': uuid, - 'description': description, - 'ref_url': ref_url - }) + ref_data = json.dumps({"uuid": uuid, "description": description, "ref_url": ref_url}) - r = self.app.post('/api/v2.0/groups', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/groups", data=ref_data, content_type="application/json") data = json.loads(r.data) return r, data @@ -246,31 +257,29 @@ def test_create_group(self): def test_create_group_no_data(self): ref_data = json.dumps({}) - r = self.app.post('/api/v2.0/groups', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/groups", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 201 - assert len(data['uuid']) == len(self.ref_group_uuid) - assert data['description'] is None - assert data['ref_url'] is None - assert data['href'] == self.ref_url_prefix + '/groups/' + data['uuid'] - assert data['results_count'] == 0 - assert data['results'] == self.ref_url_prefix + '/results?groups=' + data['uuid'] + assert len(data["uuid"]) == len(self.ref_group_uuid) + assert data["description"] is None + assert data["ref_url"] is None + assert data["href"] == self.ref_url_prefix + "/groups/" + data["uuid"] + assert data["results_count"] == 0 + assert data["results"] == self.ref_url_prefix + "/results?groups=" + data["uuid"] def test_update_group(self): self.test_create_group() - ref_data = json.dumps({ - 'uuid': self.ref_group_uuid, - 'description': 'Changed', - 'ref_url': 'Changed' - }) + 
ref_data = json.dumps( + {"uuid": self.ref_group_uuid, "description": "Changed", "ref_url": "Changed"} + ) - r = self.app.post('/api/v2.0/groups', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/groups", data=ref_data, content_type="application/json") data = json.loads(r.data) group = copy.copy(self.ref_group) - group['description'] = group['ref_url'] = 'Changed' + group["description"] = group["ref_url"] = "Changed" assert r.status_code == 201 assert data == group @@ -278,77 +287,78 @@ def test_update_group(self): def test_get_group(self): self.test_create_group() - r = self.app.get('/api/v2.0/groups/%s' % self.ref_group_uuid) + r = self.app.get("/api/v2.0/groups/%s" % self.ref_group_uuid) data = json.loads(r.data) assert r.status_code == 200 assert data == self.ref_group def test_get_missing_group(self): - r = self.app.get('/api/v2.0/groups/missing') + r = self.app.get("/api/v2.0/groups/missing") data = json.loads(r.data) assert r.status_code == 404 - assert data['message'] == 'Group not found' + assert data["message"] == "Group not found" def test_get_groups(self): - r = self.app.get('/api/v2.0/groups') + r = self.app.get("/api/v2.0/groups") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 0 + assert len(data["data"]) == 0 self.test_create_group() - r = self.app.get('/api/v2.0/groups') + r = self.app.get("/api/v2.0/groups") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_group + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_group def test_get_groups_by_description(self): self.test_create_group() - r = self.app.get('/api/v2.0/groups?description=%s' % self.ref_group_description) + r = self.app.get("/api/v2.0/groups?description=%s" % self.ref_group_description) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_group + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_group - r = self.app.get('/api/v2.0/groups?description:like=*%s*' % - self.ref_group_description[1:-1]) + r = self.app.get( + "/api/v2.0/groups?description:like=*%s*" % self.ref_group_description[1:-1] + ) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_group + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_group def test_get_groups_by_more_descriptions(self): r, data = self.helper_create_group(uuid="1", description="FooBar") r, data = self.helper_create_group(uuid="2", description="BarFoo") - r = self.app.get('/api/v2.0/groups?description=FooBar,BarFoo') + r = self.app.get("/api/v2.0/groups?description=FooBar,BarFoo") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 - r = self.app.get('/api/v2.0/groups?description:like=*oo*,*ar*') + r = self.app.get("/api/v2.0/groups?description:like=*oo*,*ar*") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 def test_get_groups_by_more_uuids(self): r, data = self.helper_create_group(uuid="FooBar") r, data = self.helper_create_group(uuid="BarFoo") - r = self.app.get('/api/v2.0/groups?uuid=FooBar,BarFoo') + r = self.app.get("/api/v2.0/groups?uuid=FooBar,BarFoo") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 # =============== RESULTS 
================== @@ -362,16 +372,18 @@ def helper_create_result(self, outcome=None, groups=None, testcase=None, data=No if data is None: data = self.ref_result_data - ref_data = json.dumps(dict( - outcome=outcome, - testcase=testcase, - groups=groups, - note=self.ref_result_note, - data=data, - ref_url=self.ref_result_ref_url, - )) + ref_data = json.dumps( + dict( + outcome=outcome, + testcase=testcase, + groups=groups, + note=self.ref_result_note, + data=data, + ref_url=self.ref_result_ref_url, + ) + ) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) return r, data @@ -388,9 +400,9 @@ def test_create_result_custom_outcome(self): self.test_create_group() self.test_create_testcase() ref_result = copy.deepcopy(self.ref_result) - ref_result['outcome'] = 'AMAZING' + ref_result["outcome"] = "AMAZING" - r, data = self.helper_create_result(outcome='AMAZING') + r, data = self.helper_create_result(outcome="AMAZING") assert r.status_code == 201 assert data == ref_result @@ -398,93 +410,95 @@ def test_create_result_custom_outcome(self): def test_create_result_with_testcase_name(self): self.test_create_group() self.test_create_testcase() - testcase_name = self.ref_result['testcase']['name'] + testcase_name = self.ref_result["testcase"]["name"] - r, data = self.helper_create_result(outcome='AMAZING', testcase=testcase_name) + r, data = self.helper_create_result(outcome="AMAZING", testcase=testcase_name) assert r.status_code == 201 - assert data['testcase']['name'] == testcase_name + assert data["testcase"]["name"] == testcase_name def test_create_result_empty_testcase(self): - r = self.app.post('/api/v2.0/results', json={'outcome': 'passed', 'testcase': ''}) + r = self.app.post("/api/v2.0/results", json={"outcome": "passed", "testcase": ""}) data = json.loads(r.data) assert r.status_code == 400 assert data == { - 'validation_error': { - 'body_params': [{ - 'loc': ['testcase'], - 'msg': 'testcase name must be non-empty', - 'type': 'value_error' - }] + "validation_error": { + "body_params": [ + { + "loc": ["testcase"], + "msg": "testcase name must be non-empty", + "type": "value_error", + } + ] } } def test_create_result_empty_testcase_name(self): r = self.app.post( - '/api/v2.0/results', json={'outcome': 'passed', 'testcase': {'name': ''}}) + "/api/v2.0/results", json={"outcome": "passed", "testcase": {"name": ""}} + ) data = json.loads(r.data) assert r.status_code == 400 assert data == { - 'validation_error': { - 'body_params': [{ - 'loc': ['testcase'], - 'msg': 'testcase name must be non-empty', - 'type': 'value_error' - }] + "validation_error": { + "body_params": [ + { + "loc": ["testcase"], + "msg": "testcase name must be non-empty", + "type": "value_error", + } + ] } } def test_create_result_empty_testcase_dict(self): - r = self.app.post( - '/api/v2.0/results', json={'outcome': 'passed', 'testcase': {}}) + r = self.app.post("/api/v2.0/results", json={"outcome": "passed", "testcase": {}}) data = json.loads(r.data) assert r.status_code == 400 assert data == { - 'validation_error': { - 'body_params': [{ - 'loc': ['testcase'], - 'msg': 'testcase name must be non-empty', - 'type': 'value_error' - }] + "validation_error": { + "body_params": [ + { + "loc": ["testcase"], + "msg": "testcase name must be non-empty", + "type": "value_error", + } + ] } } def test_create_result_missing_testcase(self): - r = self.app.post('/api/v2.0/results', 
json={'outcome': 'passed'}) + r = self.app.post("/api/v2.0/results", json={"outcome": "passed"}) data = json.loads(r.data) assert r.status_code == 400 assert data == { - 'validation_error': { - 'body_params': [{ - 'loc': ['testcase'], - 'msg': 'field required', - 'type': 'value_error.missing' - }] + "validation_error": { + "body_params": [ + {"loc": ["testcase"], "msg": "field required", "type": "value_error.missing"} + ] } } def test_create_result_missing_outcome(self): - ref_data = json.dumps({'testcase': self.ref_testcase}) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + ref_data = json.dumps({"testcase": self.ref_testcase}) + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 400 assert data == { - 'validation_error': { - 'body_params': [{ - 'loc': ['outcome'], - 'msg': 'field required', - 'type': 'value_error.missing' - }] + "validation_error": { + "body_params": [ + {"loc": ["outcome"], "msg": "field required", "type": "value_error.missing"} + ] } } def test_create_result_multiple_groups(self): - uuid2 = '1c26effb-7c07-4d90-9428-86aac053288c' + uuid2 = "1c26effb-7c07-4d90-9428-86aac053288c" self.test_create_group() self.helper_create_group(uuid=uuid2) self.test_create_testcase() @@ -492,231 +506,246 @@ def test_create_result_multiple_groups(self): r, data = self.helper_create_result(groups=[self.ref_group, uuid2]) assert r.status_code == 201 - assert len(data['groups']) == 2 - assert self.ref_group_uuid in ' '.join(data['groups']) - assert uuid2 in ';'.join(data['groups']) + assert len(data["groups"]) == 2 + assert self.ref_group_uuid in " ".join(data["groups"]) + assert uuid2 in ";".join(data["groups"]) ref_result = copy.deepcopy(self.ref_result) - ref_result['groups'] = None - data['groups'] = None + ref_result["groups"] = None + data["groups"] = None assert data == ref_result def test_create_result_group_is_none(self): - ref_data = json.dumps(dict( - outcome=self.ref_result_outcome, - testcase=self.ref_testcase, - groups=None, - )) + ref_data = json.dumps( + dict( + outcome=self.ref_result_outcome, + testcase=self.ref_testcase, + groups=None, + ) + ) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 201 - assert data['groups'] == [] + assert data["groups"] == [] def test_create_result_group_did_not_exist(self): self.helper_create_result(groups=[self.ref_group]) - r = self.app.get('/api/v2.0/groups/%s' % self.ref_group_uuid) + r = self.app.get("/api/v2.0/groups/%s" % self.ref_group_uuid) data = json.loads(r.data) ref_group = copy.deepcopy(self.ref_group) - ref_group['results_count'] = 1 + ref_group["results_count"] = 1 assert r.status_code == 200 assert data == ref_group - uuid2 = '1c26effb-7c07-4d90-9428-86aac053288c' + uuid2 = "1c26effb-7c07-4d90-9428-86aac053288c" self.helper_create_result(groups=[uuid2]) - r = self.app.get('/api/v2.0/groups/%s' % uuid2) + r = self.app.get("/api/v2.0/groups/%s" % uuid2) data = json.loads(r.data) assert r.status_code == 200 - assert data['uuid'] == uuid2 - assert data['description'] is None - assert data['ref_url'] is None + assert data["uuid"] == uuid2 + assert data["description"] is None + assert data["ref_url"] is None def test_create_result_testcase_did_not_exist(self): self.helper_create_result(testcase=self.ref_testcase) - r = 
self.app.get('/api/v2.0/testcases/%s' % self.ref_testcase_name) + r = self.app.get("/api/v2.0/testcases/%s" % self.ref_testcase_name) data = json.loads(r.data) assert r.status_code == 200 assert data == self.ref_testcase - name2 = self.ref_testcase_name + '.fake' + name2 = self.ref_testcase_name + ".fake" self.helper_create_result(testcase=name2) - r = self.app.get('/api/v2.0/testcases/%s' % name2) + r = self.app.get("/api/v2.0/testcases/%s" % name2) data = json.loads(r.data) assert r.status_code == 200 - assert data['name'] == name2 + assert data["name"] == name2 def test_create_result_invalid_outcome(self): - ref_data = json.dumps({'outcome': 'FAKEOUTCOME', 'testcase': self.ref_testcase}) + ref_data = json.dumps({"outcome": "FAKEOUTCOME", "testcase": self.ref_testcase}) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 400 assert data == { - 'validation_error': { - 'body_params': [{ - 'loc': ['outcome'], - 'msg': 'must be one of: PASSED, INFO, FAILED, NEEDS_INSPECTION, AMAZING', - 'type': 'value_error' - }] + "validation_error": { + "body_params": [ + { + "loc": ["outcome"], + "msg": "must be one of: PASSED, INFO, FAILED, NEEDS_INSPECTION, AMAZING", + "type": "value_error", + } + ] } } def test_create_result_invalid_data(self): - ref_data = json.dumps({ - 'outcome': self.ref_result_outcome, - 'testcase': self.ref_testcase, - 'data': {'validkey': 1, 'invalid:key': 2, 'another:invalid:key': 3}, - }) + ref_data = json.dumps( + { + "outcome": self.ref_result_outcome, + "testcase": self.ref_testcase, + "data": {"validkey": 1, "invalid:key": 2, "another:invalid:key": 3}, + } + ) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 400 - assert data['message'].startswith("Colon not allowed in key name:") + assert data["message"].startswith("Colon not allowed in key name:") def test_create_result_submit_time_as_number(self): - ref_data = json.dumps(dict( - outcome=self.ref_result_outcome, - testcase=self.ref_testcase, - submit_time=1661324097123, - )) + ref_data = json.dumps( + dict( + outcome=self.ref_result_outcome, + testcase=self.ref_testcase, + submit_time=1661324097123, + ) + ) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 201, data - assert data['submit_time'] == '2022-08-24T06:54:57.123000' + assert data["submit_time"] == "2022-08-24T06:54:57.123000" def test_create_result_submit_time_as_number_string(self): - ref_data = json.dumps(dict( - outcome=self.ref_result_outcome, - testcase=self.ref_testcase, - submit_time="1661324097123", - )) + ref_data = json.dumps( + dict( + outcome=self.ref_result_outcome, + testcase=self.ref_testcase, + submit_time="1661324097123", + ) + ) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 201, data - assert data['submit_time'] == '2022-08-24T06:54:57.123000' + assert data["submit_time"] == "2022-08-24T06:54:57.123000" def 
test_create_result_submit_time_as_datetime(self): - for suffix in ('', 'Z', '+00:00', '+0000', '+00'): - ref_data = json.dumps(dict( - outcome=self.ref_result_outcome, - testcase=self.ref_testcase, - submit_time=f'2022-08-24T06:54:57.123456{suffix}', - )) + for suffix in ("", "Z", "+00:00", "+0000", "+00"): + ref_data = json.dumps( + dict( + outcome=self.ref_result_outcome, + testcase=self.ref_testcase, + submit_time=f"2022-08-24T06:54:57.123456{suffix}", + ) + ) - r = self.app.post( - '/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 201, data - assert data['submit_time'] == '2022-08-24T06:54:57.123456' + assert data["submit_time"] == "2022-08-24T06:54:57.123456" def test_create_result_submit_time_as_invalid(self): - ref_data = json.dumps(dict( - outcome=self.ref_result_outcome, - testcase=self.ref_testcase, - submit_time='now', - )) + ref_data = json.dumps( + dict( + outcome=self.ref_result_outcome, + testcase=self.ref_testcase, + submit_time="now", + ) + ) - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) assert r.status_code == 400, data assert data == { "validation_error": { - "body_params": [{ - "loc": ["submit_time"], - "msg": ( - "Expected timestamp in milliseconds or datetime" - " (in format YYYY-MM-DDTHH:MM:SS.ffffff), got 'now'" - ), - "type": "value_error" - }] + "body_params": [ + { + "loc": ["submit_time"], + "msg": ( + "Expected timestamp in milliseconds or datetime" + " (in format YYYY-MM-DDTHH:MM:SS.ffffff), got 'now'" + ), + "type": "value_error", + } + ] } } def test_get_result(self): self.test_create_result() - r = self.app.get('/api/v2.0/results/%d' % self.ref_result_id) + r = self.app.get("/api/v2.0/results/%d" % self.ref_result_id) data = json.loads(r.data) assert r.status_code == 200 assert data == self.ref_result def test_get_missing_result(self): - r = self.app.get('/api/v2.0/results/%d' % self.ref_result_id) + r = self.app.get("/api/v2.0/results/%d" % self.ref_result_id) data = json.loads(r.data) assert r.status_code == 404 - assert data['message'] == "Result not found" + assert data["message"] == "Result not found" def test_get_results(self): - r = self.app.get('/api/v2.0/results') + r = self.app.get("/api/v2.0/results") data = json.loads(r.data) assert r.status_code == 200 - assert data['data'] == [] + assert data["data"] == [] self.test_create_result() - r = self.app.get('/api/v2.0/results') + r = self.app.get("/api/v2.0/results") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result def test_get_results_sorted_by_submit_time_desc_by_default(self): r1 = self.helper_create_result() r2 = self.helper_create_result() - r = self.app.get('/api/v2.0/results') + r = self.app.get("/api/v2.0/results") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 - assert data['data'][0]['id']== r2[1]['id'] - assert data['data'][1]['id'] == r1[1]['id'] + assert data["data"][0]["id"] == r2[1]["id"] + assert data["data"][1]["id"] == r1[1]["id"] def test_get_results_by_group(self): - uuid2 = '1c26effb-7c07-4d90-9428-86aac053288c' + uuid2 = 
"1c26effb-7c07-4d90-9428-86aac053288c" self.helper_create_group(uuid=uuid2) self.test_create_result() self.helper_create_result(groups=[uuid2]) - r1 = self.app.get('/api/v2.0/groups/%s/results' % self.ref_group_uuid) - r2 = self.app.get('/api/v2.0/results?groups=%s' % self.ref_group_uuid) + r1 = self.app.get("/api/v2.0/groups/%s/results" % self.ref_group_uuid) + r2 = self.app.get("/api/v2.0/results?groups=%s" % self.ref_group_uuid) data1 = json.loads(r1.data) data2 = json.loads(r2.data) assert r1.status_code == 200, r1.text assert r2.status_code == 200, r2.text - assert len(data1['data']) == len(data2['data']) == 1 + assert len(data1["data"]) == len(data2["data"]) == 1 assert data1 == data2 - assert data1['data'][0] == self.ref_result + assert data1["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?groups=%s,%s' % (self.ref_group_uuid, uuid2)) + r = self.app.get("/api/v2.0/results?groups=%s,%s" % (self.ref_group_uuid, uuid2)) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 def test_get_results_by_testcase(self): name2 = self.ref_testcase_name + ".fake" @@ -725,22 +754,22 @@ def test_get_results_by_testcase(self): self.test_create_result() self.helper_create_result(testcase=name2) - r1 = self.app.get('/api/v2.0/testcases/%s/results' % self.ref_testcase_name) - r2 = self.app.get('/api/v2.0/results?testcases=%s' % self.ref_testcase_name) + r1 = self.app.get("/api/v2.0/testcases/%s/results" % self.ref_testcase_name) + r2 = self.app.get("/api/v2.0/results?testcases=%s" % self.ref_testcase_name) data1 = json.loads(r1.data) data2 = json.loads(r2.data) assert r1.status_code == 200, r1.text assert r2.status_code == 200, r2.text - assert data1['data'][0] == self.ref_result - assert data2['data'][0] == self.ref_result + assert data1["data"][0] == self.ref_result + assert data2["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?testcases=%s,%s' % (self.ref_testcase_name, name2)) + r = self.app.get("/api/v2.0/results?testcases=%s,%s" % (self.ref_testcase_name, name2)) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 def test_get_results_by_testcase_like(self): name2 = self.ref_testcase_name + ".fake" @@ -749,20 +778,22 @@ def test_get_results_by_testcase_like(self): self.test_create_result() self.helper_create_result(testcase=name2) - r1 = self.app.get('/api/v2.0/testcases/%s/results' % self.ref_testcase_name) - r2 = self.app.get('/api/v2.0/results?testcases:like=%s' % self.ref_testcase_name) + r1 = self.app.get("/api/v2.0/testcases/%s/results" % self.ref_testcase_name) + r2 = self.app.get("/api/v2.0/results?testcases:like=%s" % self.ref_testcase_name) data1 = json.loads(r1.data) data2 = json.loads(r2.data) assert r1.status_code == 200, r1.text assert r2.status_code == 200, r2.text - assert data1['data'][0] == self.ref_result - assert data2['data'][0] == self.ref_result + assert data1["data"][0] == self.ref_result + assert data2["data"][0] == self.ref_result - r1 = self.app.get('/api/v2.0/results?testcases:like=%s*' % (self.ref_testcase_name,)) - r2 = self.app.get('/api/v2.0/results?testcases:like=%s,%s*' % - (self.ref_testcase_name, self.ref_testcase_name)) + r1 = self.app.get("/api/v2.0/results?testcases:like=%s*" % (self.ref_testcase_name,)) + r2 = self.app.get( + "/api/v2.0/results?testcases:like=%s,%s*" + % (self.ref_testcase_name, self.ref_testcase_name) + ) data1 = json.loads(r1.data) data2 = json.loads(r2.data) @@ -772,20 
+803,20 @@ def test_get_results_by_testcase_like(self): def test_get_results_by_outcome(self): self.test_create_result() - self.helper_create_result(outcome='FAILED') + self.helper_create_result(outcome="FAILED") - r = self.app.get('/api/v2.0/results?outcome=PASSED') + r = self.app.get("/api/v2.0/results?outcome=PASSED") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?outcome=PASSED,FAILED') + r = self.app.get("/api/v2.0/results?outcome=PASSED,FAILED") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 2 + assert len(data["data"]) == 2 def test_get_results_sorting_by_submit_time(self): name1 = "aa_fake." + self.ref_testcase_name @@ -794,30 +825,30 @@ def test_get_results_sorting_by_submit_time(self): self.test_create_result() self.helper_create_result(testcase=name1) - r1 = self.app.get('/api/v2.0/results?_sort=desc:submit_time') + r1 = self.app.get("/api/v2.0/results?_sort=desc:submit_time") data1 = json.loads(r1.data) assert r1.status_code == 200 - assert len(data1['data']) == 2 + assert len(data1["data"]) == 2 - r2 = self.app.get('/api/v2.0/results?_sort=asc:submit_time') + r2 = self.app.get("/api/v2.0/results?_sort=asc:submit_time") data2 = json.loads(r2.data) assert r2.status_code == 200 - assert len(data2['data']) == 2 + assert len(data2["data"]) == 2 # Checks if the first result retrieved from a parameterless API call # is the last result of an API call with the '_sort' parameter and vice-versa. - assert data1['data'][0]['submit_time'] == data2['data'][1]['submit_time'] - assert data1['data'][1]['submit_time'] == data2['data'][0]['submit_time'] + assert data1["data"][0]["submit_time"] == data2["data"][1]["submit_time"] + assert data1["data"][1]["submit_time"] == data2["data"][0]["submit_time"] # Confirms if the results are in descending order. - assert data1['data'][0]['testcase']['name'] == name1 - assert data1['data'][1]['testcase']['name'] == self.ref_testcase_name + assert data1["data"][0]["testcase"]["name"] == name1 + assert data1["data"][1]["testcase"]["name"] == self.ref_testcase_name # Confirms if the results are in ascending order. 
- assert data2['data'][0]['testcase']['name'] == self.ref_testcase_name - assert data2['data'][1]['testcase']['name'] == name1 + assert data2["data"][0]["testcase"]["name"] == self.ref_testcase_name + assert data2["data"][1]["testcase"]["name"] == name1 def test_get_results_by_since(self): self.test_create_result() @@ -825,60 +856,60 @@ def test_get_results_by_since(self): before2 = (datetime.datetime.utcnow() - datetime.timedelta(seconds=99)).isoformat() after = (datetime.datetime.utcnow() + datetime.timedelta(seconds=100)).isoformat() - r = self.app.get('/api/v2.0/results?since=%s' % before1) + r = self.app.get("/api/v2.0/results?since=%s" % before1) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?since=%s,%s' % (before1, after)) + r = self.app.get("/api/v2.0/results?since=%s,%s" % (before1, after)) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?since=%s' % (after)) + r = self.app.get("/api/v2.0/results?since=%s" % (after)) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 0 + assert len(data["data"]) == 0 - r = self.app.get('/api/v2.0/results?since=%s,%s' % (before1, before2)) + r = self.app.get("/api/v2.0/results?since=%s,%s" % (before1, before2)) data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 0 + assert len(data["data"]) == 0 def test_get_results_by_result_data(self): self.test_create_result() - r = self.app.get('/api/v2.0/results?item=perl-Specio-0.25-1.fc26') + r = self.app.get("/api/v2.0/results?item=perl-Specio-0.25-1.fc26") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?item=perl-Specio-0.25-1.fc26&moo=boo,woof') + r = self.app.get("/api/v2.0/results?item=perl-Specio-0.25-1.fc26&moo=boo,woof") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?item=perl-Specio-0.25-1.fc26&moo=boo,fake') + r = self.app.get("/api/v2.0/results?item=perl-Specio-0.25-1.fc26&moo=boo,fake") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?moo:like=*oo*') + r = self.app.get("/api/v2.0/results?moo:like=*oo*") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result - r = self.app.get('/api/v2.0/results?moo:like=*fake*,*oo*') + r = self.app.get("/api/v2.0/results?moo:like=*fake*,*oo*") data = json.loads(r.data) assert r.status_code == 200 - assert len(data['data']) == 1 - assert data['data'][0] == self.ref_result + assert len(data["data"]) == 1 + assert data["data"][0] == self.ref_result def test_get_results_latest(self): 
self.helper_create_testcase() @@ -886,26 +917,26 @@ def test_get_results_latest(self): self.helper_create_testcase(name=self.ref_testcase_name + ".2") self.helper_create_result(outcome="PASSED") - r = self.app.get('/api/v2.0/results/latest') + r = self.app.get("/api/v2.0/results/latest") data = json.loads(r.data) - assert len(data['data']) == 1 + assert len(data["data"]) == 1 self.helper_create_result(outcome="FAILED") - r = self.app.get('/api/v2.0/results/latest') + r = self.app.get("/api/v2.0/results/latest") data = json.loads(r.data) - assert len(data['data']) == 1 - assert data['data'][0]['outcome'] == 'FAILED' + assert len(data["data"]) == 1 + assert data["data"][0]["outcome"] == "FAILED" self.helper_create_result(testcase=self.ref_testcase_name + ".1") - r = self.app.get('/api/v2.0/results/latest') + r = self.app.get("/api/v2.0/results/latest") data = json.loads(r.data) - assert len(data['data']) == 2 - assert data['data'][0]['testcase']['name'] == self.ref_testcase_name + ".1" - assert data['data'][1]['testcase']['name'] == self.ref_testcase_name - assert data['data'][1]['outcome'] == "FAILED" + assert len(data["data"]) == 2 + assert data["data"][0]["testcase"]["name"] == self.ref_testcase_name + ".1" + assert data["data"][1]["testcase"]["name"] == self.ref_testcase_name + assert data["data"][1]["outcome"] == "FAILED" def test_get_results_latest_modifiers(self): self.helper_create_testcase() @@ -916,102 +947,107 @@ def test_get_results_latest_modifiers(self): self.helper_create_result(outcome="FAILED") self.helper_create_result(testcase=self.ref_testcase_name + ".1", outcome="PASSED") self.helper_create_result( - testcase=self.ref_testcase_name + ".1", - groups=["foobargroup"], - outcome="FAILED" - ) + testcase=self.ref_testcase_name + ".1", groups=["foobargroup"], outcome="FAILED" + ) - r = self.app.get('/api/v2.0/results/latest?testcases=%s' % self.ref_testcase_name) + r = self.app.get("/api/v2.0/results/latest?testcases=%s" % self.ref_testcase_name) data = json.loads(r.data) - assert len(data['data']) == 1 - assert data['data'][0]['testcase']['name'] == self.ref_testcase_name - assert data['data'][0]['outcome'] == "FAILED" + assert len(data["data"]) == 1 + assert data["data"][0]["testcase"]["name"] == self.ref_testcase_name + assert data["data"][0]["outcome"] == "FAILED" - r = self.app.get('/api/v2.0/results/latest?testcases=%s,%s' % ( - self.ref_testcase_name, self.ref_testcase_name + '.1')) + r = self.app.get( + "/api/v2.0/results/latest?testcases=%s,%s" + % (self.ref_testcase_name, self.ref_testcase_name + ".1") + ) data = json.loads(r.data) - assert len(data['data']) == 2 - assert data['data'][0]['testcase']['name'] == self.ref_testcase_name + ".1" - assert data['data'][0]['outcome'] == "FAILED" - assert data['data'][1]['testcase']['name'] == self.ref_testcase_name - assert data['data'][1]['outcome'] == "FAILED" + assert len(data["data"]) == 2 + assert data["data"][0]["testcase"]["name"] == self.ref_testcase_name + ".1" + assert data["data"][0]["outcome"] == "FAILED" + assert data["data"][1]["testcase"]["name"] == self.ref_testcase_name + assert data["data"][1]["outcome"] == "FAILED" - r = self.app.get('/api/v2.0/results/latest?testcases:like=*') + r = self.app.get("/api/v2.0/results/latest?testcases:like=*") data = json.loads(r.data) - assert len(data['data']) == 2 - assert data['data'][0]['testcase']['name'] == self.ref_testcase_name + ".1" - assert data['data'][0]['outcome'] == "FAILED" - assert data['data'][1]['testcase']['name'] == self.ref_testcase_name - assert 
data['data'][1]['outcome'] == "FAILED" + assert len(data["data"]) == 2 + assert data["data"][0]["testcase"]["name"] == self.ref_testcase_name + ".1" + assert data["data"][0]["outcome"] == "FAILED" + assert data["data"][1]["testcase"]["name"] == self.ref_testcase_name + assert data["data"][1]["outcome"] == "FAILED" - r = self.app.get('/api/v2.0/results/latest?groups=%s' % self.ref_group_uuid) + r = self.app.get("/api/v2.0/results/latest?groups=%s" % self.ref_group_uuid) data = json.loads(r.data) - assert len(data['data']) == 2 - assert data['data'][0]['testcase']['name'] == self.ref_testcase_name + ".1" - assert data['data'][0]['outcome'] == "PASSED" - assert data['data'][1]['testcase']['name'] == self.ref_testcase_name - assert data['data'][1]['outcome'] == "FAILED" + assert len(data["data"]) == 2 + assert data["data"][0]["testcase"]["name"] == self.ref_testcase_name + ".1" + assert data["data"][0]["outcome"] == "PASSED" + assert data["data"][1]["testcase"]["name"] == self.ref_testcase_name + assert data["data"][1]["outcome"] == "FAILED" def test_get_results_latest_distinct_on(self): """This test requires PostgreSQL, because DISTINCT ON does work differently in SQLite""" - if os.getenv('NO_CAN_HAS_POSTGRES', None): - return - if resultsdb.app.config['SQLALCHEMY_DATABASE_URI'].startswith('sqlite'): - raise Exception("This test requires PostgreSQL to work properly. You can disable it by setting NO_CAN_HAS_POSTGRES env variable to any non-empty value") + self.require_postgres() self.helper_create_testcase() - self.helper_create_result(outcome="PASSED", data={'scenario': 'scenario1'}, testcase=self.ref_testcase_name) - self.helper_create_result(outcome="FAILED", data={'scenario': 'scenario2'}, testcase=self.ref_testcase_name) + self.helper_create_result( + outcome="PASSED", data={"scenario": "scenario1"}, testcase=self.ref_testcase_name + ) + self.helper_create_result( + outcome="FAILED", data={"scenario": "scenario2"}, testcase=self.ref_testcase_name + ) - r = self.app.get('/api/v2.0/results/latest?testcases=' + self.ref_testcase_name + '&_distinct_on=scenario') + r = self.app.get( + "/api/v2.0/results/latest?testcases=" + + self.ref_testcase_name + + "&_distinct_on=scenario" + ) data = json.loads(r.data) - assert len(data['data']) == 2 - assert data['data'][0]['data']['scenario'][0] == 'scenario2' - assert data['data'][1]['data']['scenario'][0] == 'scenario1' + assert len(data["data"]) == 2 + assert data["data"][0]["data"]["scenario"][0] == "scenario2" + assert data["data"][1]["data"]["scenario"][0] == "scenario1" - r = self.app.get('/api/v2.0/results/latest?testcases=' + self.ref_testcase_name) + r = self.app.get("/api/v2.0/results/latest?testcases=" + self.ref_testcase_name) data = json.loads(r.data) - assert len(data['data']) == 1 - assert data['data'][0]['data']['scenario'][0] == 'scenario2' + assert len(data["data"]) == 1 + assert data["data"][0]["data"]["scenario"][0] == "scenario2" def test_get_results_latest_distinct_on_more_specific_cases_1(self): """This test requires PostgreSQL, because DISTINCT ON does work differently in SQLite""" - if os.getenv('NO_CAN_HAS_POSTGRES', None): - return - if resultsdb.app.config['SQLALCHEMY_DATABASE_URI'].startswith('sqlite'): - raise Exception("This test requires PostgreSQL to work properly. 
You can disable it by setting NO_CAN_HAS_POSTGRES env variable to any non-empty value") + self.require_postgres() - ''' + """ | id | testcase | scenario | |----|----------|----------| | 1 | tc_1 | s_1 | | 2 | tc_2 | s_1 | | 3 | tc_2 | s_2 | | 4 | tc_3 | | - ''' - self.helper_create_result(outcome="PASSED", testcase='tc_1', data={'item': 'grub', 'scenario': 's_1'}) - self.helper_create_result(outcome="PASSED", testcase='tc_2', data={'item': 'grub', 'scenario': 's_1'}) - self.helper_create_result(outcome="PASSED", testcase='tc_2', data={'item': 'grub', 'scenario': 's_2'}) - self.helper_create_result(outcome="PASSED", testcase='tc_3', data={'item': 'grub'}) + """ + self.helper_create_result( + outcome="PASSED", testcase="tc_1", data={"item": "grub", "scenario": "s_1"} + ) + self.helper_create_result( + outcome="PASSED", testcase="tc_2", data={"item": "grub", "scenario": "s_1"} + ) + self.helper_create_result( + outcome="PASSED", testcase="tc_2", data={"item": "grub", "scenario": "s_2"} + ) + self.helper_create_result(outcome="PASSED", testcase="tc_3", data={"item": "grub"}) - r = self.app.get('/api/v2.0/results/latest?item=grub&_distinct_on=scenario') + r = self.app.get("/api/v2.0/results/latest?item=grub&_distinct_on=scenario") data = json.loads(r.data) - assert len(data['data']) == 4 + assert len(data["data"]) == 4 def test_get_results_latest_distinct_on_more_specific_cases_2(self): """This test requires PostgreSQL, because DISTINCT ON does work differently in SQLite""" - if os.getenv('NO_CAN_HAS_POSTGRES', None): - return - if resultsdb.app.config['SQLALCHEMY_DATABASE_URI'].startswith('sqlite'): - raise Exception("This test requires PostgreSQL to work properly. You can disable it by setting NO_CAN_HAS_POSTGRES env variable to any non-empty value") + self.require_postgres() - ''' + """ | id | testcase | scenario | |----|----------|----------| | 1 | tc_1 | s_1 | @@ -1019,26 +1055,29 @@ def test_get_results_latest_distinct_on_more_specific_cases_2(self): | 3 | tc_2 | s_2 | | 4 | tc_3 | | | 5 | tc_1 | | - ''' - self.helper_create_result(outcome="PASSED", testcase='tc_1', data={'item': 'grub', 'scenario': 's_1'}) - self.helper_create_result(outcome="PASSED", testcase='tc_2', data={'item': 'grub', 'scenario': 's_1'}) - self.helper_create_result(outcome="PASSED", testcase='tc_2', data={'item': 'grub', 'scenario': 's_2'}) - self.helper_create_result(outcome="PASSED", testcase='tc_3', data={'item': 'grub'}) - self.helper_create_result(outcome="FAILED", testcase='tc_1', data={'item': 'grub'}) + """ + self.helper_create_result( + outcome="PASSED", testcase="tc_1", data={"item": "grub", "scenario": "s_1"} + ) + self.helper_create_result( + outcome="PASSED", testcase="tc_2", data={"item": "grub", "scenario": "s_1"} + ) + self.helper_create_result( + outcome="PASSED", testcase="tc_2", data={"item": "grub", "scenario": "s_2"} + ) + self.helper_create_result(outcome="PASSED", testcase="tc_3", data={"item": "grub"}) + self.helper_create_result(outcome="FAILED", testcase="tc_1", data={"item": "grub"}) - r = self.app.get('/api/v2.0/results/latest?item=grub&_distinct_on=scenario') + r = self.app.get("/api/v2.0/results/latest?item=grub&_distinct_on=scenario") data = json.loads(r.data) - assert len(data['data']) == 5 + assert len(data["data"]) == 5 - def test_get_results_latest_distinct_on_more_specific_cases_2(self): + def test_get_results_latest_distinct_on_more_specific_cases_3(self): """This test requires PostgreSQL, because DISTINCT ON does work differently in SQLite""" - if 
os.getenv('NO_CAN_HAS_POSTGRES', None): - return - if resultsdb.app.config['SQLALCHEMY_DATABASE_URI'].startswith('sqlite'): - raise Exception("This test requires PostgreSQL to work properly. You can disable it by setting NO_CAN_HAS_POSTGRES env variable to any non-empty value") + self.require_postgres() - ''' + """ | id | testcase | scenario | |----|----------|----------| | 1 | tc_1 | s_1 | @@ -1047,76 +1086,85 @@ def test_get_results_latest_distinct_on_more_specific_cases_2(self): | 4 | tc_3 | | | 5 | tc_1 | | | 6 | tc_1 | s_1 | - ''' - self.helper_create_result(outcome="PASSED", testcase='tc_1', data={'item': 'grub', 'scenario': 's_1'}) - self.helper_create_result(outcome="PASSED", testcase='tc_2', data={'item': 'grub', 'scenario': 's_1'}) - self.helper_create_result(outcome="PASSED", testcase='tc_2', data={'item': 'grub', 'scenario': 's_2'}) - self.helper_create_result(outcome="PASSED", testcase='tc_3', data={'item': 'grub'}) - self.helper_create_result(outcome="FAILED", testcase='tc_1', data={'item': 'grub'}) - self.helper_create_result(outcome="INFO", testcase='tc_1', data={'item': 'grub', 'scenario': 's_1'}) + """ + self.helper_create_result( + outcome="PASSED", testcase="tc_1", data={"item": "grub", "scenario": "s_1"} + ) + self.helper_create_result( + outcome="PASSED", testcase="tc_2", data={"item": "grub", "scenario": "s_1"} + ) + self.helper_create_result( + outcome="PASSED", testcase="tc_2", data={"item": "grub", "scenario": "s_2"} + ) + self.helper_create_result(outcome="PASSED", testcase="tc_3", data={"item": "grub"}) + self.helper_create_result(outcome="FAILED", testcase="tc_1", data={"item": "grub"}) + self.helper_create_result( + outcome="INFO", testcase="tc_1", data={"item": "grub", "scenario": "s_1"} + ) - r = self.app.get('/api/v2.0/results/latest?item=grub&_distinct_on=scenario') + r = self.app.get("/api/v2.0/results/latest?item=grub&_distinct_on=scenario") data = json.loads(r.data) - assert len(data['data']) == 5 - tc_1s = [r for r in data['data'] if r['testcase']['name'] == 'tc_1'] + assert len(data["data"]) == 5 + tc_1s = [r for r in data["data"] if r["testcase"]["name"] == "tc_1"] assert len(tc_1s) == 2 - assert tc_1s[0]['outcome'] == 'INFO' - assert tc_1s[1]['outcome'] == 'FAILED' + assert tc_1s[0]["outcome"] == "INFO" + assert tc_1s[1]["outcome"] == "FAILED" def test_get_results_latest_distinct_on_with_scenario_not_defined(self): """This test requires PostgreSQL, because DISTINCT ON does work differently in SQLite""" - if os.getenv('NO_CAN_HAS_POSTGRES', None): - return - if resultsdb.app.config['SQLALCHEMY_DATABASE_URI'].startswith('sqlite'): - raise Exception("This test requires PostgreSQL to work properly. 
You can disable it by setting NO_CAN_HAS_POSTGRES env variable to any non-empty value") + self.require_postgres() self.helper_create_testcase() self.helper_create_result(outcome="PASSED", testcase=self.ref_testcase_name) self.helper_create_result(outcome="FAILED", testcase=self.ref_testcase_name) - r = self.app.get('/api/v2.0/results/latest?testcases=' + self.ref_testcase_name + '&_distinct_on=scenario') + r = self.app.get( + "/api/v2.0/results/latest?testcases=" + + self.ref_testcase_name + + "&_distinct_on=scenario" + ) data = json.loads(r.data) - assert len(data['data']) == 1 - assert data['data'][0]['outcome'] == 'FAILED' + assert len(data["data"]) == 1 + assert data["data"][0]["outcome"] == "FAILED" def test_get_results_latest_distinct_on_wrong_params(self): - r = self.app.get('/api/v2.0/results/latest?_distinct_on=scenario') + r = self.app.get("/api/v2.0/results/latest?_distinct_on=scenario") data = json.loads(r.data) assert r.status_code == 400 - assert data['message'] == "Please, provide at least one filter beside '_distinct_on'" + assert data["message"] == "Please, provide at least one filter beside '_distinct_on'" def test_message_publication(self): self.helper_create_result() plugin = resultsdb.messaging.DummyPlugin assert len(plugin.history) == 1, plugin.history - assert plugin.history[0]['data']['item'] == [self.ref_result_item] - assert plugin.history[0]['data']['type'] == [self.ref_result_type] - assert plugin.history[0]['id'] == 1 - assert plugin.history[0]['outcome'] == self.ref_result_outcome - assert plugin.history[0]['ref_url'] == self.ref_result_ref_url - assert plugin.history[0]['groups'] == [self.ref_group_uuid] - assert plugin.history[0]['note'] == self.ref_result_note - assert plugin.history[0]['testcase']['name'] == self.ref_testcase_name + assert plugin.history[0]["data"]["item"] == [self.ref_result_item] + assert plugin.history[0]["data"]["type"] == [self.ref_result_type] + assert plugin.history[0]["id"] == 1 + assert plugin.history[0]["outcome"] == self.ref_result_outcome + assert plugin.history[0]["ref_url"] == self.ref_result_ref_url + assert plugin.history[0]["groups"] == [self.ref_group_uuid] + assert plugin.history[0]["note"] == self.ref_result_note + assert plugin.history[0]["testcase"]["name"] == self.ref_testcase_name def test_get_outcomes_on_landing_page(self): - r = self.app.get('/api/v2.0/') + r = self.app.get("/api/v2.0/") data = json.loads(r.data) assert r.status_code == 300 - assert data['outcomes'] == ['PASSED', 'INFO', 'FAILED', 'NEEDS_INSPECTION', 'AMAZING'] + assert data["outcomes"] == ["PASSED", "INFO", "FAILED", "NEEDS_INSPECTION", "AMAZING"] def test_healthcheck_success(self): - r = self.app.get('/api/v2.0/healthcheck') + r = self.app.get("/api/v2.0/healthcheck") assert r.status_code == 200 data = json.loads(r.data) - assert data.get('message') == 'Health check OK' + assert data.get("message") == "Health check OK" def test_healthcheck_fail(self): - resultsdb.db.session.execute('DROP TABLE result CASCADE') - r = self.app.get('/api/v2.0/healthcheck') + resultsdb.db.session.execute("DROP TABLE result CASCADE") + r = self.app.get("/api/v2.0/healthcheck") assert r.status_code == 503 data = json.loads(r.data) - assert data.get('message') == 'Unable to communicate with database' + assert data.get("message") == "Unable to communicate with database" diff --git a/testing/functest_create_fedmsg.py b/testing/functest_create_fedmsg.py index e9afadf..d3947f2 100644 --- a/testing/functest_create_fedmsg.py +++ b/testing/functest_create_fedmsg.py @@ -26,34 
+26,31 @@ class MyResultData(object): - def __init__(self, key, value): self.key = key self.value = value class MyResult(object): - def __init__(self, id, testcase_name, outcome, item, item_type, arch): self.id = id self.testcase_name = testcase_name self.outcome = outcome self.data = [ - MyResultData('item', item), - MyResultData('type', item_type), - MyResultData('arch', arch), + MyResultData("item", item), + MyResultData("type", item_type), + MyResultData("arch", arch), ] class AboutTime(object): - def __eq__(self, value): start = (datetime.datetime.utcnow() - datetime.timedelta(seconds=10)).isoformat() stop = (datetime.datetime.utcnow() + datetime.timedelta(seconds=10)).isoformat() return start <= value <= stop -class TestFuncCreateFedmsg(): +class TestFuncCreateFedmsg: def setup_method(self, method): resultsdb.db.session.rollback() resultsdb.db.drop_all() @@ -65,23 +62,29 @@ def setup_method(self, method): self.ref_testcase_name = "scratch.testing.mytestcase" # Group data - self.ref_group_uuid = '3ce5f6d7-ce34-489b-ab61-325ce634eab5' + self.ref_group_uuid = "3ce5f6d7-ce34-489b-ab61-325ce634eab5" # Result data - self.ref_result_outcome = 'PASSED' - self.ref_result_note = 'Result Note' - self.ref_result_item = 'perl-Specio-0.25-1.fc26' - self.ref_result_type = 'koji_build' - self.ref_result_arch = 'x86_64' + self.ref_result_outcome = "PASSED" + self.ref_result_note = "Result Note" + self.ref_result_item = "perl-Specio-0.25-1.fc26" + self.ref_result_type = "koji_build" + self.ref_result_arch = "x86_64" self.ref_result_data = { - 'item': self.ref_result_item, - 'type': self.ref_result_type, - 'arch': self.ref_result_arch, - 'moo': ['boo', 'woof'], + "item": self.ref_result_item, + "type": self.ref_result_type, + "arch": self.ref_result_arch, + "moo": ["boo", "woof"], } - self.ref_result_ref_url = 'http://example.com/testing.result' + self.ref_result_ref_url = "http://example.com/testing.result" self.ref_result_obj = MyResult( - 0, self.ref_testcase_name, self.ref_result_outcome, self.ref_result_item, self.ref_result_type, self.ref_result_arch) + 0, + self.ref_testcase_name, + self.ref_result_outcome, + self.ref_result_item, + self.ref_result_type, + self.ref_result_arch, + ) def teardown_method(self, method): # Reset this for each test. 
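Note: the AboutTime class in the hunk above implements fuzzy timestamp equality. It compares equal to any ISO-8601 string within ten seconds of the current UTC time, relying on the fact that same-format ISO timestamps sort lexicographically. A hypothetical usage sketch, not part of the patch:

    import datetime

    # AboutTime() stands in for "roughly now" in payload assertions, so a test
    # does not depend on the exact submit_time generated by the server:
    expected = {"submit_time": AboutTime()}
    actual = {"submit_time": datetime.datetime.utcnow().isoformat()}
    # str.__eq__ returns NotImplemented for an AboutTime operand, so Python
    # falls back to AboutTime.__eq__, which performs the +/-10s range check:
    assert actual == expected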
@@ -97,16 +100,18 @@ def helper_create_result(self, outcome=None, groups=None, testcase=None, data=No if data is None: data = self.ref_result_data - ref_data = json.dumps(dict( - outcome=outcome, - testcase=testcase, - groups=groups, - note=self.ref_result_note, - data=data, - ref_url=self.ref_result_ref_url, - )) - - r = self.app.post('/api/v2.0/results', data=ref_data, content_type='application/json') + ref_data = json.dumps( + dict( + outcome=outcome, + testcase=testcase, + groups=groups, + note=self.ref_result_note, + data=data, + ref_url=self.ref_result_ref_url, + ) + ) + + r = self.app.post("/api/v2.0/results", data=ref_data, content_type="application/json") data = json.loads(r.data) return r, data @@ -123,11 +128,11 @@ def test_get_prev_result_exists(self): assert prev_result.outcome == self.ref_result_outcome assert prev_result.testcase_name == self.ref_testcase_name for result_data in prev_result.data: - if result_data.key == 'item': + if result_data.key == "item": assert result_data.value == self.ref_result_item - if result_data.key == 'type': + if result_data.key == "type": assert result_data.value == self.ref_result_type - if result_data.key == 'arch': + if result_data.key == "arch": assert result_data.value == self.ref_result_arch self.helper_create_result() @@ -137,16 +142,16 @@ def test_get_prev_result_exists(self): assert prev_result.outcome == self.ref_result_outcome assert prev_result.testcase_name == self.ref_testcase_name for result_data in prev_result.data: - if result_data.key == 'item': + if result_data.key == "item": assert result_data.value == self.ref_result_item - if result_data.key == 'type': + if result_data.key == "type": assert result_data.value == self.ref_result_type - if result_data.key == 'arch': + if result_data.key == "arch": assert result_data.value == self.ref_result_arch - ref_outcome = 'FAILED' + ref_outcome = "FAILED" if self.ref_result_outcome == ref_outcome: - ref_outcome = 'PASSED' + ref_outcome = "PASSED" self.helper_create_result(outcome=ref_outcome) prev_result = resultsdb.messaging.get_prev_result(self.ref_result_obj) @@ -154,16 +159,16 @@ def test_get_prev_result_exists(self): assert prev_result.outcome == ref_outcome assert prev_result.testcase_name == self.ref_testcase_name for result_data in prev_result.data: - if result_data.key == 'item': + if result_data.key == "item": assert result_data.value == self.ref_result_item - if result_data.key == 'type': + if result_data.key == "type": assert result_data.value == self.ref_result_type - if result_data.key == 'arch': + if result_data.key == "arch": assert result_data.value == self.ref_result_arch def test_get_prev_result_different_item(self): data = copy.deepcopy(self.ref_result_data) - data['item'] = data['item'] + '.fake' + data["item"] = data["item"] + ".fake" self.helper_create_result(data=data) prev_result = resultsdb.messaging.get_prev_result(self.ref_result_obj) @@ -171,7 +176,7 @@ def test_get_prev_result_different_item(self): def test_get_prev_result_different_type(self): data = copy.deepcopy(self.ref_result_data) - data['type'] = data['type'] + '.fake' + data["type"] = data["type"] + ".fake" self.helper_create_result(data=data) prev_result = resultsdb.messaging.get_prev_result(self.ref_result_obj) @@ -179,14 +184,14 @@ def test_get_prev_result_different_type(self): def test_get_prev_result_different_arch(self): data = copy.deepcopy(self.ref_result_data) - data['arch'] = data['arch'] + '.fake' + data["arch"] = data["arch"] + ".fake" self.helper_create_result(data=data) prev_result = 
resultsdb.messaging.get_prev_result(self.ref_result_obj) assert prev_result is None def test_get_prev_result_different_testcase_name(self): - self.helper_create_result(testcase={'name': self.ref_testcase_name + '.fake'}) + self.helper_create_result(testcase={"name": self.ref_testcase_name + ".fake"}) prev_result = resultsdb.messaging.get_prev_result(self.ref_result_obj) assert prev_result is None diff --git a/testing/test_api_v3.py b/testing/test_api_v3.py index da385c2..0adf0ed 100644 --- a/testing/test_api_v3.py +++ b/testing/test_api_v3.py @@ -18,9 +18,7 @@ def app(): def mock_ldap(): with patch("ldap.initialize") as ldap_init: con = Mock() - con.search_s.return_value = [ - ("ou=Groups,dc=example,dc=com", {"cn": [b"testgroup1"]}) - ] + con.search_s.return_value = [("ou=Groups,dc=example,dc=com", {"cn": [b"testgroup1"]})] ldap_init.return_value = con yield con @@ -244,10 +242,7 @@ def test_api_v3_permission_denied(client, permissions): data = brew_build_request_data() r = client.post("/api/v3/results/brew-builds", json=data) assert r.status_code == 401, r.text - assert ( - "You are not authorized to submit a result for the test case testcase1" - in r.text - ) + assert "You are not authorized to submit a result for the test case testcase1" in r.text def test_api_v3_permission_matches_username(client, permissions): diff --git a/testing/test_general.py b/testing/test_general.py index 9286f52..91693af 100644 --- a/testing/test_general.py +++ b/testing/test_general.py @@ -7,18 +7,16 @@ class MyRequest(object): - def __init__(self, url): self.url = url -class TestPrevNextURL(): - +class TestPrevNextURL: def setup_method(self, method): - self.rq = MyRequest(url='') + self.rq = MyRequest(url="") def test_no_data_no_page_in_url(self, monkeypatch): - monkeypatch.setattr(apiv2, 'request', self.rq) + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls([], 1) assert data == [] @@ -26,8 +24,8 @@ def test_no_data_no_page_in_url(self, monkeypatch): assert next is None def test_no_data_page_in_url(self, monkeypatch): - self.rq.url = '?page=0' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "?page=0" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls([], 1) assert data == [] @@ -35,61 +33,60 @@ def test_no_data_page_in_url(self, monkeypatch): assert next is None def test_data_no_page_in_url(self, monkeypatch): - self.rq.url = 'URL' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "URL" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls(list(range(10)), 1) assert data == [0] assert prev is None - assert next == 'URL?page=1' + assert next == "URL?page=1" def test_data_no_page_in_url_stuff_in_url(self, monkeypatch): - self.rq.url = 'URL?stuff=some' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "URL?stuff=some" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls(list(range(10)), 1) assert data == [0] assert prev is None - assert next == 'URL?stuff=some&page=1' + assert next == "URL?stuff=some&page=1" def test_data_page_and_limit_in_url(self, monkeypatch): - self.rq.url = 'URL?page=1&limit=1' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "URL?page=1&limit=1" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls(list(range(10)), 1) assert data == [0] - assert prev == 'URL?page=0&limit=1' - assert next == 'URL?page=2&limit=1' + assert prev == "URL?page=0&limit=1" 
+ assert next == "URL?page=2&limit=1" - self.rq.url = 'URL?limit=1&page=1' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "URL?limit=1&page=1" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls(list(range(10)), 1) assert data == [0] - assert prev == 'URL?limit=1&page=0' - assert next == 'URL?limit=1&page=2' + assert prev == "URL?limit=1&page=0" + assert next == "URL?limit=1&page=2" - self.rq.url = 'URL&page=1&limit=1' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "URL&page=1&limit=1" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls(list(range(10)), 1) assert data == [0] - assert prev == 'URL&page=0&limit=1' - assert next == 'URL&page=2&limit=1' + assert prev == "URL&page=0&limit=1" + assert next == "URL&page=2&limit=1" - self.rq.url = 'URL&limit=1&page=1' - monkeypatch.setattr(apiv2, 'request', self.rq) + self.rq.url = "URL&limit=1&page=1" + monkeypatch.setattr(apiv2, "request", self.rq) data, prev, next = apiv2.prev_next_urls(list(range(10)), 1) assert data == [0] - assert prev == 'URL&limit=1&page=0' - assert next == 'URL&limit=1&page=2' + assert prev == "URL&limit=1&page=0" + assert next == "URL&limit=1&page=2" -class TestParseSince(): - +class TestParseSince: def setup_method(self, method): - self.date_str = '2016-01-01T01:02:03.04' + self.date_str = "2016-01-01T01:02:03.04" self.date_obj = datetime.datetime.strptime(self.date_str, "%Y-%m-%dT%H:%M:%S.%f") def test_parse_start(self): @@ -98,30 +95,30 @@ def test_parse_start(self): assert end is None def test_parse_start_with_timezone_info(self): - start, end = parse_since(self.date_str + 'Z') + start, end = parse_since(self.date_str + "Z") assert start == self.date_obj assert end is None - start, end = parse_since(self.date_str + '+01') + start, end = parse_since(self.date_str + "+01") assert start == self.date_obj assert end is None def test_parse_end(self): - start, end = parse_since(self.date_str + ',' + self.date_str) + start, end = parse_since(self.date_str + "," + self.date_str) assert start == self.date_obj assert end == self.date_obj -class TestMessaging(): - +class TestMessaging: def test_load_plugin(self): - plugin = messaging.load_messaging_plugin('dummy', {}) + plugin = messaging.load_messaging_plugin("dummy", {}) assert isinstance(plugin, messaging.DummyPlugin) try: - plugin = messaging.load_messaging_plugin('fedmsg', {}) + plugin = messaging.load_messaging_plugin("fedmsg", {}) except KeyError as err: if "not found" in str(err): - print("""=============== HINT =============== + print( + """=============== HINT =============== This exception can be caused by the fact, that you did not run `python setup.py develop` before executing the testsuite. 
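Note: load_messaging_plugin, exercised in the hunk above, resolves plugin names through setuptools entry points; that is why the HINT text asks whether `python setup.py develop` was run, since installing the package is what registers the entry points. A rough sketch of such a lookup, with the entry-point group name assumed for illustration (the real implementation lives in resultsdb/messaging.py and is not part of this diff):

    import pkg_resources

    def load_messaging_plugin(name, kwargs):
        # Collect plugins registered under an assumed entry-point group name.
        plugins = {
            ep.name: ep
            for ep in pkg_resources.iter_entry_points("resultsdb.messaging.plugins")
        }
        if name not in plugins:
            # Mirrors the KeyError("... not found") path the test handles.
            raise KeyError("messaging plugin %s not found" % name)
        plugin_cls = plugins[name].load()
        return plugin_cls(**kwargs)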
@@ -131,7 +128,8 @@ def test_load_plugin(self): If you ran `python setup.py develop` and are still seeing this error, then: - you might me missing the 'fedmsg' entrypoint in setup.py - - there can be an error in the plugin loading code""") + - there can be an error in the plugin loading code""" + ) raise assert isinstance(plugin, messaging.FedmsgPlugin), ( "check whether `fedmsg` entrypoint in setup.py points to" @@ -140,45 +138,44 @@ def test_load_plugin(self): def test_load_stomp(self): message_bus_kwargs = { - 'destination': 'results.new', - 'connection': { - 'host_and_ports': [('localhost', 1234)], + "destination": "results.new", + "connection": { + "host_and_ports": [("localhost", 1234)], }, } - plugin = messaging.load_messaging_plugin('stomp', message_bus_kwargs) + plugin = messaging.load_messaging_plugin("stomp", message_bus_kwargs) assert isinstance(plugin, messaging.StompPlugin) - assert plugin.destination == 'results.new' + assert plugin.destination == "results.new" def test_stomp_ssl(self): message_bus_kwargs = { - 'destination': 'results.new', - 'connection': { - 'host_and_ports': [('localhost', 1234)], - - 'use_ssl': True, - 'ssl_version': ssl.PROTOCOL_TLSv1_2, - 'ssl_key_file': '/etc/secret/umb-client.key', - 'ssl_cert_file': '/etc/secret/umb-client.crt', - 'ssl_ca_certs': '/etc/secret/ca.pem' + "destination": "results.new", + "connection": { + "host_and_ports": [("localhost", 1234)], + "use_ssl": True, + "ssl_version": ssl.PROTOCOL_TLSv1_2, + "ssl_key_file": "/etc/secret/umb-client.key", + "ssl_cert_file": "/etc/secret/umb-client.crt", + "ssl_ca_certs": "/etc/secret/ca.pem", }, } # Run twice to ensure that the original configuration is not modified. for _ in (1, 2): - plugin = messaging.load_messaging_plugin('stomp', message_bus_kwargs) + plugin = messaging.load_messaging_plugin("stomp", message_bus_kwargs) assert plugin.connection == { - 'host_and_ports': [('localhost', 1234)], + "host_and_ports": [("localhost", 1234)], } assert plugin.use_ssl is True assert plugin.ssl_args == { - 'for_hosts': [('localhost', 1234)], - 'key_file': '/etc/secret/umb-client.key', - 'cert_file': '/etc/secret/umb-client.crt', - 'ca_certs': '/etc/secret/ca.pem', - 'ssl_version': ssl.PROTOCOL_TLSv1_2, + "for_hosts": [("localhost", 1234)], + "key_file": "/etc/secret/umb-client.key", + "cert_file": "/etc/secret/umb-client.crt", + "ca_certs": "/etc/secret/ca.pem", + "ssl_version": ssl.PROTOCOL_TLSv1_2, } -class TestGetResultsParseArgs(): +class TestGetResultsParseArgs: # TODO: write something! pass diff --git a/tox.ini b/tox.ini index f89637d..e55121a 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py39 +envlist = black,flake8,py39 requires = tox-docker isolated_build = True @@ -39,6 +39,13 @@ commands = mypy -p resultsdb --install-types --non-interactive --ignore-missing-imports --exclude resultsdb/alembic +[testenv:black] +skip_install = true +deps = + black +commands = + black --check --diff --line-length=99 resultsdb testing + [testenv:py39] docker = resultsdb-postgres extras = diff --git a/wsgi.py b/wsgi.py index bc863f7..88b5d4e 100644 --- a/wsgi.py +++ b/wsgi.py @@ -1 +1,3 @@ -from resultsdb import app as application +# SPDX-License-Identifier: GPL-2.0+ +import resultsdb +application = resultsdb.app
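Note: several hunks above replace a repeated inline PostgreSQL guard (the NO_CAN_HAS_POSTGRES check followed by a sqlite URI check) with a single self.require_postgres() call whose definition lies outside this excerpt. A minimal sketch consistent with the removed inline code, assuming the helper uses pytest.skip rather than the old bare return (a return inside a helper would not end the calling test):

    import os
    import pytest
    import resultsdb

    # Method on the test class in the real code; shown standalone here.
    def require_postgres(self):
        # Skip PostgreSQL-only tests when explicitly disabled.
        if os.getenv("NO_CAN_HAS_POSTGRES", None):
            pytest.skip("Disabled via NO_CAN_HAS_POSTGRES")
        # DISTINCT ON behaves differently on SQLite, so insist on PostgreSQL.
        if resultsdb.app.config["SQLALCHEMY_DATABASE_URI"].startswith("sqlite"):
            raise Exception(
                "This test requires PostgreSQL to work properly. You can disable"
                " it by setting the NO_CAN_HAS_POSTGRES env variable to any"
                " non-empty value"
            )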