Skip to content

Commit

Permalink
CI: Enable black and flake8 checks
Browse files Browse the repository at this point in the history
  • Loading branch information
hluk committed Dec 1, 2022
1 parent 4fe8563 commit e381fc3
Show file tree
Hide file tree
Showing 34 changed files with 1,318 additions and 1,152 deletions.
85 changes: 46 additions & 39 deletions resultsdb/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@

# Flask App
app = Flask(__name__)
# Placeholder only — production deployments must override it (enforced further below).
app.secret_key = "replace-me-with-something-random"

# make sure app behaves when behind a proxy
app.wsgi_app = proxy.ReverseProxied(app.wsgi_app)

# Keep a reference to the stock jsonify so the JSONP wrapper below can delegate to it.
original_jsonify = flask.jsonify

# Expose the __version__ variable in templates
app.jinja_env.globals["app_version"] = __version__


def jsonify_with_jsonp(*args, **kwargs):
    """Drop-in replacement for ``flask.jsonify`` that adds JSONP support.

    When the request carries a ``callback`` query argument, the JSON body is
    wrapped in a JavaScript function call (``callback(payload);``) and the
    response mimetype is switched to ``application/javascript``; otherwise
    the response is returned unchanged.
    """
    response = original_jsonify(*args, **kwargs)

    callback = flask.request.args.get("callback", None)

    if callback:
        # Guard against list-like values.  NOTE: the original checked
        # `basestring`, which only exists on Python 2 and raises NameError
        # on Python 3 — `str` is the correct check here.
        if not isinstance(callback, str):
            callback = callback[0]
        response.mimetype = "application/javascript"
        response.set_data("%s(%s);" % (callback, response.get_data()))

    return response


flask.jsonify = jsonify_with_jsonp

# Checks for env variable OPENSHIFT_PROD to trigger OpenShift codepath on init
# The main difference is that settings will be queried from env (check config.openshift_config())
# Possible values are:
# "1" - OpenShift production deployment
# "0" - OpenShift testing deployment
openshift = os.getenv("OPENSHIFT_PROD")

# Load default config, then override that with a config file
if os.getenv("DEV") == "true":
    default_config_obj = "resultsdb.config.DevelopmentConfig"
    default_config_file = os.getcwd() + "/conf/settings.py"
elif os.getenv("TEST") == "true" or openshift == "0":
    default_config_obj = "resultsdb.config.TestingConfig"
    default_config_file = ""
else:
    default_config_obj = "resultsdb.config.ProductionConfig"
    default_config_file = "/etc/resultsdb/settings.py"

app.config.from_object(default_config_obj)

if openshift:
    config.openshift_config(app.config, openshift)

# RESULTSDB_CONFIG, if set, takes precedence over the environment-derived default path.
config_file = os.environ.get("RESULTSDB_CONFIG", default_config_file)
if os.path.exists(config_file):
    app.config.from_pyfile(config_file)

# Refuse to run production with the placeholder secret key set above.
if app.config["PRODUCTION"]:
    if app.secret_key == "replace-me-with-something-random":
        raise Warning("You need to change the app.secret_key value for production")

def setup_logging():
# Use LOGGING if defined instead of the old options
log_config = app.config.get('LOGGING')
log_config = app.config.get("LOGGING")
if log_config:
logging_config.dictConfig(log_config)
return

fmt = '[%(filename)s:%(lineno)d] ' if app.debug else '%(module)-12s '
fmt += '%(asctime)s %(levelname)-7s %(message)s'
datefmt = '%Y-%m-%d %H:%M:%S'
fmt = "[%(filename)s:%(lineno)d] " if app.debug else "%(module)-12s "
fmt += "%(asctime)s %(levelname)-7s %(message)s"
datefmt = "%Y-%m-%d %H:%M:%S"
loglevel = logging.DEBUG if app.debug else logging.INFO
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

root_logger = logging.getLogger('')
root_logger = logging.getLogger("")
root_logger.setLevel(logging.DEBUG)

# Keep the old way to setup logging in settings.py or config.py, example:
# LOGFILE = '/var/log/resultsdb/resultsdb.log'
# FILE_LOGGING = False
# SYSLOG_LOGGING = False
# STREAM_LOGGING = True
if app.config['STREAM_LOGGING']:
if app.config["STREAM_LOGGING"]:
print("doing stream logging")
stream_handler = logging.StreamHandler()
stream_handler.setLevel(loglevel)
stream_handler.setFormatter(formatter)
root_logger.addHandler(stream_handler)
app.logger.addHandler(stream_handler)

if app.config['SYSLOG_LOGGING']:
if app.config["SYSLOG_LOGGING"]:
print("doing syslog logging")
syslog_handler = logging.handlers.SysLogHandler(address='/dev/log',
facility=logging.handlers.SysLogHandler.LOG_LOCAL4)
syslog_handler = logging.handlers.SysLogHandler(
address="/dev/log", facility=logging.handlers.SysLogHandler.LOG_LOCAL4
)
syslog_handler.setLevel(loglevel)
syslog_handler.setFormatter(formatter)
root_logger.addHandler(syslog_handler)
app.logger.addHandler(syslog_handler)

if app.config['FILE_LOGGING'] and app.config['LOGFILE']:
print("doing file logging to %s" % app.config['LOGFILE'])
if app.config["FILE_LOGGING"] and app.config["LOGFILE"]:
print("doing file logging to %s" % app.config["LOGFILE"])
file_handler = logging.handlers.RotatingFileHandler(
app.config['LOGFILE'], maxBytes=500000, backupCount=5)
app.config["LOGFILE"], maxBytes=500000, backupCount=5
)
file_handler.setLevel(loglevel)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
Expand All @@ -151,33 +154,37 @@ def setup_logging():

setup_logging()

if app.config["SHOW_DB_URI"]:
    app.logger.debug("using DBURI: %s" % app.config["SQLALCHEMY_DATABASE_URI"])

db = SQLAlchemy(app)

# Controller imports must happen after `app`/`db` exist (circular-import order),
# hence the noqa on E402.
from resultsdb.controllers.main import main  # noqa: E402

app.register_blueprint(main)

from resultsdb.controllers.api_v2 import api as api_v2  # noqa: E402

app.register_blueprint(api_v2, url_prefix="/api/v2.0")

from resultsdb.controllers.api_v3 import api as api_v3, oidc  # noqa: E402

app.register_blueprint(api_v3, url_prefix="/api/v3")

if app.config["AUTH_MODULE"] == "oidc":

    @app.route("/auth/oidclogin")
    @oidc.require_login
    def login():
        # Returns the authenticated username and an OIDC access token.
        return {
            "username": oidc.user_getfield(app.config["OIDC_USERNAME_FIELD"]),
            "token": oidc.get_access_token(),
        }

    oidc.init_app(app)
    app.oidc = oidc
    app.logger.info("OpenIDConnect authentication is enabled")
else:
    app.logger.info("OpenIDConnect authentication is disabled")

app.logger.debug("Finished ResultsDB initialization")
22 changes: 10 additions & 12 deletions resultsdb/alembic/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,21 +5,23 @@

# add '.' to the pythonpath to support migration inside development env
import sys

sys.path.append(".")

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
from resultsdb import db

target_metadata = db.metadata
# target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
Expand Down Expand Up @@ -56,25 +58,21 @@ def run_migrations_online():

alembic_config = config.get_section(config.config_ini_section)
from resultsdb import app
alembic_config['sqlalchemy.url'] = app.config['SQLALCHEMY_DATABASE_URI']

engine = engine_from_config(
alembic_config,
prefix='sqlalchemy.',
poolclass=pool.NullPool)
alembic_config["sqlalchemy.url"] = app.config["SQLALCHEMY_DATABASE_URI"]

engine = engine_from_config(alembic_config, prefix="sqlalchemy.", poolclass=pool.NullPool)

connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata
)
context.configure(connection=connection, target_metadata=target_metadata)

try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()


if context.is_offline_mode():
run_migrations_offline()
else:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
"""

# revision identifiers, used by Alembic.
revision = "153c416322c2"
down_revision = "17ec41bd6e9a"
branch_labels = None
depends_on = None

Expand All @@ -18,15 +18,15 @@

def upgrade():
    """Add indexes on the foreign-key columns of `result` and `result_data`."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_index("result_fk_job_id", "result", ["job_id"], unique=False)
    op.create_index("result_fk_testcase_id", "result", ["testcase_id"], unique=False)
    op.create_index("result_data_fk_result_id", "result_data", ["result_id"], unique=False)
    ### end Alembic commands ###


def downgrade():
    """Drop the indexes created by upgrade(), in reverse order."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("result_data_fk_result_id", table_name="result_data")
    op.drop_index("result_fk_testcase_id", table_name="result")
    op.drop_index("result_fk_job_id", table_name="result")
    ### end Alembic commands ###
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"""

# revision identifiers, used by Alembic.
revision = "15f5eeb9f635"
down_revision = None
branch_labels = None
depends_on = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
"""

# revision identifiers, used by Alembic.
revision = "17ec41bd6e9a"
down_revision = "433d0b5b3b96"
branch_labels = None
depends_on = None

Expand All @@ -18,11 +18,11 @@

def upgrade():
    """Add a nullable 36-char `uuid` column to the `job` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column("job", sa.Column("uuid", sa.String(length=36), nullable=True))
    ### end Alembic commands ###


def downgrade():
    """Drop the `uuid` column added by upgrade()."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("job", "uuid")
    ### end Alembic commands ###
38 changes: 23 additions & 15 deletions resultsdb/alembic/versions/34760e10040b_add_aborted_outcome.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,32 +7,36 @@
"""

# revision identifiers, used by Alembic.
revision = "34760e10040b"
down_revision = "4ace44a44bf"
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa

# Outcome enum values before and after this migration (adds ABORTED).
old_values = ("PASSED", "INFO", "FAILED", "ERROR", "WAIVED", "NEEDS_INSPECTION")
new_values = ("PASSED", "INFO", "FAILED", "ERROR", "WAIVED", "NEEDS_INSPECTION", "ABORTED")

# PostgreSQL cannot alter an enum type in place here, so the migration swaps
# through a temporary type: resultoutcome -> resultoutcome_tmp -> resultoutcome.
old_enum = sa.Enum(*old_values, name="resultoutcome")
tmp_enum = sa.Enum(*new_values, name="resultoutcome_tmp")
new_enum = sa.Enum(*new_values, name="resultoutcome")


def upgrade():
    """Extend the `resultoutcome` enum with ABORTED (PostgreSQL only).

    Swaps the column through a temporary enum type because PostgreSQL
    cannot replace an enum's value set in place.
    """
    # this migration is postgresql specific and fails on sqlite
    if op.get_bind().engine.url.drivername.startswith("postgresql"):
        tmp_enum.create(op.get_bind(), checkfirst=False)
        op.execute(
            "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp "
            " USING outcome::text::resultoutcome_tmp"
        )
        old_enum.drop(op.get_bind(), checkfirst=False)
        new_enum.create(op.get_bind(), checkfirst=False)
        op.execute(
            "ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome "
            " USING outcome::text::resultoutcome"
        )
        tmp_enum.drop(op.get_bind(), checkfirst=False)

Expand All @@ -42,10 +46,14 @@ def downgrade():
op.execute("UPDATE result SET outcome='ERROR' WHERE outcome='ABORTED'")

tmp_enum.create(op.get_bind(), checkfirst=False)
op.execute('ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp '
' USING outcome::text::resultoutcome_tmp')
op.execute(
"ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome_tmp "
" USING outcome::text::resultoutcome_tmp"
)
new_enum.drop(op.get_bind(), checkfirst=False)
old_enum.create(op.get_bind(), checkfirst=False)
op.execute('ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome '
' USING outcome::text::resultoutcome')
op.execute(
"ALTER TABLE result ALTER COLUMN outcome TYPE resultoutcome "
" USING outcome::text::resultoutcome"
)
tmp_enum.drop(op.get_bind(), checkfirst=False)
Loading

0 comments on commit e381fc3

Please sign in to comment.