Add pre-commit (#250)

JIRA: RHELWF-10948

hluk committed Apr 25, 2024
1 parent b09f147 commit e6dd6d8
Showing 72 changed files with 4,726 additions and 4,059 deletions.
8 changes: 2 additions & 6 deletions .github/workflows/gating.yaml
@@ -56,8 +56,6 @@ jobs:
strategy:
matrix:
tox_env:
- bandit
- lint
- mypy
- semgrep

@@ -161,10 +159,8 @@ jobs:

- name: Install docker-compose
run: |
echo "deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_20.04/ /" | sudo tee /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list &&
curl -L "https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_20.04/Release.key" | sudo apt-key add - &&
sudo apt-get -y update &&
sudo apt-get -y -o Dpkg::Options::="--force-overwrite" install podman
python -m ensurepip --upgrade &&
pip install podman-compose
- name: Test Image
env:
80 changes: 80 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,80 @@
---
ci:
skip:
- hadolint-docker

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: check-merge-conflict
- id: check-yaml
args:
- --allow-multiple-documents
- --unsafe
- id: debug-statements
- id: end-of-file-fixer
- id: trailing-whitespace

# Sort imports
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
name: isort
args:
- --line-length=79
- --profile=black

# Remove unused imports, variables, statements
- repo: https://github.com/PyCQA/autoflake
rev: v2.3.1
hooks:
- id: autoflake

# Auto-update syntax
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.2
hooks:
- id: pyupgrade
args:
- --py311-plus

# Linter and formatter
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.5
hooks:
- id: ruff
args:
# ignore: E501 Line too long
- --ignore=E501
- id: ruff-format

# Linter and formatter
- repo: https://github.com/Instagram/Fixit
rev: v2.1.0
hooks:
- id: fixit-fix

# Type linter
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.9.0
hooks:
- id: mypy
args:
- --disable-error-code=import-untyped
- --ignore-missing-imports

# Security linter
- repo: https://github.com/pycqa/bandit
rev: 1.7.8
hooks:
- id: bandit
name: bandit
exclude: tests/

# Dockerfile linter
- repo: https://github.com/hadolint/hadolint
rev: v2.12.1-beta
hooks:
- id: hadolint-docker
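
(Context, not part of the diff: the hooks above are driven by the standard pre-commit CLI. Assuming pre-commit is installed from PyPI, a typical local workflow for this new config would look like:)

    pip install pre-commit           # install the tool
    pre-commit install               # register the git hook in the local clone
    pre-commit run --all-files       # run every configured hook over the whole tree once

The ci.skip entry at the top applies to the hosted pre-commit.ci service, which cannot run Docker-based hooks such as hadolint-docker; that hook still runs locally where Docker is available.
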
6 changes: 3 additions & 3 deletions README.md
@@ -2,9 +2,9 @@

![logo of Greenwave](https://github.com/release-engineering/greenwave/raw/master/logo.png)

Greenwave is a service to decide whether a software artifact can pass certain
gating points in a software delivery pipeline, based on test results stored in
[ResultsDB](https://github.com/release-engineering/resultsdb) and waivers stored in
Greenwave is a service to decide whether a software artifact can pass certain
gating points in a software delivery pipeline, based on test results stored in
[ResultsDB](https://github.com/release-engineering/resultsdb) and waivers stored in
[WaiverDB](https://github.com/release-engineering/waiverdb).

## Documentation
2 changes: 1 addition & 1 deletion conf/policies/fedora.yaml
@@ -11,7 +11,7 @@ product_versions:
decision_context: bodhi_update_push_stable
subject_type: koji_build
excluded_packages:
# see the excluded list for dist.abicheck
# see the excluded list for dist.abicheck
# https://infrastructure.fedoraproject.org/cgit/ansible.git/tree/roles/taskotron/taskotron-trigger/templates/trigger_rules.yml.j2#n17
- firefox
- thunderbird
15 changes: 0 additions & 15 deletions conftest.py

This file was deleted.

58 changes: 27 additions & 31 deletions docker/greenwave-settings.py
@@ -1,5 +1,5 @@
SECRET_KEY = 'greenwave'
HOST = '127.0.0.1'
SECRET_KEY = "greenwave" # nosec
HOST = "127.0.0.1"
PORT = 8080
DEBUG = True
POLICIES_DIR = "/etc/greenwave/policies/"
@@ -17,48 +17,44 @@
}
CACHE = {
# 'backend': 'dogpile.cache.null',
'backend': 'dogpile.cache.pymemcache',
'expiration_time': 1, # 1 is 1 second, keep to see that memcached
# service is working
'arguments': {
'url': 'memcached:11211',
'distributed_lock': True
}
"backend": "dogpile.cache.pymemcache",
"expiration_time": 1, # 1 is 1 second, keep to see that memcached
# service is working
"arguments": {"url": "memcached:11211", "distributed_lock": True},
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'loggers': {
'greenwave': {
'level': 'DEBUG',
"version": 1,
"disable_existing_loggers": False,
"loggers": {
"greenwave": {
"level": "DEBUG",
},
'dogpile.cache': {
'level': 'DEBUG',
"dogpile.cache": {
"level": "DEBUG",
},
"stomp.py": {
"level": "DEBUG",
},
},
'handlers': {
'console': {
'formatter': 'bare',
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
'level': 'DEBUG',
"handlers": {
"console": {
"formatter": "bare",
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
"level": "DEBUG",
},
},
'formatters': {
'bare': {
'format': '[%(asctime)s] [%(process)d] [%(levelname)s] %(name)s: %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S',
"formatters": {
"bare": {
"format": "[%(asctime)s] [%(process)d] [%(levelname)s] %(name)s: %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
}
},
'root': {
'level': 'INFO',
'handlers': ['console'],
"root": {
"level": "INFO",
"handlers": ["console"],
},
}

OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = 'http://jaeger:4318/v1/traces'
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT = "http://jaeger:4318/v1/traces"
OTEL_EXPORTER_SERVICE_NAME = "greenwave"

10 changes: 6 additions & 4 deletions docker/resultsdb-settings.py
@@ -1,12 +1,14 @@
import os

SECRET_KEY = 'resultsdb'
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://resultsdb:resultsdb@resultsdb-db:5432/resultsdb'
SECRET_KEY = "resultsdb" # nosec
SQLALCHEMY_DATABASE_URI = (
"postgresql+psycopg2://resultsdb:resultsdb@resultsdb-db:5432/resultsdb" # notsecret
)
FILE_LOGGING = False
LOGFILE = '/var/log/resultsdb/resultsdb.log'
LOGFILE = "/var/log/resultsdb/resultsdb.log"
SYSLOG_LOGGING = False
STREAM_LOGGING = True
RUN_HOST = '0.0.0.0'
RUN_HOST = "127.0.0.1"
RUN_PORT = 5001
ADDITIONAL_RESULT_OUTCOMES = ("RUNNING", "QUEUED", "ERROR")

9 changes: 4 additions & 5 deletions docker/scripts/consume.py
@@ -1,9 +1,8 @@
#!/usr/bin/env python3

import itertools
import json
import os
import itertools

from pprint import pprint

from rhmsg.activemq.consumer import AMQConsumer
@@ -29,7 +28,7 @@ def message_handler(message, data):
if isinstance(body, str):
body = body.encode("utf-8", "backslashreplace")
if data["dump"]:
print("------------- ({0}) {1} --------------".format(num, message.id))
print(f"------------- ({num}) {message.id} --------------")
print("address:", message.address)
print("subject:", message.subject)
print("properties:", message.properties)
@@ -55,7 +54,7 @@ def message_handler(message, data):


def main():
os.environ['PN_TRACE_FRM'] = '1'
os.environ["PN_TRACE_FRM"] = "1"
consumer = InsecureAMQConsumer(urls=URLS)
consumer.consume(
ADDRESS,
@@ -72,5 +71,5 @@ def main():
)


if __name__ == '__main__':
if __name__ == "__main__":
main()
27 changes: 10 additions & 17 deletions docker/scripts/produce.py
@@ -1,24 +1,19 @@
#!/usr/bin/env python3

import json
import proton
import os
import sys

import proton
from rhmsg.activemq.producer import AMQProducer

TOPIC = "VirtualTopic.eng.resultsdb.result.new"
URLS = ["amqp://localhost:5671"]
SUBJECT = f"test_message_{sys.argv[1]}"
MESSAGE = {
"submit_time": "2019-08-27T13:57:53.490376",
"testcase": {
"name": "example_test"
},
"data": {
"type": ["brew-build"],
"item": ["example-container"]
}
"testcase": {"name": "example_test"},
"data": {"type": ["brew-build"], "item": ["example-container"]},
}
MESSAGE = {
"data": {
@@ -33,37 +28,35 @@
"log": ["https://jenkins.example.com/job/x/build/y/console"],
"publisher_id": ["msg-greenwave-segment-test"],
"type": ["koji_build"],
"version": ["3.5.202110051331.w9756"]
"version": ["3.5.202110051331.w9756"],
},
"groups": ["52c6b84b-b617-4b79-af47-8975d11bb635"],
"href": "http://resultsdb/api/v2.0/results/123",
"id": 123,
"id": "123",
"note": "",
"outcome": "PASSED",
"ref_url": "https://jenkins.example.com/job/x/build/y",
"submit_time": "2021-10-05T13:35:29.721850",
"testcase": {
"href": "http://resultsdb/api/v2.0/testcases/dist.abicheck",
"name": "dist.abicheck",
"ref_url": "https://jenkins.example.com/job/x/build/y"
}
"ref_url": "https://jenkins.example.com/job/x/build/y",
},
}


def main():
os.environ['PN_TRACE_FRM'] = '1'
os.environ["PN_TRACE_FRM"] = "1"

with AMQProducer(urls=URLS) as producer:
# Disable SSL
del producer.conf["cert"]

producer.through_topic(TOPIC)
body = json.dumps(MESSAGE)
message = proton.Message(
subject=SUBJECT,
body=body)
message = proton.Message(subject=SUBJECT, body=body)
producer.send(message)


if __name__ == '__main__':
if __name__ == "__main__":
main()
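
(Context, not part of the diff: per SUBJECT above, produce.py takes the message-subject suffix as its first command-line argument. A hypothetical local invocation against the compose environment would be:)

    python docker/scripts/produce.py 42    # publishes a test result message with subject "test_message_42"
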
20 changes: 11 additions & 9 deletions docker/waiverdb-settings.py
@@ -1,17 +1,19 @@
import os

DATABASE_URI = 'postgresql+psycopg2://waiverdb:waiverdb@waiverdb-db:5433/waiverdb'
DATABASE_URI = (
"postgresql+psycopg2://waiverdb:waiverdb@waiverdb-db:5433/waiverdb" # notsecret
)

if os.getenv('TEST') == 'true':
DATABASE_URI += '_test'
if os.getenv("TEST") == "true":
DATABASE_URI += "_test"

HOST = '127.0.0.1'
HOST = "127.0.0.1"
PORT = 5004
#AUTH_METHOD = 'OIDC'
AUTH_METHOD = 'dummy'
SUPERUSERS = ['dummy']
#OIDC_CLIENT_SECRETS = '/etc/secret/client_secrets.json'
RESULTSDB_API_URL = 'http://resultsdb:5001/api/v2.0'
# AUTH_METHOD = 'OIDC'
AUTH_METHOD = "dummy"
SUPERUSERS = ["dummy"]
# OIDC_CLIENT_SECRETS = '/etc/secret/client_secrets.json'
RESULTSDB_API_URL = "http://resultsdb:5001/api/v2.0"

MESSAGE_BUS_PUBLISH = os.environ.get("GREENWAVE_LISTENERS", "") not in ("", "0")
MESSAGE_PUBLISHER = "stomp"
