From 7b7a6884cf18a47124cdf70a829a4bc15787dcfa Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 17 Mar 2026 15:08:16 +0100 Subject: [PATCH 001/201] Implement request API --- .gitignore | 2 + .../haproxy_route_policy/asgi.py | 2 +- .../haproxy_route_policy/settings.py | 72 +++++----- .../haproxy_route_policy/test_settings.py | 13 ++ .../haproxy_route_policy/urls.py | 8 +- haproxy-route-policy/policy/__init__.py | 2 + haproxy-route-policy/policy/apps.py | 12 ++ haproxy-route-policy/policy/db_models.py | 85 +++++++++++ .../policy/migrations/0001_initial.py | 28 ++++ .../policy/migrations/__init__.py | 0 haproxy-route-policy/policy/tests/__init__.py | 2 + .../policy/tests/test_models.py | 64 +++++++++ .../policy/tests/test_views.py | 136 ++++++++++++++++++ haproxy-route-policy/policy/urls.py | 21 +++ haproxy-route-policy/policy/views.py | 64 +++++++++ 15 files changed, 474 insertions(+), 37 deletions(-) create mode 100644 haproxy-route-policy/haproxy_route_policy/test_settings.py create mode 100644 haproxy-route-policy/policy/__init__.py create mode 100644 haproxy-route-policy/policy/apps.py create mode 100644 haproxy-route-policy/policy/db_models.py create mode 100644 haproxy-route-policy/policy/migrations/0001_initial.py create mode 100644 haproxy-route-policy/policy/migrations/__init__.py create mode 100644 haproxy-route-policy/policy/tests/__init__.py create mode 100644 haproxy-route-policy/policy/tests/test_models.py create mode 100644 haproxy-route-policy/policy/tests/test_views.py create mode 100644 haproxy-route-policy/policy/urls.py create mode 100644 haproxy-route-policy/policy/views.py diff --git a/.gitignore b/.gitignore index 1fba8d8f1..6d2a4e023 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,5 @@ __pycache__/ terraform/**/.terraform* terraform/**/.tfvars terraform/**/*.tfstate* +haproxy-route-policy/db.sqlite3 + diff --git a/haproxy-route-policy/haproxy_route_policy/asgi.py b/haproxy-route-policy/haproxy_route_policy/asgi.py index 
db644c77f..a449fdbd2 100644 --- a/haproxy-route-policy/haproxy_route_policy/asgi.py +++ b/haproxy-route-policy/haproxy_route_policy/asgi.py @@ -14,6 +14,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'haproxy_route_policy.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "haproxy_route_policy.settings") application = get_asgi_application() diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 009893d1e..be22e37d2 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -23,7 +23,7 @@ # See https://docs.djangoproject.com/en/6.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = 'django-insecure-8^cu^zn%=))7@yooq*_w2yz&cs@=)&5g*^72)l)ye6bdyzm3+%' +SECRET_KEY = "django-insecure-8^cu^zn%=))7@yooq*_w2yz&cs@=)&5g*^72)l)ye6bdyzm3+%" # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True @@ -34,51 +34,55 @@ # Application definition INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "rest_framework", + "policy.apps.PolicyConfig", ] +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 'haproxy_route_policy.urls' +ROOT_URLCONF = "haproxy_route_policy.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + 
"django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'haproxy_route_policy.wsgi.application' +WSGI_APPLICATION = "haproxy_route_policy.wsgi.application" # Database # https://docs.djangoproject.com/en/6.0/ref/settings/#databases DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': BASE_DIR / 'db.sqlite3', + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": BASE_DIR / "db.sqlite3", } } @@ -88,16 +92,16 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] @@ -105,9 +109,9 @@ # Internationalization # https://docs.djangoproject.com/en/6.0/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -117,4 +121,4 @@ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/6.0/howto/static-files/ -STATIC_URL = 'static/' +STATIC_URL = "static/" diff --git a/haproxy-route-policy/haproxy_route_policy/test_settings.py b/haproxy-route-policy/haproxy_route_policy/test_settings.py new file mode 100644 index 000000000..28e02860b --- /dev/null +++ b/haproxy-route-policy/haproxy_route_policy/test_settings.py @@ -0,0 +1,13 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Django settings for running tests with SQLite.""" + +from haproxy_route_policy.settings import * # noqa: F401, F403 + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + } +} diff --git a/haproxy-route-policy/haproxy_route_policy/urls.py b/haproxy-route-policy/haproxy_route_policy/urls.py index 90851e82c..0fb11c0d9 100644 --- a/haproxy-route-policy/haproxy_route_policy/urls.py +++ b/haproxy-route-policy/haproxy_route_policy/urls.py @@ -17,9 +17,13 @@ 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ + from django.contrib import admin -from django.urls import path +from django.urls import include, path + +from policy import urls as policy_urls urlpatterns = [ - path('admin/', admin.site.urls), + path("admin/", admin.site.urls), + path("", include(policy_urls)), ] diff --git a/haproxy-route-policy/policy/__init__.py b/haproxy-route-policy/policy/__init__.py new file mode 100644 index 000000000..fa89e9d7f --- /dev/null +++ b/haproxy-route-policy/policy/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/haproxy-route-policy/policy/apps.py b/haproxy-route-policy/policy/apps.py new file mode 100644 index 000000000..ccc707240 --- /dev/null +++ b/haproxy-route-policy/policy/apps.py @@ -0,0 +1,12 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Django app configuration for the policy app.""" + +from django.apps import AppConfig + + +class PolicyConfig(AppConfig): + """Configuration for the policy Django app.""" + + name = "policy" diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py new file mode 100644 index 000000000..e72551429 --- /dev/null +++ b/haproxy-route-policy/policy/db_models.py @@ -0,0 +1,85 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Database models for the haproxy-route-policy application.""" + +from datetime import datetime +import typing +from django.db import models +from validators import domain +from django.core.exceptions import ValidationError + +REQUEST_STATUS_PENDING = "pending" +REQUEST_STATUS_ACCEPTED = "accepted" +REQUEST_STATUS_REJECTED = "rejected" + +REQUEST_STATUSES = [ + REQUEST_STATUS_PENDING, + REQUEST_STATUS_ACCEPTED, + REQUEST_STATUS_REJECTED, +] + +REQUEST_STATUS_CHOICES = [(status, status) for status in REQUEST_STATUSES] + + +def validate_hostname_acls(value: typing.Any): + """Validate that the value is a list of valid hostnames.""" + if not isinstance(value, list): + raise ValidationError("hostname_acls must be a list.") + if invalid_hostnames := [ + hostname for hostname in typing.cast(list, value) if not domain(hostname) + ]: + raise ValidationError(f"Invalid hostnames: {', '.join(invalid_hostnames)}") + + +class BackendRequest(models.Model): + """A backend request submitted via the haproxy-route relation. + + Attrs: + id: Auto-incrementing primary key. + relation_id: The Juju relation ID this request originated from. + hostname_acls: Hostnames requested for routing. + backend_name: The name of the backend in the HAProxy config. + paths: URL paths requested for routing. + port: The port exposed on the frontend. + status: Current approval status (pending, accepted, rejected). + created_at: Timestamp when the request was created. + updated_at: Timestamp when the request was last updated. 
+ """ + + id = models.BigAutoField(primary_key=True) + relation_id = models.IntegerField() + hostname_acls = models.JSONField(default=list, validators=[validate_hostname_acls]) + backend_name = models.TextField() + paths = models.JSONField(default=list) + port = models.IntegerField(null=True) + status = models.TextField( + choices=REQUEST_STATUS_CHOICES, + default=REQUEST_STATUS_PENDING, + db_index=True, + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + def to_dict(self) -> dict: + """Serialize to a JSON-compatible dict.""" + return { + "id": self.id, + "relation_id": self.relation_id, + "hostname_acls": self.hostname_acls, + "backend_name": self.backend_name, + "paths": self.paths, + "port": self.port, + "status": self.status, + "created_at": typing.cast(datetime, self.created_at).isoformat() + if self.created_at + else None, + "updated_at": typing.cast(datetime, self.updated_at).isoformat() + if self.updated_at + else None, + } + + @classmethod + def required_fields(cls): + """Return a list of fields required for creating a BackendRequest.""" + return ["relation_id", "backend_name", "port"] diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py new file mode 100644 index 000000000..316fa7703 --- /dev/null +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -0,0 +1,28 @@ +# Generated by Django 6.0.3 on 2026-03-16 15:53 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='BackendRequest', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('relation_id', models.IntegerField()), + ('hostname_acls', models.JSONField(default=list)), + ('backend_name', models.TextField()), + ('paths', models.JSONField(default=list)), + ('port', 
models.IntegerField(null=True)), + ('status', models.TextField(choices=[('pending', 'pending'), ('accepted', 'accepted'), ('rejected', 'rejected')], db_index=True, default='pending')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + ] diff --git a/haproxy-route-policy/policy/migrations/__init__.py b/haproxy-route-policy/policy/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/haproxy-route-policy/policy/tests/__init__.py b/haproxy-route-policy/policy/tests/__init__.py new file mode 100644 index 000000000..fa89e9d7f --- /dev/null +++ b/haproxy-route-policy/policy/tests/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py new file mode 100644 index 000000000..087a1ae0d --- /dev/null +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -0,0 +1,64 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Unit tests for the BackendRequest model.""" + +from django.test import TestCase + +from policy import db_models + + +class TestBackendRequestModel(TestCase): + """Tests for BackendRequest model creation and serialisation.""" + + def test_create_with_defaults(self): + """Test creating a request with minimal required fields.""" + request = db_models.BackendRequest.objects.create( + relation_id=1, + backend_name="my-backend", + ) + self.assertEqual(request.relation_id, 1) + self.assertEqual(request.backend_name, "my-backend") + self.assertEqual(request.hostname_acls, []) + self.assertEqual(request.paths, []) + self.assertIsNone(request.port) + self.assertEqual(request.status, db_models.REQUEST_STATUS_PENDING) + self.assertIsNotNone(request.created_at) + self.assertIsNotNone(request.updated_at) + + def test_create_with_all_fields(self): + """Test creating a request with all fields specified.""" + request = db_models.BackendRequest.objects.create( + relation_id=5, + hostname_acls=["example.com", "app.example.com"], + backend_name="web-backend", + paths=["/api", "/health"], + port=8080, + status=db_models.REQUEST_STATUS_ACCEPTED, + ) + self.assertEqual(request.relation_id, 5) + self.assertEqual(request.hostname_acls, ["example.com", "app.example.com"]) + self.assertEqual(request.backend_name, "web-backend") + self.assertEqual(request.paths, ["/api", "/health"]) + self.assertEqual(request.port, 8080) + self.assertEqual(request.status, db_models.REQUEST_STATUS_ACCEPTED) + + def test_to_jsonable(self): + """Test serialisation to a JSON-compatible dict.""" + request = db_models.BackendRequest.objects.create( + relation_id=2, + hostname_acls=["host.example.com"], + backend_name="backend-a", + paths=["/v1"], + port=443, + ) + data = request.to_dict() + self.assertEqual(data["id"], request.pk) + self.assertEqual(data["relation_id"], 2) + self.assertEqual(data["hostname_acls"], ["host.example.com"]) + self.assertEqual(data["backend_name"], "backend-a") + 
self.assertEqual(data["paths"], ["/v1"]) + self.assertEqual(data["port"], 443) + self.assertEqual(data["status"], db_models.REQUEST_STATUS_PENDING) + self.assertIn("created_at", data) + self.assertIn("updated_at", data) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py new file mode 100644 index 000000000..b2875d35f --- /dev/null +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -0,0 +1,136 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Integration tests for the policy REST API views.""" + +from django.test import TestCase +from rest_framework.test import APIClient + +from policy import db_models + + +class TestListCreateRequestsView(TestCase): + """Tests for GET /api/v1/requests and POST /api/v1/requests.""" + + def setUp(self): + """Set up the API client.""" + self.client = APIClient() + + def test_list_empty(self): + """GET returns an empty list when no requests exist.""" + response = self.client.get("/api/v1/requests") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), []) + + def test_list_returns_all(self): + """GET returns all requests.""" + db_models.BackendRequest.objects.create(relation_id=1, backend_name="a") + db_models.BackendRequest.objects.create(relation_id=2, backend_name="b") + response = self.client.get("/api/v1/requests") + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(len(data), 2) + self.assertEqual(data[0]["backend_name"], "a") + self.assertEqual(data[1]["backend_name"], "b") + + def test_list_filter_by_status(self): + """GET with ?status= filters results.""" + db_models.BackendRequest.objects.create( + relation_id=1, backend_name="a", status=db_models.REQUEST_STATUS_PENDING + ) + db_models.BackendRequest.objects.create( + relation_id=2, backend_name="b", status=db_models.REQUEST_STATUS_ACCEPTED + ) + response = self.client.get("/api/v1/requests?status=accepted") + 
data = response.json() + self.assertEqual(len(data), 1) + self.assertEqual(data[0]["backend_name"], "b") + + def test_bulk_create(self): + """POST creates multiple requests and returns them.""" + payload = [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "paths": ["/api"], + "port": 80, + }, + { + "relation_id": 2, + "backend_name": "backend-2", + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertEqual(len(data), 2) + self.assertEqual(data[0]["backend_name"], "backend-1") + self.assertEqual(data[0]["status"], "pending") + self.assertEqual(data[0]["hostname_acls"], ["example.com"]) + self.assertEqual(data[1]["backend_name"], "backend-2") + self.assertEqual(data[1]["hostname_acls"], []) + self.assertEqual(data[1]["paths"], []) + self.assertIsNone(data[1]["port"]) + self.assertEqual(db_models.BackendRequest.objects.count(), 2) + + def test_bulk_create_all_set_to_pending(self): + """POST always sets status to pending regardless of input.""" + payload = [ + { + "relation_id": 1, + "backend_name": "test", + "status": "accepted", + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json()[0]["status"], "pending") + + def test_bulk_create_rejects_non_list(self): + """POST returns 400 when the body is not a list.""" + response = self.client.post( + "/api/v1/requests", + data={"relation_id": 1, "backend_name": "x"}, + format="json", + ) + self.assertEqual(response.status_code, 400) + + +class TestRequestDetailView(TestCase): + """Tests for GET /api/v1/requests/ and DELETE /api/v1/requests/.""" + + def setUp(self): + """Set up the API client and a sample request.""" + self.client = APIClient() + self.backend_request = db_models.BackendRequest.objects.create( + relation_id=10, + hostname_acls=["host.test"], 
+ backend_name="detail-backend", + port=443, + ) + + def test_get_existing(self): + """GET returns the request matching the given ID.""" + response = self.client.get(f"/api/v1/requests/{self.backend_request.pk}") + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data["id"], self.backend_request.pk) + self.assertEqual(data["backend_name"], "detail-backend") + + def test_get_not_found(self): + """GET returns 404 for a non-existent ID.""" + response = self.client.get("/api/v1/requests/99999") + self.assertEqual(response.status_code, 404) + + def test_delete_existing(self): + """DELETE removes the request and returns 204.""" + pk = self.backend_request.pk + response = self.client.delete(f"/api/v1/requests/{pk}") + self.assertEqual(response.status_code, 204) + self.assertFalse(db_models.BackendRequest.objects.filter(pk=pk).exists()) + + def test_delete_nonexistent(self): + """DELETE on a non-existent ID still returns 204 (idempotent).""" + response = self.client.delete("/api/v1/requests/99999") + self.assertEqual(response.status_code, 204) diff --git a/haproxy-route-policy/policy/urls.py b/haproxy-route-policy/policy/urls.py new file mode 100644 index 000000000..f1580fabb --- /dev/null +++ b/haproxy-route-policy/policy/urls.py @@ -0,0 +1,21 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""URL configuration for the policy app.""" + +from django.urls import path + +from policy import views + +urlpatterns = [ + path( + "api/v1/requests", + views.ListCreateRequestsView.as_view(), + name="api-requests", + ), + path( + "api/v1/requests/", + views.RequestDetailView.as_view(), + name="api-request-detail", + ), +] diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py new file mode 100644 index 000000000..ebee67bcd --- /dev/null +++ b/haproxy-route-policy/policy/views.py @@ -0,0 +1,64 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""REST API views for backend requests.""" + +from django.http import HttpResponse, HttpResponseNotFound, HttpResponseBadRequest, JsonResponse +from rest_framework.views import APIView +from django.core.exceptions import ValidationError +from .db_models import BackendRequest, REQUEST_STATUS_PENDING + + +class ListCreateRequestsView(APIView): + """View for listing and bulk-creating backend requests.""" + + def get(self, request): + """List all requests, optionally filtered by status.""" + status = request.GET.get("status") + queryset = BackendRequest.objects.all() + if status: + queryset = queryset.filter(status=status) + return JsonResponse([r.to_dict() for r in queryset.order_by("id")], safe=False) + + def post(self, request): + """Bulk create backend requests. + + All new requests are set to 'pending' (evaluation logic is deferred). + """ + if not isinstance(request.data, list): + return JsonResponse( + {"error": "Expected a list of request objects."}, status=400 + ) + + created = [] + try: + for item in request.data: + backend_request = BackendRequest.objects.create( + relation_id=item.get("relation_id"), + hostname_acls=item.get("hostname_acls", []), + backend_name=item.get("backend_name"), + paths=item.get("paths", []), + port=item.get("port"), + status=REQUEST_STATUS_PENDING, + ) + created.append(backend_request.to_dict()) + except ValidationError as e: + return HttpResponseBadRequest({"error": str(e)}, status=400) + return JsonResponse(created, safe=False, status=201) + + +class RequestDetailView(APIView): + """View for getting or deleting a single backend request.""" + + def get(self, request, pk): + """Get a request by ID.""" + try: + backend_request = BackendRequest.objects.get(pk=pk) + except BackendRequest.DoesNotExist: + return HttpResponseNotFound() + return JsonResponse(backend_request.to_dict()) + + def delete(self, request, pk): + """Delete a request by ID.""" + BackendRequest.objects.filter(pk=pk).delete() + return HttpResponse(status=204) From 
45402502e3a6c5b3e1ea3bf4230b42eec30cc762 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 002/201] update model validation before save and add unit tests --- .github/workflows/test.yaml | 13 +++++++++++++ haproxy-route-policy/policy/db_models.py | 13 ++++++++----- haproxy-route-policy/policy/views.py | 16 +++++++++++++--- 3 files changed, 34 insertions(+), 8 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 7f6390e4f..ac13eacff 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -22,3 +22,16 @@ jobs: self-hosted-runner-image: "noble" working-directory: ${{ matrix.charm.working-directory }} with-uv: true + + haproxy-route-policy: + name: HAProxy-route Policy App Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6.0.1 + - uses: actions/setup-python@v6 + with: + python-version: '3.x' + - working-directory: ./haproxy-route-policy + run: | + pip install -r requirements.txt + python3 ./manage.py test --settings=haproxy_route_policy.test_settings diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index e72551429..01c6ff19f 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -8,6 +8,9 @@ from django.db import models from validators import domain from django.core.exceptions import ValidationError +import logging + +logger = logging.getLogger(__name__) REQUEST_STATUS_PENDING = "pending" REQUEST_STATUS_ACCEPTED = "accepted" @@ -24,6 +27,7 @@ def validate_hostname_acls(value: typing.Any): """Validate that the value is a list of valid hostnames.""" + logger.info("Validating hostname_acls: %s", value) if not isinstance(value, list): raise ValidationError("hostname_acls must be a list.") if invalid_hostnames := [ @@ -41,7 +45,6 @@ class BackendRequest(models.Model): hostname_acls: Hostnames requested for routing. 
backend_name: The name of the backend in the HAProxy config. paths: URL paths requested for routing. - port: The port exposed on the frontend. status: Current approval status (pending, accepted, rejected). created_at: Timestamp when the request was created. updated_at: Timestamp when the request was last updated. @@ -49,10 +52,11 @@ class BackendRequest(models.Model): id = models.BigAutoField(primary_key=True) relation_id = models.IntegerField() - hostname_acls = models.JSONField(default=list, validators=[validate_hostname_acls]) + hostname_acls = models.JSONField( + default=list, validators=[validate_hostname_acls], blank=True + ) backend_name = models.TextField() - paths = models.JSONField(default=list) - port = models.IntegerField(null=True) + paths = models.JSONField(default=list, blank=True) status = models.TextField( choices=REQUEST_STATUS_CHOICES, default=REQUEST_STATUS_PENDING, @@ -69,7 +73,6 @@ def to_dict(self) -> dict: "hostname_acls": self.hostname_acls, "backend_name": self.backend_name, "paths": self.paths, - "port": self.port, "status": self.status, "created_at": typing.cast(datetime, self.created_at).isoformat() if self.created_at diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index ebee67bcd..fb92139cf 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -3,10 +3,16 @@ """REST API views for backend requests.""" -from django.http import HttpResponse, HttpResponseNotFound, HttpResponseBadRequest, JsonResponse +from django.http import ( + HttpResponse, + HttpResponseNotFound, + HttpResponseBadRequest, + JsonResponse, +) from rest_framework.views import APIView from django.core.exceptions import ValidationError from .db_models import BackendRequest, REQUEST_STATUS_PENDING +from django.db.utils import IntegrityError class ListCreateRequestsView(APIView): @@ -33,7 +39,7 @@ def post(self, request): created = [] try: for item in request.data: - backend_request = 
BackendRequest.objects.create( + backend_request = BackendRequest( relation_id=item.get("relation_id"), hostname_acls=item.get("hostname_acls", []), backend_name=item.get("backend_name"), @@ -41,9 +47,13 @@ def post(self, request): port=item.get("port"), status=REQUEST_STATUS_PENDING, ) + backend_request.full_clean() + backend_request.save() created.append(backend_request.to_dict()) except ValidationError as e: - return HttpResponseBadRequest({"error": str(e)}, status=400) + return HttpResponseBadRequest(str(e), status=400) + except IntegrityError: + return HttpResponseBadRequest("Invalid request data.", status=400) return JsonResponse(created, safe=False, status=201) From 2111aa17a24c5ee8d896ff16139c8812e8b123f4 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 003/201] use environment variables for secret key --- haproxy-route-policy/haproxy_route_policy/settings.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index be22e37d2..f628c2208 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -14,6 +14,7 @@ """ from pathlib import Path +import os # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent @@ -23,10 +24,10 @@ # See https://docs.djangoproject.com/en/6.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = "django-insecure-8^cu^zn%=))7@yooq*_w2yz&cs@=)&5g*^72)l)ye6bdyzm3+%" +SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = True +DEBUG = os.environ.get("DJANGO_DEBUG", "True") == "True" ALLOWED_HOSTS = [] From b597201bf665e68380df636faa6aa7322234386d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 004/201] ISD-5226 update docs landing pages (#387) * update how-to landing page * Update docs/how-to/index.md Co-authored-by: Erin Conley * add summary for each section * Update docs/how-to/index.md Co-authored-by: Erin Conley * Update docs/how-to/index.md Co-authored-by: Erin Conley * Update docs/how-to/index.md Co-authored-by: Erin Conley --------- Co-authored-by: Erin Conley --- docs/how-to/index.md | 36 ++++++++++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/docs/how-to/index.md b/docs/how-to/index.md index b1fad25c3..f52f90f9b 100644 --- a/docs/how-to/index.md +++ b/docs/how-to/index.md @@ -10,14 +10,42 @@ myst: The following guides cover key processes and common tasks for managing and using the HAProxy charm. +## Common use-cases + +Once you've set up the HAProxy charm, you can take advantage of the built-in features and capabilities to customize the charm based on your specific needs and use case. + ```{toctree} :maxdepth: 1 -Configure high availability -Configure virtual IP on OpenStack Integrate with non-charm workloads Provide extra configurations for ingress requirer charms -Upgrade Protect a hostname using OpenID Connect -Contribute Enable DDoS Protection +Configure high availability +``` + +## Platform-specific workflows + +In some cases additional steps need to be performed on specific substrates to ensure that the charm is working as intended. + +```{toctree} +:maxdepth: 1 +Configure virtual IP on OpenStack +``` + +## Maintenance + +This section contains how-to guides for maintenance actions that you might need to take while operating the charm. + +```{toctree} +:maxdepth: 1 +Upgrade +``` + +## Development + +This section contains how-to guides for developing the charm. 
+ +```{toctree} +:maxdepth: 1 +Contribute ``` From 0f1bbfd4d9269a6ce91ddb434061f08e776af90d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 005/201] ruff format --- .../haproxy_route_policy/wsgi.py | 2 +- haproxy-route-policy/manage.py | 5 ++- .../policy/migrations/0001_initial.py | 43 +++++++++++++------ 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/wsgi.py b/haproxy-route-policy/haproxy_route_policy/wsgi.py index b81d09010..9616b9a61 100644 --- a/haproxy-route-policy/haproxy_route_policy/wsgi.py +++ b/haproxy-route-policy/haproxy_route_policy/wsgi.py @@ -14,6 +14,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'haproxy_route_policy.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "haproxy_route_policy.settings") application = get_wsgi_application() diff --git a/haproxy-route-policy/manage.py b/haproxy-route-policy/manage.py index 36fbcd667..55b10427a 100755 --- a/haproxy-route-policy/manage.py +++ b/haproxy-route-policy/manage.py @@ -4,13 +4,14 @@ # See LICENSE file for licensing details. 
"""Django's command-line utility for administrative tasks.""" + import os import sys def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'haproxy_route_policy.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "haproxy_route_policy.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: @@ -22,5 +23,5 @@ def main(): execute_from_command_line(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 316fa7703..3c892bc55 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -4,25 +4,42 @@ class Migration(migrations.Migration): - initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='BackendRequest', + name="BackendRequest", fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('relation_id', models.IntegerField()), - ('hostname_acls', models.JSONField(default=list)), - ('backend_name', models.TextField()), - ('paths', models.JSONField(default=list)), - ('port', models.IntegerField(null=True)), - ('status', models.TextField(choices=[('pending', 'pending'), ('accepted', 'accepted'), ('rejected', 'rejected')], db_index=True, default='pending')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("relation_id", models.IntegerField()), + ("hostname_acls", models.JSONField(default=list)), + ("backend_name", models.TextField()), + ("paths", models.JSONField(default=list)), + ("port", models.IntegerField(null=True)), + ( + "status", + models.TextField( + 
choices=[ + ("pending", "pending"), + ("accepted", "accepted"), + ("rejected", "rejected"), + ], + db_index=True, + default="pending", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] From 2c7bd783e4b1bb42147af8adf095de8f45f16715 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 006/201] add secret key for testing --- haproxy-route-policy/haproxy_route_policy/test_settings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haproxy-route-policy/haproxy_route_policy/test_settings.py b/haproxy-route-policy/haproxy_route_policy/test_settings.py index 28e02860b..dc1db7f24 100644 --- a/haproxy-route-policy/haproxy_route_policy/test_settings.py +++ b/haproxy-route-policy/haproxy_route_policy/test_settings.py @@ -5,6 +5,9 @@ from haproxy_route_policy.settings import * # noqa: F401, F403 +# Mock secret key for testing. +SECRET_KEY = "test-secret-key" + DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3", From cbd051e1fc189fe163d2c522ec0c75bfcf160be9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 007/201] remove port attribute from test --- haproxy-route-policy/policy/migrations/0001_initial.py | 1 - haproxy-route-policy/policy/tests/test_models.py | 5 ----- haproxy-route-policy/policy/tests/test_views.py | 3 --- haproxy-route-policy/policy/views.py | 1 - 4 files changed, 10 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 3c892bc55..82c6bdf2f 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -25,7 +25,6 @@ class Migration(migrations.Migration): ("hostname_acls", models.JSONField(default=list)), ("backend_name", models.TextField()), ("paths", models.JSONField(default=list)), - ("port", 
models.IntegerField(null=True)), ( "status", models.TextField( diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 087a1ae0d..a1e475fdb 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -21,7 +21,6 @@ def test_create_with_defaults(self): self.assertEqual(request.backend_name, "my-backend") self.assertEqual(request.hostname_acls, []) self.assertEqual(request.paths, []) - self.assertIsNone(request.port) self.assertEqual(request.status, db_models.REQUEST_STATUS_PENDING) self.assertIsNotNone(request.created_at) self.assertIsNotNone(request.updated_at) @@ -33,14 +32,12 @@ def test_create_with_all_fields(self): hostname_acls=["example.com", "app.example.com"], backend_name="web-backend", paths=["/api", "/health"], - port=8080, status=db_models.REQUEST_STATUS_ACCEPTED, ) self.assertEqual(request.relation_id, 5) self.assertEqual(request.hostname_acls, ["example.com", "app.example.com"]) self.assertEqual(request.backend_name, "web-backend") self.assertEqual(request.paths, ["/api", "/health"]) - self.assertEqual(request.port, 8080) self.assertEqual(request.status, db_models.REQUEST_STATUS_ACCEPTED) def test_to_jsonable(self): @@ -50,7 +47,6 @@ def test_to_jsonable(self): hostname_acls=["host.example.com"], backend_name="backend-a", paths=["/v1"], - port=443, ) data = request.to_dict() self.assertEqual(data["id"], request.pk) @@ -58,7 +54,6 @@ def test_to_jsonable(self): self.assertEqual(data["hostname_acls"], ["host.example.com"]) self.assertEqual(data["backend_name"], "backend-a") self.assertEqual(data["paths"], ["/v1"]) - self.assertEqual(data["port"], 443) self.assertEqual(data["status"], db_models.REQUEST_STATUS_PENDING) self.assertIn("created_at", data) self.assertIn("updated_at", data) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index b2875d35f..ad58deeae 100644 --- 
a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -54,7 +54,6 @@ def test_bulk_create(self): "hostname_acls": ["example.com"], "backend_name": "backend-1", "paths": ["/api"], - "port": 80, }, { "relation_id": 2, @@ -71,7 +70,6 @@ def test_bulk_create(self): self.assertEqual(data[1]["backend_name"], "backend-2") self.assertEqual(data[1]["hostname_acls"], []) self.assertEqual(data[1]["paths"], []) - self.assertIsNone(data[1]["port"]) self.assertEqual(db_models.BackendRequest.objects.count(), 2) def test_bulk_create_all_set_to_pending(self): @@ -107,7 +105,6 @@ def setUp(self): relation_id=10, hostname_acls=["host.test"], backend_name="detail-backend", - port=443, ) def test_get_existing(self): diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index fb92139cf..548d20517 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -44,7 +44,6 @@ def post(self, request): hostname_acls=item.get("hostname_acls", []), backend_name=item.get("backend_name"), paths=item.get("paths", []), - port=item.get("port"), status=REQUEST_STATUS_PENDING, ) backend_request.full_clean() From eac8f319a3fbab522b27905e8fe823a041f8c49a Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 008/201] add requirements.txt for testing --- haproxy-route-policy/requirements.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 haproxy-route-policy/requirements.txt diff --git a/haproxy-route-policy/requirements.txt b/haproxy-route-policy/requirements.txt new file mode 100644 index 000000000..df9f7bbe8 --- /dev/null +++ b/haproxy-route-policy/requirements.txt @@ -0,0 +1,3 @@ +Django==6.0.3 +djangorestframework==3.16.1 +validators==0.35.0 From 45f1886e0e9a036f7a74cb649fc30731fc30c762 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 009/201] reintroduce port field --- 
haproxy-route-policy/policy/db_models.py | 3 ++ .../policy/migrations/0001_initial.py | 1 + .../policy/tests/test_models.py | 8 ++++-- .../policy/tests/test_views.py | 28 +++++++++++++++---- haproxy-route-policy/policy/views.py | 1 + 5 files changed, 34 insertions(+), 7 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 01c6ff19f..119519bca 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -45,6 +45,7 @@ class BackendRequest(models.Model): hostname_acls: Hostnames requested for routing. backend_name: The name of the backend in the HAProxy config. paths: URL paths requested for routing. + port: The frontend port that should be opened by HAProxy. status: Current approval status (pending, accepted, rejected). created_at: Timestamp when the request was created. updated_at: Timestamp when the request was last updated. @@ -57,6 +58,7 @@ class BackendRequest(models.Model): ) backend_name = models.TextField() paths = models.JSONField(default=list, blank=True) + port = models.IntegerField() status = models.TextField( choices=REQUEST_STATUS_CHOICES, default=REQUEST_STATUS_PENDING, @@ -73,6 +75,7 @@ def to_dict(self) -> dict: "hostname_acls": self.hostname_acls, "backend_name": self.backend_name, "paths": self.paths, + "port": self.port, "status": self.status, "created_at": typing.cast(datetime, self.created_at).isoformat() if self.created_at diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 82c6bdf2f..403c762f5 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -25,6 +25,7 @@ class Migration(migrations.Migration): ("hostname_acls", models.JSONField(default=list)), ("backend_name", models.TextField()), ("paths", models.JSONField(default=list)), + ("port", models.IntegerField()), ( "status", 
models.TextField( diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index a1e475fdb..7d3233715 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -14,14 +14,14 @@ class TestBackendRequestModel(TestCase): def test_create_with_defaults(self): """Test creating a request with minimal required fields.""" request = db_models.BackendRequest.objects.create( - relation_id=1, - backend_name="my-backend", + relation_id=1, backend_name="my-backend", port=443 ) self.assertEqual(request.relation_id, 1) self.assertEqual(request.backend_name, "my-backend") self.assertEqual(request.hostname_acls, []) self.assertEqual(request.paths, []) self.assertEqual(request.status, db_models.REQUEST_STATUS_PENDING) + self.assertEqual(request.port, 443) self.assertIsNotNone(request.created_at) self.assertIsNotNone(request.updated_at) @@ -32,12 +32,14 @@ def test_create_with_all_fields(self): hostname_acls=["example.com", "app.example.com"], backend_name="web-backend", paths=["/api", "/health"], + port=443, status=db_models.REQUEST_STATUS_ACCEPTED, ) self.assertEqual(request.relation_id, 5) self.assertEqual(request.hostname_acls, ["example.com", "app.example.com"]) self.assertEqual(request.backend_name, "web-backend") self.assertEqual(request.paths, ["/api", "/health"]) + self.assertEqual(request.port, 443) self.assertEqual(request.status, db_models.REQUEST_STATUS_ACCEPTED) def test_to_jsonable(self): @@ -47,6 +49,7 @@ def test_to_jsonable(self): hostname_acls=["host.example.com"], backend_name="backend-a", paths=["/v1"], + port=443, ) data = request.to_dict() self.assertEqual(data["id"], request.pk) @@ -54,6 +57,7 @@ def test_to_jsonable(self): self.assertEqual(data["hostname_acls"], ["host.example.com"]) self.assertEqual(data["backend_name"], "backend-a") self.assertEqual(data["paths"], ["/v1"]) + self.assertEqual(data["port"], 443) self.assertEqual(data["status"], 
db_models.REQUEST_STATUS_PENDING) self.assertIn("created_at", data) self.assertIn("updated_at", data) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index ad58deeae..e230a6d08 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -24,8 +24,12 @@ def test_list_empty(self): def test_list_returns_all(self): """GET returns all requests.""" - db_models.BackendRequest.objects.create(relation_id=1, backend_name="a") - db_models.BackendRequest.objects.create(relation_id=2, backend_name="b") + db_models.BackendRequest.objects.create( + relation_id=1, backend_name="a", port=443 + ) + db_models.BackendRequest.objects.create( + relation_id=2, backend_name="b", port=443 + ) response = self.client.get("/api/v1/requests") self.assertEqual(response.status_code, 200) data = response.json() @@ -36,10 +40,16 @@ def test_list_returns_all(self): def test_list_filter_by_status(self): """GET with ?status= filters results.""" db_models.BackendRequest.objects.create( - relation_id=1, backend_name="a", status=db_models.REQUEST_STATUS_PENDING + relation_id=1, + backend_name="a", + status=db_models.REQUEST_STATUS_PENDING, + port=443, ) db_models.BackendRequest.objects.create( - relation_id=2, backend_name="b", status=db_models.REQUEST_STATUS_ACCEPTED + relation_id=2, + backend_name="b", + status=db_models.REQUEST_STATUS_ACCEPTED, + port=443, ) response = self.client.get("/api/v1/requests?status=accepted") data = response.json() @@ -54,10 +64,12 @@ def test_bulk_create(self): "hostname_acls": ["example.com"], "backend_name": "backend-1", "paths": ["/api"], + "port": 443, }, { "relation_id": 2, "backend_name": "backend-2", + "port": 443, }, ] response = self.client.post("/api/v1/requests", data=payload, format="json") @@ -67,9 +79,12 @@ def test_bulk_create(self): self.assertEqual(data[0]["backend_name"], "backend-1") self.assertEqual(data[0]["status"], "pending") 
self.assertEqual(data[0]["hostname_acls"], ["example.com"]) + self.assertEqual(data[0]["paths"], second=["/api"]) + self.assertEqual(data[0]["port"], 443) self.assertEqual(data[1]["backend_name"], "backend-2") self.assertEqual(data[1]["hostname_acls"], []) self.assertEqual(data[1]["paths"], []) + self.assertEqual(data[1]["port"], 443) self.assertEqual(db_models.BackendRequest.objects.count(), 2) def test_bulk_create_all_set_to_pending(self): @@ -79,17 +94,19 @@ def test_bulk_create_all_set_to_pending(self): "relation_id": 1, "backend_name": "test", "status": "accepted", + "port": 443, }, ] response = self.client.post("/api/v1/requests", data=payload, format="json") self.assertEqual(response.status_code, 201) self.assertEqual(response.json()[0]["status"], "pending") + self.assertEqual(response.json()[0]["port"], 443) def test_bulk_create_rejects_non_list(self): """POST returns 400 when the body is not a list.""" response = self.client.post( "/api/v1/requests", - data={"relation_id": 1, "backend_name": "x"}, + data={"relation_id": 1, "backend_name": "x", "port": 443}, format="json", ) self.assertEqual(response.status_code, 400) @@ -105,6 +122,7 @@ def setUp(self): relation_id=10, hostname_acls=["host.test"], backend_name="detail-backend", + port=443, ) def test_get_existing(self): diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 548d20517..fb92139cf 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -44,6 +44,7 @@ def post(self, request): hostname_acls=item.get("hostname_acls", []), backend_name=item.get("backend_name"), paths=item.get("paths", []), + port=item.get("port"), status=REQUEST_STATUS_PENDING, ) backend_request.full_clean() From 4b5b49a9d951f987cbcb8d4fba41b4c663158ae0 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 010/201] Add change artifact --- docs/release-notes/artifacts/pr0399.yaml | 20 ++++++++++++++++++++ 1 file 
changed, 20 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0399.yaml diff --git a/docs/release-notes/artifacts/pr0399.yaml b/docs/release-notes/artifacts/pr0399.yaml new file mode 100644 index 000000000..42dbc4b55 --- /dev/null +++ b/docs/release-notes/artifacts/pr0399.yaml @@ -0,0 +1,20 @@ +version_schema: 2 + +changes: + - title: Added requests management REST API for haproxy-route-policy app + author: tphan025 + type: minor + description: > + Added the policy Django app with a BackendRequest model and REST API endpoints + for managing backend requests. Implemented GET /api/v1/requests (list with optional + status filter), POST /api/v1/requests (bulk create with all requests set to pending), + GET /api/v1/requests/ (retrieve by ID), and DELETE /api/v1/requests/ + (idempotent delete). Included hostname validation, test settings with in-memory + SQLite, and unit and integration tests for models and views. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/399 + related_doc: + related_issue: + visibility: public + highlight: false From b143b06b0a9d8d92ad440d63f6113aa40e091748 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 011/201] run lint with uv --- .github/workflows/test.yaml | 18 +- haproxy-route-policy/.python-version | 1 + haproxy-route-policy/README.md | 0 haproxy-route-policy/policy/db_models.py | 18 +- haproxy-route-policy/policy/views.py | 4 +- haproxy-route-policy/pyproject.toml | 22 ++ haproxy-route-policy/requirements.txt | 3 - haproxy-route-policy/tox.toml | 56 ++++ haproxy-route-policy/uv.lock | 339 +++++++++++++++++++++++ 9 files changed, 439 insertions(+), 22 deletions(-) create mode 100644 haproxy-route-policy/.python-version create mode 100644 haproxy-route-policy/README.md create mode 100644 haproxy-route-policy/pyproject.toml delete mode 100644 haproxy-route-policy/requirements.txt create mode 100644 haproxy-route-policy/tox.toml create mode 100644 
haproxy-route-policy/uv.lock diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index ac13eacff..1185d12da 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -14,6 +14,8 @@ jobs: working-directory: ./haproxy-spoe-auth-operator - name: haproxy-ddos-protection-configurator working-directory: ./haproxy-ddos-protection-configurator + - name: haproxy-route-policy + working-directory: ./haproxy-route-policy name: Unit tests for ${{ matrix.charm.name }} uses: canonical/operator-workflows/.github/workflows/test.yaml@main secrets: inherit @@ -27,11 +29,11 @@ jobs: name: HAProxy-route Policy App Tests runs-on: ubuntu-latest steps: - - uses: actions/checkout@v6.0.1 - - uses: actions/setup-python@v6 - with: - python-version: '3.x' - - working-directory: ./haproxy-route-policy - run: | - pip install -r requirements.txt - python3 ./manage.py test --settings=haproxy_route_policy.test_settings + - uses: actions/checkout@v6.0.1 + - uses: actions/setup-python@v6 + with: + python-version: "3.x" + - working-directory: ./haproxy-route-policy + run: | + pip install -r requirements.txt + python3 ./manage.py test --settings=haproxy_route_policy.test_settings diff --git a/haproxy-route-policy/.python-version b/haproxy-route-policy/.python-version new file mode 100644 index 000000000..e4fba2183 --- /dev/null +++ b/haproxy-route-policy/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/haproxy-route-policy/README.md b/haproxy-route-policy/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 119519bca..81b0921bd 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -51,21 +51,21 @@ class BackendRequest(models.Model): updated_at: Timestamp when the request was last updated. 
""" - id = models.BigAutoField(primary_key=True) - relation_id = models.IntegerField() - hostname_acls = models.JSONField( + id: models.BigAutoField = models.BigAutoField(primary_key=True) + relation_id: models.IntegerField = models.IntegerField() + hostname_acls: models.JSONField = models.JSONField( default=list, validators=[validate_hostname_acls], blank=True ) - backend_name = models.TextField() - paths = models.JSONField(default=list, blank=True) - port = models.IntegerField() - status = models.TextField( + backend_name: models.TextField = models.TextField() + paths: models.JSONField = models.JSONField(default=list, blank=True) + port: models.IntegerField = models.IntegerField() + status: models.TextField = models.TextField( choices=REQUEST_STATUS_CHOICES, default=REQUEST_STATUS_PENDING, db_index=True, ) - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) + created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) def to_dict(self) -> dict: """Serialize to a JSON-compatible dict.""" diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index fb92139cf..f44b88c19 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -51,9 +51,9 @@ def post(self, request): backend_request.save() created.append(backend_request.to_dict()) except ValidationError as e: - return HttpResponseBadRequest(str(e), status=400) + return HttpResponseBadRequest(bytes(str(e), encoding="utf-8"), status=400) except IntegrityError: - return HttpResponseBadRequest("Invalid request data.", status=400) + return HttpResponseBadRequest(b"Invalid request data.", status=400) return JsonResponse(created, safe=False, status=201) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml new file mode 100644 index 000000000..bf5b72e0d --- /dev/null +++ 
b/haproxy-route-policy/pyproject.toml @@ -0,0 +1,22 @@ +[project] +name = "haproxy-route-policy" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "django>=6.0.3", + "djangorestframework>=3.16.1", + "validators>=0.35.0", +] + +[dependency-groups] +lint = [ + "codespell>=2.4.2", + "django-stubs>=6.0.0", + "django-types>=0.23.0", + "djangorestframework-stubs>=3.16.8", + "djangorestframework-types>=0.9.0", + "mypy>=1.19.1", + "ruff>=0.15.6", +] diff --git a/haproxy-route-policy/requirements.txt b/haproxy-route-policy/requirements.txt deleted file mode 100644 index df9f7bbe8..000000000 --- a/haproxy-route-policy/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -Django==6.0.3 -djangorestframework==3.16.1 -validators==0.35.0 diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml new file mode 100644 index 000000000..819c5fbe6 --- /dev/null +++ b/haproxy-route-policy/tox.toml @@ -0,0 +1,56 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +skipsdist = true +skip_missing_interpreters = true +requires = ["tox>=4.21"] +no_package = true + +[env_run_base] +passenv = ["PYTHONPATH"] +runner = "uv-venv-lock-runner" + +[env_run_base.setenv] +PYTHONPATH = "{toxinidir}:{[vars]src_path}" +PYTHONBREAKPOINT = "ipdb.set_trace" +PY_COLORS = "1" + +[env.lint] +description = "Check code against coding style standards" +commands = [ + [ + "codespell", + "{toxinidir}", + ], + [ + "ruff", + "format", + "--check", + "--diff", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], + [ + "ruff", + "check", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], + [ + "mypy", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], +] +dependency_groups = ["lint"] + +[vars] +src_path = "{toxinidir}/policy/" +tst_path = "{toxinidir}/policy/tests" +all_path = ["{toxinidir}/policy/"] diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock new file mode 100644 index 000000000..17c0cdf21 --- /dev/null +++ b/haproxy-route-policy/uv.lock @@ -0,0 +1,339 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "asgiref" +version = "3.11.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/63/40/f03da1264ae8f7cfdbf9146542e5e7e8100a4c66ab48e791df9a03d3f6c0/asgiref-3.11.1.tar.gz", hash = "sha256:5f184dc43b7e763efe848065441eac62229c9f7b0475f41f80e207a114eda4ce", size = 38550, upload-time = "2026-02-03T13:30:14.33Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/0a/a72d10ed65068e115044937873362e6e32fab1b7dce0046aeb224682c989/asgiref-3.11.1-py3-none-any.whl", hash = "sha256:e8667a091e69529631969fd45dc268fa79b99c92c5fcdda727757e52146ec133", size = 24345, upload-time = "2026-02-03T13:30:13.039Z" }, +] + +[[package]] +name = "codespell" +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/2d/9d/1d0903dff693160f893ca6abcabad545088e7a2ee0a6deae7c24e958be69/codespell-2.4.2.tar.gz", hash = "sha256:3c33be9ae34543807f088aeb4832dfad8cb2dae38da61cac0a7045dd376cfdf3", size = 352058, upload-time = "2026-03-05T18:10:42.936Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/a1/52fa05533e95fe45bcc09bcf8a503874b1c08f221a4e35608017e0938f55/codespell-2.4.2-py3-none-any.whl", hash = "sha256:97e0c1060cf46bd1d5db89a936c98db8c2b804e1fdd4b5c645e82a1ec6b1f886", size = 353715, upload-time = "2026-03-05T18:10:41.398Z" }, +] + +[[package]] +name = "django" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asgiref" }, + { name = "sqlparse" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/e1/894115c6bd70e2c8b66b0c40a3c367d83a5a48c034a4d904d31b62f7c53a/django-6.0.3.tar.gz", hash = "sha256:90be765ee756af8a6cbd6693e56452404b5ad15294f4d5e40c0a55a0f4870fe1", size = 10872701, upload-time = "2026-03-03T13:55:15.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/b1/23f2556967c45e34d3d3cf032eb1bd3ef925ee458667fb99052a0b3ea3a6/django-6.0.3-py3-none-any.whl", hash = "sha256:2e5974441491ddb34c3f13d5e7a9f97b07ba03bf70234c0a9c68b79bbb235bc3", size = 8358527, upload-time = "2026-03-03T13:55:10.552Z" }, +] + +[[package]] +name = "django-stubs" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django" }, + { name = "django-stubs-ext" }, + { name = "types-pyyaml" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/1a/24f0bdd54fccbd1ea0a72bfef2cadd2f53a2138d319c603f6519346b93fb/django_stubs-6.0.0.tar.gz", hash = "sha256:14e7c667d2de73dbaf91ae43d117f923639107d8d3e84a2257ebc101861f18ed", size = 272752, upload-time = "2026-03-17T00:25:12.195Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/63/a2/b0e8d16fa33e07aa096eacb12d032d4561ce0e7e21248b4d34e943daf8d9/django_stubs-6.0.0-py3-none-any.whl", hash = "sha256:747baa97fb9a5c1892ef93bf881d06b4f211f719da18e4ddfd1e74bbad71e752", size = 535559, upload-time = "2026-03-17T00:25:10.335Z" }, +] + +[[package]] +name = "django-stubs-ext" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/39/d9f4ae9506458bfb77bbcd45bf21cd5eb460026acac6727af456a9deabec/django_stubs_ext-6.0.0.tar.gz", hash = "sha256:fb860210b496e75ae751cadee02a3449d5a7599de68c8db9df40c84e559d9298", size = 6686, upload-time = "2026-03-17T00:24:33.688Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/63/f727323c6e422ad72846558d8464955448dfcac3268419929332fc6e31a5/django_stubs_ext-6.0.0-py3-none-any.whl", hash = "sha256:6f8c29e0dd5111fd36aa72519446c8a21c3e419e48c5d7dc7f418c8eec9c43ae", size = 10168, upload-time = "2026-03-17T00:24:32.289Z" }, +] + +[[package]] +name = "django-types" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-psycopg2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/9a/5c652bbc8694489782c415d7d6fa0782219e401aba25f4d1df2b95c3a34c/django_types-0.23.0.tar.gz", hash = "sha256:f97fb746166fb15a5f40e470a1fd7a58226349aac9e0a9cb8ae81deb14d94fd0", size = 208369, upload-time = "2026-02-04T00:36:23.33Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/de/471afcd92022642544f7866dd5620bc85f04e4312d5e29d7f2960f31f010/django_types-0.23.0-py3-none-any.whl", hash = "sha256:0727b13ae810c4b1f14eeac9872834ac928c99dc76584ea7c23afc4461e049dd", size = 379397, upload-time = "2026-02-04T00:36:21.783Z" }, +] + +[[package]] +name = "djangorestframework" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name 
= "django" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/95/5376fe618646fde6899b3cdc85fd959716bb67542e273a76a80d9f326f27/djangorestframework-3.16.1.tar.gz", hash = "sha256:166809528b1aced0a17dc66c24492af18049f2c9420dbd0be29422029cfc3ff7", size = 1089735, upload-time = "2025-08-06T17:50:53.251Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/ce/bf8b9d3f415be4ac5588545b5fcdbbb841977db1c1d923f7568eeabe1689/djangorestframework-3.16.1-py3-none-any.whl", hash = "sha256:33a59f47fb9c85ede792cbf88bde71893bcda0667bc573f784649521f1102cec", size = 1080442, upload-time = "2025-08-06T17:50:50.667Z" }, +] + +[[package]] +name = "djangorestframework-stubs" +version = "3.16.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django-stubs" }, + { name = "types-pyyaml" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/d5/87166a827833eb39703856ef957ca0fb4e9d15285331251186a2e738c20c/djangorestframework_stubs-3.16.8.tar.gz", hash = "sha256:f6d464b54fa2f929610e957446c04e6ac29558265418e0a2d9f653a4cdd410b5", size = 32312, upload-time = "2026-02-03T22:35:53.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/e9/d9c363b08d07d975c21793fe821b2020dfd3627ac4ce19c5c12df94ce9d0/djangorestframework_stubs-3.16.8-py3-none-any.whl", hash = "sha256:c5bf61def0f330a071dd5f470f05710189d06c467b3f3e186b32c5a23d4952fb", size = 56517, upload-time = "2026-02-03T22:35:50.67Z" }, +] + +[[package]] +name = "djangorestframework-types" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/5d/1a21a5fd10ad9980dcb934b8221934dee2b6b97af5edc58cb169558c0831/djangorestframework_types-0.9.0.tar.gz", hash = "sha256:aa6b27fbdab5ff4ab1dfa5376f3b6ec45713ce48dbcdd4226bf3e1410f0deaca", size = 32521, upload-time = "2024-10-10T00:42:04.01Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a4/5f/d908ce938356b209d4d27a7fb159ab9100b8814396a69c0204bb66e38703/djangorestframework_types-0.9.0-py3-none-any.whl", hash = "sha256:5e4258fe43774d0a3d018780170bd702bf615407fe244453ea5ec6e6676b98c4", size = 54947, upload-time = "2024-10-10T00:42:02.311Z" }, +] + +[[package]] +name = "haproxy-route-policy" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "django" }, + { name = "djangorestframework" }, + { name = "validators" }, +] + +[package.dev-dependencies] +lint = [ + { name = "codespell" }, + { name = "django-stubs" }, + { name = "django-types" }, + { name = "djangorestframework-stubs" }, + { name = "djangorestframework-types" }, + { name = "mypy" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "django", specifier = ">=6.0.3" }, + { name = "djangorestframework", specifier = ">=3.16.1" }, + { name = "validators", specifier = ">=0.35.0" }, +] + +[package.metadata.requires-dev] +lint = [ + { name = "codespell", specifier = ">=2.4.2" }, + { name = "django-stubs", specifier = ">=6.0.0" }, + { name = "django-types", specifier = ">=0.23.0" }, + { name = "djangorestframework-stubs", specifier = ">=3.16.8" }, + { name = "djangorestframework-types", specifier = ">=0.9.0" }, + { name = "mypy", specifier = ">=1.19.1" }, + { name = "ruff", specifier = ">=0.15.6" }, +] + +[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" }, + { url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/dd/36/e725903416409a533d92398e88ce665476f275081d0d7d42f9c4951999e5/librt-0.8.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:039b9f2c506bd0ab0f8725aa5ba339c6f0cd19d3b514b50d134789809c24285d", size = 209991, upload-time = "2026-02-17T16:11:45.462Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/8d908a152e1875c9f8eac96c97a480df425e657cdb47854b9efaa4998889/librt-0.8.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bb54f1205a3a6ab41a6fd71dfcdcbd278670d3a90ca502a30d9da583105b6f7", size = 224476, upload-time = "2026-02-17T16:11:46.542Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b8/a22c34f2c485b8903a06f3fe3315341fe6876ef3599792344669db98fcff/librt-0.8.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:05bd41cdee35b0c59c259f870f6da532a2c5ca57db95b5f23689fcb5c9e42440", size = 217518, upload-time = "2026-02-17T16:11:47.746Z" }, + { url = "https://files.pythonhosted.org/packages/79/6f/5c6fea00357e4f82ba44f81dbfb027921f1ab10e320d4a64e1c408d035d9/librt-0.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", 
hash = "sha256:adfab487facf03f0d0857b8710cf82d0704a309d8ffc33b03d9302b4c64e91a9", size = 225116, upload-time = "2026-02-17T16:11:49.298Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a0/95ced4e7b1267fe1e2720a111685bcddf0e781f7e9e0ce59d751c44dcfe5/librt-0.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:153188fe98a72f206042be10a2c6026139852805215ed9539186312d50a8e972", size = 217751, upload-time = "2026-02-17T16:11:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/93/c2/0517281cb4d4101c27ab59472924e67f55e375bc46bedae94ac6dc6e1902/librt-0.8.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dd3c41254ee98604b08bd5b3af5bf0a89740d4ee0711de95b65166bf44091921", size = 218378, upload-time = "2026-02-17T16:11:51.783Z" }, + { url = "https://files.pythonhosted.org/packages/43/e8/37b3ac108e8976888e559a7b227d0ceac03c384cfd3e7a1c2ee248dbae79/librt-0.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e0d138c7ae532908cbb342162b2611dbd4d90c941cd25ab82084aaf71d2c0bd0", size = 241199, upload-time = "2026-02-17T16:11:53.561Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/35812d041c53967fedf551a39399271bbe4257e681236a2cf1a69c8e7fa1/librt-0.8.1-cp312-cp312-win32.whl", hash = "sha256:43353b943613c5d9c49a25aaffdba46f888ec354e71e3529a00cca3f04d66a7a", size = 54917, upload-time = "2026-02-17T16:11:54.758Z" }, + { url = "https://files.pythonhosted.org/packages/de/d1/fa5d5331b862b9775aaf2a100f5ef86854e5d4407f71bddf102f4421e034/librt-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff8baf1f8d3f4b6b7257fcb75a501f2a5499d0dda57645baa09d4d0d34b19444", size = 62017, upload-time = "2026-02-17T16:11:55.748Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7c/c614252f9acda59b01a66e2ddfd243ed1c7e1deab0293332dfbccf862808/librt-0.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f2ae3725904f7377e11cc37722d5d401e8b3d5851fb9273d7f4fe04f6b3d37d", size = 52441, upload-time = "2026-02-17T16:11:56.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/3c/f614c8e4eaac7cbf2bbdf9528790b21d89e277ee20d57dc6e559c626105f/librt-0.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e6bad1cd94f6764e1e21950542f818a09316645337fd5ab9a7acc45d99a8f35", size = 66529, upload-time = "2026-02-17T16:11:57.809Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/5836544a45100ae411eda07d29e3d99448e5258b6e9c8059deb92945f5c2/librt-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cf450f498c30af55551ba4f66b9123b7185362ec8b625a773b3d39aa1a717583", size = 68669, upload-time = "2026-02-17T16:11:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/06/53/f0b992b57af6d5531bf4677d75c44f095f2366a1741fb695ee462ae04b05/librt-0.8.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eca45e982fa074090057132e30585a7e8674e9e885d402eae85633e9f449ce6c", size = 199279, upload-time = "2026-02-17T16:11:59.862Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/4848cc16e268d14280d8168aee4f31cea92bbd2b79ce33d3e166f2b4e4fc/librt-0.8.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c3811485fccfda840861905b8c70bba5ec094e02825598bb9d4ca3936857a04", size = 210288, upload-time = "2026-02-17T16:12:00.954Z" }, + { url = "https://files.pythonhosted.org/packages/52/05/27fdc2e95de26273d83b96742d8d3b7345f2ea2bdbd2405cc504644f2096/librt-0.8.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363", size = 224809, upload-time = "2026-02-17T16:12:02.108Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d0/78200a45ba3240cb042bc597d6f2accba9193a2c57d0356268cbbe2d0925/librt-0.8.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5212a5bd7fae98dae95710032902edcd2ec4dc994e883294f75c857b83f9aba0", size = 218075, upload-time = "2026-02-17T16:12:03.631Z" }, + { 
url = "https://files.pythonhosted.org/packages/af/72/a210839fa74c90474897124c064ffca07f8d4b347b6574d309686aae7ca6/librt-0.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e692aa2d1d604e6ca12d35e51fdc36f4cda6345e28e36374579f7ef3611b3012", size = 225486, upload-time = "2026-02-17T16:12:04.725Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c1/a03cc63722339ddbf087485f253493e2b013039f5b707e8e6016141130fa/librt-0.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4be2a5c926b9770c9e08e717f05737a269b9d0ebc5d2f0060f0fe3fe9ce47acb", size = 218219, upload-time = "2026-02-17T16:12:05.828Z" }, + { url = "https://files.pythonhosted.org/packages/58/f5/fff6108af0acf941c6f274a946aea0e484bd10cd2dc37610287ce49388c5/librt-0.8.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fd1a720332ea335ceb544cf0a03f81df92abd4bb887679fd1e460976b0e6214b", size = 218750, upload-time = "2026-02-17T16:12:07.09Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/5a387bfef30ec1e4b4f30562c8586566faf87e47d696768c19feb49e3646/librt-0.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2af9e01e0ef80d95ae3c720be101227edae5f2fe7e3dc63d8857fadfc5a1d", size = 241624, upload-time = "2026-02-17T16:12:08.43Z" }, + { url = "https://files.pythonhosted.org/packages/d4/be/24f8502db11d405232ac1162eb98069ca49c3306c1d75c6ccc61d9af8789/librt-0.8.1-cp313-cp313-win32.whl", hash = "sha256:086a32dbb71336627e78cc1d6ee305a68d038ef7d4c39aaff41ae8c9aa46e91a", size = 54969, upload-time = "2026-02-17T16:12:09.633Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/c9fdf6cb2a529c1a092ce769a12d88c8cca991194dfe641b6af12fa964d2/librt-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:e11769a1dbda4da7b00a76cfffa67aa47cfa66921d2724539eee4b9ede780b79", size = 62000, upload-time = "2026-02-17T16:12:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/68f80ca3ac4924f250cdfa6e20142a803e5e50fca96ef5148c52ee8c10ea/librt-0.8.1-cp313-cp313-win_arm64.whl", hash = 
"sha256:924817ab3141aca17893386ee13261f1d100d1ef410d70afe4389f2359fea4f0", size = 52495, upload-time = "2026-02-17T16:12:11.633Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = "https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = "https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", 
hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = "sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = "https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = "2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = 
"2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, + { url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 
10942257, upload-time = "2026-03-12T23:05:23.076Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, + { url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, + { url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, + { url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = 
"2026-03-12T23:05:31.236Z" }, + { url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, + { url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, + { url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, + { url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, +] + +[[package]] +name = "sqlparse" +version = "0.5.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/76/437d71068094df0726366574cf3432a4ed754217b436eb7429415cf2d480/sqlparse-0.5.5.tar.gz", hash = "sha256:e20d4a9b0b8585fdf63b10d30066c7c94c5d7a7ec47c889a2d83a3caa93ff28e", size = 120815, upload-time = "2025-12-19T07:17:45.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/4b/359f28a903c13438ef59ebeee215fb25da53066db67b305c125f1c6d2a25/sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba", size = 46138, upload-time = "2025-12-19T07:17:46.573Z" }, +] + +[[package]] +name = "types-psycopg2" +version = "2.9.21.20260223" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/55/1f/4daff0ce5e8e191844e65aaa793ed1b9cb40027dc2700906ecf2b6bcc0ed/types_psycopg2-2.9.21.20260223.tar.gz", hash = "sha256:78ed70de2e56bc6b5c26c8c1da8e9af54e49fdc3c94d1504609f3519e2b84f02", size = 27090, upload-time = 
"2026-02-23T04:11:18.177Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/e7/c566df58410bc0728348b514e718f0b38fa0d248b5c10599a11494ba25d2/types_psycopg2-2.9.21.20260223-py3-none-any.whl", hash = "sha256:c6228ade72d813b0624f4c03feeb89471950ac27cd0506b5debed6f053086bc8", size = 24919, upload-time = "2026-02-23T04:11:17.214Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250915" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + +[[package]] +name = "validators" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399, upload-time = "2025-05-01T05:42:06.7Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, +] From 02bb32f7111286229fffd390ab0760b4ba0bdf9f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 012/201] add unit testing --- haproxy-route-policy/tox.toml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index 819c5fbe6..d86392900 100644 --- a/haproxy-route-policy/tox.toml +++ b/haproxy-route-policy/tox.toml @@ -15,6 +15,20 @@ PYTHONPATH = "{toxinidir}:{[vars]src_path}" PYTHONBREAKPOINT = "ipdb.set_trace" PY_COLORS = "1" +[env.unit] +description = "Run unit tests" +commands = [ + [ + "uv", + "run", + "manage.py", + "test", + "policy", + 
"--settings=haproxy_route_policy.test_settings", + "-v2", + ], +] + [env.lint] description = "Check code against coding style standards" commands = [ From 99f56fd2b8a45b04e586f5371fe86857f0f86d36 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 013/201] remove custom test --- .github/workflows/test.yaml | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 1185d12da..23e5cc81d 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -24,16 +24,3 @@ jobs: self-hosted-runner-image: "noble" working-directory: ${{ matrix.charm.working-directory }} with-uv: true - - haproxy-route-policy: - name: HAProxy-route Policy App Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v6.0.1 - - uses: actions/setup-python@v6 - with: - python-version: "3.x" - - working-directory: ./haproxy-route-policy - run: | - pip install -r requirements.txt - python3 ./manage.py test --settings=haproxy_route_policy.test_settings From e825d3a65989993ab44b946e7470618423960e5c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 014/201] update migration --- .../policy/migrations/0001_initial.py | 46 ++++++------------- 1 file changed, 15 insertions(+), 31 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 403c762f5..10d4f2d61 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -1,45 +1,29 @@ -# Generated by Django 6.0.3 on 2026-03-16 15:53 +# Generated by Django 6.0.3 on 2026-03-17 20:21 +import policy.db_models from django.db import migrations, models class Migration(migrations.Migration): + initial = True - dependencies = [] + dependencies = [ + ] operations = [ migrations.CreateModel( - name="BackendRequest", + name='BackendRequest', 
fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("relation_id", models.IntegerField()), - ("hostname_acls", models.JSONField(default=list)), - ("backend_name", models.TextField()), - ("paths", models.JSONField(default=list)), - ("port", models.IntegerField()), - ( - "status", - models.TextField( - choices=[ - ("pending", "pending"), - ("accepted", "accepted"), - ("rejected", "rejected"), - ], - db_index=True, - default="pending", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), + ('id', models.BigAutoField(primary_key=True, serialize=False)), + ('relation_id', models.IntegerField()), + ('hostname_acls', models.JSONField(blank=True, default=list, validators=[policy.db_models.validate_hostname_acls])), + ('backend_name', models.TextField()), + ('paths', models.JSONField(blank=True, default=list)), + ('port', models.IntegerField()), + ('status', models.TextField(choices=[('pending', 'pending'), ('accepted', 'accepted'), ('rejected', 'rejected')], db_index=True, default='pending')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), ], ), ] From cceb382d43384ca979139cebc9d85a302bc90cef Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:53 +0200 Subject: [PATCH 015/201] Wrap creation under `transaction.atomic` Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> --- haproxy-route-policy/policy/views.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index f44b88c19..fc2733b87 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -13,6 +13,7 @@ from django.core.exceptions import ValidationError from .db_models import BackendRequest, 
REQUEST_STATUS_PENDING from django.db.utils import IntegrityError +from django.db import transaction class ListCreateRequestsView(APIView): @@ -38,18 +39,19 @@ def post(self, request): created = [] try: - for item in request.data: - backend_request = BackendRequest( - relation_id=item.get("relation_id"), - hostname_acls=item.get("hostname_acls", []), - backend_name=item.get("backend_name"), - paths=item.get("paths", []), - port=item.get("port"), - status=REQUEST_STATUS_PENDING, - ) - backend_request.full_clean() - backend_request.save() - created.append(backend_request.to_dict()) + with transaction.atomic(): + for item in request.data: + backend_request = BackendRequest( + relation_id=item.get("relation_id"), + hostname_acls=item.get("hostname_acls", []), + backend_name=item.get("backend_name"), + paths=item.get("paths", []), + port=item.get("port"), + status=REQUEST_STATUS_PENDING, + ) + backend_request.full_clean() + backend_request.save() + created.append(backend_request.to_dict()) except ValidationError as e: return HttpResponseBadRequest(bytes(str(e), encoding="utf-8"), status=400) except IntegrityError: From e841faa328435e813bd4ce5a3c0600744033485f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 016/201] Potential fix for pull request finding Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> --- haproxy-route-policy/policy/db_models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 81b0921bd..660c16d11 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -27,7 +27,6 @@ def validate_hostname_acls(value: typing.Any): """Validate that the value is a list of valid hostnames.""" - logger.info("Validating hostname_acls: %s", value) if not isinstance(value, list): raise ValidationError("hostname_acls must be a list.") if invalid_hostnames := 
[ From 597aa03296c49085b932f8e6a966079df0478e40 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 017/201] remove unused code --- haproxy-route-policy/policy/db_models.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 660c16d11..e523453f9 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -8,9 +8,6 @@ from django.db import models from validators import domain from django.core.exceptions import ValidationError -import logging - -logger = logging.getLogger(__name__) REQUEST_STATUS_PENDING = "pending" REQUEST_STATUS_ACCEPTED = "accepted" @@ -83,8 +80,3 @@ def to_dict(self) -> dict: if self.updated_at else None, } - - @classmethod - def required_fields(cls): - """Return a list of fields required for creating a BackendRequest.""" - return ["relation_id", "backend_name", "port"] From 5c9441d3b8aa511cfb14c6009c82a4d6601af3a9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 018/201] minor fixes to settings --- haproxy-route-policy/haproxy_route_policy/settings.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index f628c2208..df4f9c1a7 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -18,16 +18,8 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent - - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/6.0/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! 
SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") - -# SECURITY WARNING: don't run with debug turned on in production! -DEBUG = os.environ.get("DJANGO_DEBUG", "True") == "True" +DEBUG = os.environ.get("DJANGO_DEBUG", "").lower() == "true" ALLOWED_HOSTS = [] From 9e981b63f9f107caff691e3e77bfe7b918c60f7f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 019/201] use django serializer --- .../policy/migrations/0001_initial.py | 42 +++++++++++++------ haproxy-route-policy/policy/serializers.py | 26 ++++++++++++ .../policy/tests/test_views.py | 15 ------- haproxy-route-policy/policy/views.py | 20 ++++----- 4 files changed, 63 insertions(+), 40 deletions(-) create mode 100644 haproxy-route-policy/policy/serializers.py diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 10d4f2d61..26cc6998e 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -5,25 +5,41 @@ class Migration(migrations.Migration): - initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='BackendRequest', + name="BackendRequest", fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('relation_id', models.IntegerField()), - ('hostname_acls', models.JSONField(blank=True, default=list, validators=[policy.db_models.validate_hostname_acls])), - ('backend_name', models.TextField()), - ('paths', models.JSONField(blank=True, default=list)), - ('port', models.IntegerField()), - ('status', models.TextField(choices=[('pending', 'pending'), ('accepted', 'accepted'), ('rejected', 'rejected')], db_index=True, default='pending')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("relation_id", models.IntegerField()), + ( + 
"hostname_acls", + models.JSONField( + blank=True, + default=list, + validators=[policy.db_models.validate_hostname_acls], + ), + ), + ("backend_name", models.TextField()), + ("paths", models.JSONField(blank=True, default=list)), + ("port", models.IntegerField()), + ( + "status", + models.TextField( + choices=[ + ("pending", "pending"), + ("accepted", "accepted"), + ("rejected", "rejected"), + ], + db_index=True, + default="pending", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py new file mode 100644 index 000000000..6130dc563 --- /dev/null +++ b/haproxy-route-policy/policy/serializers.py @@ -0,0 +1,26 @@ +from rest_framework import serializers +from policy.db_models import ( + BackendRequest, +) + + +class BackendRequestSerializer(serializers.ModelSerializer): + class Meta: # pyright: ignore[reportIncompatibleVariableOverride] + model = BackendRequest + fields = [ + "id", + "relation_id", + "hostname_acls", + "backend_name", + "paths", + "port", + "status", + "created_at", + "updated_at", + ] + + def create(self, validated_data): + """ + Create and return a new `BackendRequest` instance, given the validated data. 
+ """ + return BackendRequest.objects.create(**validated_data) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index e230a6d08..049893645 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -87,21 +87,6 @@ def test_bulk_create(self): self.assertEqual(data[1]["port"], 443) self.assertEqual(db_models.BackendRequest.objects.count(), 2) - def test_bulk_create_all_set_to_pending(self): - """POST always sets status to pending regardless of input.""" - payload = [ - { - "relation_id": 1, - "backend_name": "test", - "status": "accepted", - "port": 443, - }, - ] - response = self.client.post("/api/v1/requests", data=payload, format="json") - self.assertEqual(response.status_code, 201) - self.assertEqual(response.json()[0]["status"], "pending") - self.assertEqual(response.json()[0]["port"], 443) - def test_bulk_create_rejects_non_list(self): """POST returns 400 when the body is not a list.""" response = self.client.post( diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index fc2733b87..8c1449a7a 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -11,9 +11,10 @@ ) from rest_framework.views import APIView from django.core.exceptions import ValidationError -from .db_models import BackendRequest, REQUEST_STATUS_PENDING +from .db_models import BackendRequest from django.db.utils import IntegrityError from django.db import transaction +from policy import serializers class ListCreateRequestsView(APIView): @@ -40,18 +41,13 @@ def post(self, request): created = [] try: with transaction.atomic(): - for item in request.data: - backend_request = BackendRequest( - relation_id=item.get("relation_id"), - hostname_acls=item.get("hostname_acls", []), - backend_name=item.get("backend_name"), - paths=item.get("paths", []), - port=item.get("port"), - status=REQUEST_STATUS_PENDING, + 
for backend_request in request.data: + serializer = serializers.BackendRequestSerializer( + data=backend_request ) - backend_request.full_clean() - backend_request.save() - created.append(backend_request.to_dict()) + if serializer.is_valid(raise_exception=True): + serializer.save() + created.append(serializer.data) except ValidationError as e: return HttpResponseBadRequest(bytes(str(e), encoding="utf-8"), status=400) except IntegrityError: From f4508e777e42e51842805276c1f70dedc17675c0 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 020/201] update gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6d2a4e023..453102298 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,4 @@ terraform/**/.terraform* terraform/**/.tfvars terraform/**/*.tfstate* haproxy-route-policy/db.sqlite3 - +haproxy-route-policy/.python-version From 6a496602101a1392f58f97d545a0b14ce18dd6d9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 021/201] Potential fix for pull request finding Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> --- haproxy-route-policy/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index bf5b72e0d..1ef141aee 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "haproxy-route-policy" version = "0.1.0" -description = "Add your description here" +description = "Django REST API for managing HAProxy routing policies." 
readme = "README.md" requires-python = ">=3.12" dependencies = [ From ef41d0fbaaa48e09fedf16bc1722d71199aeba46 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 022/201] update view to use django rest --- haproxy-route-policy/policy/db_models.py | 19 -------- haproxy-route-policy/policy/serializers.py | 18 +------ .../policy/tests/test_models.py | 20 -------- haproxy-route-policy/policy/views.py | 48 +++++++++++-------- 4 files changed, 28 insertions(+), 77 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index e523453f9..4a670cfe4 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -3,7 +3,6 @@ """Database models for the haproxy-route-policy application.""" -from datetime import datetime import typing from django.db import models from validators import domain @@ -62,21 +61,3 @@ class BackendRequest(models.Model): ) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - - def to_dict(self) -> dict: - """Serialize to a JSON-compatible dict.""" - return { - "id": self.id, - "relation_id": self.relation_id, - "hostname_acls": self.hostname_acls, - "backend_name": self.backend_name, - "paths": self.paths, - "port": self.port, - "status": self.status, - "created_at": typing.cast(datetime, self.created_at).isoformat() - if self.created_at - else None, - "updated_at": typing.cast(datetime, self.updated_at).isoformat() - if self.updated_at - else None, - } diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py index 6130dc563..56a1549f7 100644 --- a/haproxy-route-policy/policy/serializers.py +++ b/haproxy-route-policy/policy/serializers.py @@ -7,20 +7,4 @@ class BackendRequestSerializer(serializers.ModelSerializer): class Meta: # pyright: ignore[reportIncompatibleVariableOverride] 
model = BackendRequest - fields = [ - "id", - "relation_id", - "hostname_acls", - "backend_name", - "paths", - "port", - "status", - "created_at", - "updated_at", - ] - - def create(self, validated_data): - """ - Create and return a new `BackendRequest` instance, given the validated data. - """ - return BackendRequest.objects.create(**validated_data) + fields = "__all__" diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 7d3233715..e9981db1d 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -41,23 +41,3 @@ def test_create_with_all_fields(self): self.assertEqual(request.paths, ["/api", "/health"]) self.assertEqual(request.port, 443) self.assertEqual(request.status, db_models.REQUEST_STATUS_ACCEPTED) - - def test_to_jsonable(self): - """Test serialisation to a JSON-compatible dict.""" - request = db_models.BackendRequest.objects.create( - relation_id=2, - hostname_acls=["host.example.com"], - backend_name="backend-a", - paths=["/v1"], - port=443, - ) - data = request.to_dict() - self.assertEqual(data["id"], request.pk) - self.assertEqual(data["relation_id"], 2) - self.assertEqual(data["hostname_acls"], ["host.example.com"]) - self.assertEqual(data["backend_name"], "backend-a") - self.assertEqual(data["paths"], ["/v1"]) - self.assertEqual(data["port"], 443) - self.assertEqual(data["status"], db_models.REQUEST_STATUS_PENDING) - self.assertIn("created_at", data) - self.assertIn("updated_at", data) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 8c1449a7a..8a5a7d564 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -3,15 +3,16 @@ """REST API views for backend requests.""" -from django.http import ( - HttpResponse, - HttpResponseNotFound, - HttpResponseBadRequest, - JsonResponse, -) +from policy.db_models import BackendRequest from 
rest_framework.views import APIView +from rest_framework.response import Response +from rest_framework.status import ( + HTTP_201_CREATED, + HTTP_400_BAD_REQUEST, + HTTP_404_NOT_FOUND, + HTTP_204_NO_CONTENT, +) from django.core.exceptions import ValidationError -from .db_models import BackendRequest from django.db.utils import IntegrityError from django.db import transaction from policy import serializers @@ -22,11 +23,12 @@ class ListCreateRequestsView(APIView): def get(self, request): """List all requests, optionally filtered by status.""" - status = request.GET.get("status") - queryset = BackendRequest.objects.all() - if status: - queryset = queryset.filter(status=status) - return JsonResponse([r.to_dict() for r in queryset.order_by("id")], safe=False) + filter = ( + {"status": request.GET.get("status")} if request.GET.get("status") else {} + ) + queryset = BackendRequest.objects.all().filter(**filter) + serializer = serializers.BackendRequestSerializer(queryset, many=True) + return Response(serializer.data) def post(self, request): """Bulk create backend requests. @@ -34,8 +36,9 @@ def post(self, request): All new requests are set to 'pending' (evaluation logic is deferred). 
""" if not isinstance(request.data, list): - return JsonResponse( - {"error": "Expected a list of request objects."}, status=400 + return Response( + {"error": "Expected a list of request objects."}, + status=HTTP_400_BAD_REQUEST, ) created = [] @@ -49,24 +52,27 @@ def post(self, request): serializer.save() created.append(serializer.data) except ValidationError as e: - return HttpResponseBadRequest(bytes(str(e), encoding="utf-8"), status=400) + return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) except IntegrityError: - return HttpResponseBadRequest(b"Invalid request data.", status=400) - return JsonResponse(created, safe=False, status=201) + return Response( + {"error": "Invalid request data."}, status=HTTP_400_BAD_REQUEST + ) + return Response(created, status=HTTP_201_CREATED) class RequestDetailView(APIView): """View for getting or deleting a single backend request.""" - def get(self, request, pk): + def get(self, _request, pk): """Get a request by ID.""" try: backend_request = BackendRequest.objects.get(pk=pk) + serializer = serializers.BackendRequestSerializer(backend_request) except BackendRequest.DoesNotExist: - return HttpResponseNotFound() - return JsonResponse(backend_request.to_dict()) + return Response(status=HTTP_404_NOT_FOUND) + return Response(serializer.data) def delete(self, request, pk): """Delete a request by ID.""" BackendRequest.objects.filter(pk=pk).delete() - return HttpResponse(status=204) + return Response(status=HTTP_204_NO_CONTENT) From 447e25f83e259235b4d43a65b6c204c7c7bba16d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 023/201] remove python-version --- .gitignore | 1 - haproxy-route-policy/.python-version | 1 - 2 files changed, 2 deletions(-) delete mode 100644 haproxy-route-policy/.python-version diff --git a/.gitignore b/.gitignore index 453102298..906e8929b 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,3 @@ terraform/**/.terraform* terraform/**/.tfvars 
terraform/**/*.tfstate* haproxy-route-policy/db.sqlite3 -haproxy-route-policy/.python-version diff --git a/haproxy-route-policy/.python-version b/haproxy-route-policy/.python-version deleted file mode 100644 index e4fba2183..000000000 --- a/haproxy-route-policy/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.12 From 2601119442a4e7b800dd358d7b845b0fec014c76 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 024/201] update gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 906e8929b..c08ae9c7e 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,5 @@ terraform/**/.terraform* terraform/**/.tfvars terraform/**/*.tfstate* haproxy-route-policy/db.sqlite3 +haproxy-route-policy/.python-version + From b7f3827112c3dc276381182719aa500c431798c9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 025/201] add missing license headers --- haproxy-route-policy/policy/migrations/__init__.py | 2 ++ haproxy-route-policy/policy/serializers.py | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/haproxy-route-policy/policy/migrations/__init__.py b/haproxy-route-policy/policy/migrations/__init__.py index e69de29bb..fa89e9d7f 100644 --- a/haproxy-route-policy/policy/migrations/__init__.py +++ b/haproxy-route-policy/policy/migrations/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py index 56a1549f7..27139efb3 100644 --- a/haproxy-route-policy/policy/serializers.py +++ b/haproxy-route-policy/policy/serializers.py @@ -1,3 +1,8 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Serializers for the haproxy-route-policy application.""" + from rest_framework import serializers from policy.db_models import ( BackendRequest, From 3d478436d66217e6da3a2026609663567e3b1cd8 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 026/201] Add rules engine --- haproxy-route-policy/policy/db_models.py | 65 ++++++ ...r_backendrequest_hostname_acls_and_more.py | 43 ++++ .../policy/tests/test_models.py | 133 ++++++++++++- .../policy/tests/test_views.py | 187 +++++++++++++++++- haproxy-route-policy/policy/urls.py | 10 + haproxy-route-policy/policy/views.py | 83 +++++++- 6 files changed, 517 insertions(+), 4 deletions(-) create mode 100644 haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 4a670cfe4..d5919af42 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -4,6 +4,7 @@ """Database models for the haproxy-route-policy application.""" import typing +import uuid from django.db import models from validators import domain from django.core.exceptions import ValidationError @@ -20,6 +21,24 @@ REQUEST_STATUS_CHOICES = [(status, status) for status in REQUEST_STATUSES] +RULE_ACTION_ALLOW = "allow" +RULE_ACTION_DENY = "deny" + +RULE_ACTIONS = [ + RULE_ACTION_ALLOW, + RULE_ACTION_DENY, +] + +RULE_ACTION_CHOICES = [(action, action) for action in RULE_ACTIONS] + +RULE_KIND_HOSTNAME_AND_PATH_MATCH = "hostname_and_path_match" + +RULE_KINDS = [ + RULE_KIND_HOSTNAME_AND_PATH_MATCH, +] + +RULE_KIND_CHOICES = [(kind, kind) for kind in RULE_KINDS] + def validate_hostname_acls(value: typing.Any): """Validate that the value is a list of valid hostnames.""" @@ -61,3 +80,49 @@ class BackendRequest(models.Model): ) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = 
models.DateTimeField(auto_now=True) + + +class Rule(models.Model): + """A rule used to evaluate backend requests. + + Rules are matched against incoming backend requests to automatically + accept or deny them. Rules have a priority and an action (allow/deny). + + Attrs: + id: UUID primary key. + kind: The type of rule (e.g. hostname_and_path_match, match_request_id). + value: The rule value, structure depends on kind. + action: Whether the rule allows or denies matching requests. + priority: Rule priority (higher = evaluated first, deny wins on tie). + comment: Optional human-readable comment. + created_at: Timestamp when the rule was created. + updated_at: Timestamp when the rule was last updated. + """ + + id: models.UUIDField = models.UUIDField( + primary_key=True, default=uuid.uuid4, editable=False + ) + kind: models.TextField = models.TextField(choices=RULE_KIND_CHOICES) + value: models.JSONField = models.JSONField() + action: models.TextField = models.TextField(choices=RULE_ACTION_CHOICES) + priority: models.IntegerField = models.IntegerField(default=0, blank=True) + comment: models.TextField = models.TextField(default="", blank=True) + created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) + + def to_dict(self) -> dict: + """Serialize to a JSON-compatible dict.""" + return { + "id": str(self.id), + "kind": self.kind, + "value": self.value, + "action": self.action, + "priority": self.priority, + "comment": self.comment, + "created_at": typing.cast(datetime, self.created_at).isoformat() + if self.created_at + else None, + "updated_at": typing.cast(datetime, self.updated_at).isoformat() + if self.updated_at + else None, + } diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py new file mode 100644 index 
000000000..92ff9a4a1 --- /dev/null +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 6.0.3 on 2026-03-17 20:05 + +import policy.db_models +import uuid +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('policy', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='Rule', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('kind', models.TextField(choices=[('hostname_and_path_match', 'hostname_and_path_match'), ('match_request_id', 'match_request_id')])), + ('value', models.JSONField()), + ('action', models.TextField(choices=[('allow', 'allow'), ('deny', 'deny')])), + ('priority', models.IntegerField(default=0)), + ('comment', models.TextField(blank=True, default='')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.AlterField( + model_name='backendrequest', + name='hostname_acls', + field=models.JSONField(blank=True, default=list, validators=[policy.db_models.validate_hostname_acls]), + ), + migrations.AlterField( + model_name='backendrequest', + name='id', + field=models.BigAutoField(primary_key=True, serialize=False), + ), + migrations.AlterField( + model_name='backendrequest', + name='paths', + field=models.JSONField(blank=True, default=list), + ), + ] diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index e9981db1d..708e66554 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -1,9 +1,10 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. 
-"""Unit tests for the BackendRequest model.""" +"""Unit tests for the BackendRequest and Rule models.""" from django.test import TestCase +from django.core.exceptions import ValidationError from policy import db_models @@ -41,3 +42,133 @@ def test_create_with_all_fields(self): self.assertEqual(request.paths, ["/api", "/health"]) self.assertEqual(request.port, 443) self.assertEqual(request.status, db_models.REQUEST_STATUS_ACCEPTED) + + +class TestRuleModel(TestCase): + """Tests for Rule model creation, serialisation, and validation.""" + + def test_create_hostname_and_path_match_rule(self): + """Test creating a hostname_and_path_match rule with valid data.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": ["/api"]}, + action=db_models.RULE_ACTION_DENY, + priority=1, + comment="Deny example.com/api", + ) + rule.full_clean() + rule.save() + + self.assertIsNotNone(rule.id) + self.assertEqual(rule.kind, db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) + self.assertEqual(rule.value, {"hostnames": ["example.com"], "paths": ["/api"]}) + self.assertEqual(rule.action, db_models.RULE_ACTION_DENY) + self.assertEqual(rule.priority, 1) + self.assertEqual(rule.comment, "Deny example.com/api") + self.assertIsNotNone(rule.created_at) + self.assertIsNotNone(rule.updated_at) + + def test_create_rule_defaults(self): + """Test that default values are set correctly.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["test.com"], "paths": []}, + action=db_models.RULE_ACTION_ALLOW, + ) + rule.full_clean() + rule.save() + + self.assertEqual(rule.priority, 0) + self.assertEqual(rule.comment, "") + + def test_to_dict(self): + """Test serialisation to a JSON-compatible dict.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": []}, + action=db_models.RULE_ACTION_DENY, + priority=5, + 
comment="Test rule", + ) + rule.full_clean() + rule.save() + + data = rule.to_dict() + self.assertEqual(data["id"], str(rule.id)) + self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) + self.assertEqual(data["value"], {"hostnames": ["example.com"], "paths": []}) + self.assertEqual(data["action"], db_models.RULE_ACTION_DENY) + self.assertEqual(data["priority"], 5) + self.assertEqual(data["comment"], "Test rule") + self.assertIn("created_at", data) + self.assertIn("updated_at", data) + + def test_validate_hostname_and_path_match_requires_dict(self): + """Test that hostname_and_path_match rules require a dict value.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value="not-a-dict", + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_validate_hostname_and_path_match_requires_hostnames_key(self): + """Test that hostname_and_path_match rules require 'hostnames' key.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"paths": []}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_validate_hostname_and_path_match_requires_paths_key(self): + """Test that hostname_and_path_match rules require 'paths' key.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"]}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_validate_hostname_and_path_match_hostnames_must_be_list(self): + """Test that 'hostnames' must be a list.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": "example.com", "paths": []}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_validate_hostname_and_path_match_paths_must_be_list(self): + 
"""Test that 'paths' must be a list.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": "/api"}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_validate_rule_value_rejects_list(self): + """Test that the value field rejects list types.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value=["invalid"], + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_invalid_kind_rejected(self): + """Test that an invalid kind value is rejected.""" + rule = db_models.Rule( + kind="invalid_kind", + value=1, + action=db_models.RULE_ACTION_ALLOW, + ) + with self.assertRaises(ValidationError): + rule.full_clean() diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 049893645..340b8651f 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -3,6 +3,8 @@ """Integration tests for the policy REST API views.""" +import uuid + from django.test import TestCase from rest_framework.test import APIClient @@ -79,7 +81,7 @@ def test_bulk_create(self): self.assertEqual(data[0]["backend_name"], "backend-1") self.assertEqual(data[0]["status"], "pending") self.assertEqual(data[0]["hostname_acls"], ["example.com"]) - self.assertEqual(data[0]["paths"], second=["/api"]) + self.assertEqual(data[0]["paths"], ["/api"]) self.assertEqual(data[0]["port"], 443) self.assertEqual(data[1]["backend_name"], "backend-2") self.assertEqual(data[1]["hostname_acls"], []) @@ -134,3 +136,186 @@ def test_delete_nonexistent(self): """DELETE on a non-existent ID still returns 204 (idempotent).""" response = self.client.delete("/api/v1/requests/99999") self.assertEqual(response.status_code, 204) + + +class TestListCreateRulesView(TestCase): + """Tests for GET 
/api/v1/rules and POST /api/v1/rules.""" + + def setUp(self): + """Set up the API client.""" + self.client = APIClient() + + def test_list_empty(self): + """GET returns an empty list when no rules exist.""" + response = self.client.get("/api/v1/rules") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), []) + + def test_list_returns_all_ordered_by_priority(self): + """GET returns all rules ordered by descending priority.""" + rule_low = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": ["/api"]}, + action=db_models.RULE_ACTION_ALLOW, + priority=0, + ) + rule_low.full_clean() + rule_low.save() + rule_high = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.org"], "paths": ["/admin"]}, + action=db_models.RULE_ACTION_DENY, + priority=10, + ) + rule_high.full_clean() + rule_high.save() + + response = self.client.get("/api/v1/rules") + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(len(data), 2) + # Higher priority should come first + self.assertEqual(data[0]["priority"], 10) + self.assertEqual(data[1]["priority"], 0) + + def test_create_hostname_and_path_match_rule(self): + """POST creates a hostname_and_path_match rule.""" + payload = { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": ["/api"]}, + "action": db_models.RULE_ACTION_DENY, + "priority": 5, + "comment": "Block example.com/api", + } + response = self.client.post("/api/v1/rules", data=payload, format="json") + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) + self.assertEqual( + data["value"], {"hostnames": ["example.com"], "paths": ["/api"]} + ) + self.assertEqual(data["action"], db_models.RULE_ACTION_DENY) + self.assertEqual(data["priority"], 5) + 
self.assertEqual(data["comment"], "Block example.com/api") + self.assertIn("id", data) + self.assertIn("created_at", data) + self.assertEqual(db_models.Rule.objects.count(), 1) + + def test_create_rule_with_defaults(self): + """POST creates a rule with default priority and comment.""" + payload = { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": ["/api"]}, + "action": db_models.RULE_ACTION_DENY, + } + response = self.client.post("/api/v1/rules", data=payload, format="json") + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertEqual(data["priority"], 0) + self.assertEqual(data["comment"], "") + + def test_create_rule_missing_required_fields(self): + """POST returns 400 when required fields are missing.""" + payload = {"kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH} + response = self.client.post("/api/v1/rules", data=payload, format="json") + self.assertEqual(response.status_code, 400) + + def test_create_rule_invalid_kind(self): + """POST returns 400 when kind is invalid.""" + payload = { + "kind": "invalid_kind", + "value": 1, + "action": db_models.RULE_ACTION_ALLOW, + } + response = self.client.post("/api/v1/rules", data=payload, format="json") + self.assertEqual(response.status_code, 400) + + def test_create_rule_invalid_value_for_kind(self): + """POST returns 400 when value doesn't match kind requirements.""" + payload = { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": "not-a-dict", + "action": db_models.RULE_ACTION_DENY, + } + response = self.client.post("/api/v1/rules", data=payload, format="json") + self.assertEqual(response.status_code, 400) + + def test_create_rule_rejects_non_dict(self): + """POST returns 400 when the body is not a JSON object.""" + response = self.client.post( + "/api/v1/rules", data=[{"kind": "test"}], format="json" + ) + self.assertEqual(response.status_code, 400) + + +class TestRuleDetailView(TestCase): + """Tests for GET, 
PUT, DELETE /api/v1/rules/.""" + + def setUp(self): + """Set up the API client and a sample rule.""" + self.client = APIClient() + self.rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": ["/api"]}, + action=db_models.RULE_ACTION_DENY, + priority=1, + comment="Test rule", + ) + self.rule.full_clean() + self.rule.save() + + def test_get_existing(self): + """GET returns the rule matching the given ID.""" + response = self.client.get(f"/api/v1/rules/{self.rule.pk}") + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data["id"], str(self.rule.pk)) + self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) + + def test_get_not_found(self): + """GET returns 404 for a non-existent rule ID.""" + fake_id = uuid.uuid4() + response = self.client.get(f"/api/v1/rules/{fake_id}") + self.assertEqual(response.status_code, 404) + + def test_update_rule(self): + """PUT updates the rule fields.""" + payload = { + "priority": 10, + "comment": "Updated comment", + "action": db_models.RULE_ACTION_ALLOW, + } + response = self.client.put( + f"/api/v1/rules/{self.rule.pk}", data=payload, format="json" + ) + self.assertEqual(response.status_code, 200) + data = response.json() + self.assertEqual(data["priority"], 10) + self.assertEqual(data["comment"], "Updated comment") + self.assertEqual(data["action"], db_models.RULE_ACTION_ALLOW) + # Unchanged fields remain the same + self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) + self.assertEqual( + data["value"], {"hostnames": ["example.com"], "paths": ["/api"]} + ) + + def test_update_nonexistent(self): + """PUT returns 404 for a non-existent rule ID.""" + fake_id = uuid.uuid4() + response = self.client.put( + f"/api/v1/rules/{fake_id}", data={"priority": 5}, format="json" + ) + self.assertEqual(response.status_code, 404) + + def test_delete_existing(self): + """DELETE removes the rule and 
returns 204.""" + pk = self.rule.pk + response = self.client.delete(f"/api/v1/rules/{pk}") + self.assertEqual(response.status_code, 204) + self.assertFalse(db_models.Rule.objects.filter(pk=pk).exists()) + + def test_delete_nonexistent(self): + """DELETE on a non-existent rule ID still returns 204 (idempotent).""" + fake_id = uuid.uuid4() + response = self.client.delete(f"/api/v1/rules/{fake_id}") + self.assertEqual(response.status_code, 204) diff --git a/haproxy-route-policy/policy/urls.py b/haproxy-route-policy/policy/urls.py index f1580fabb..9cb4e203c 100644 --- a/haproxy-route-policy/policy/urls.py +++ b/haproxy-route-policy/policy/urls.py @@ -18,4 +18,14 @@ views.RequestDetailView.as_view(), name="api-request-detail", ), + path( + "api/v1/rules", + views.ListCreateRulesView.as_view(), + name="api-rules", + ), + path( + "api/v1/rules/", + views.RuleDetailView.as_view(), + name="api-rule-detail", + ), ] diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 8a5a7d564..a08a996e3 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -1,9 +1,9 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. 
-"""REST API views for backend requests.""" +"""REST API views for backend requests and rules.""" -from policy.db_models import BackendRequest +from policy.db_models import BackendRequest, Rule from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.status import ( @@ -76,3 +76,82 @@ def delete(self, request, pk): """Delete a request by ID.""" BackendRequest.objects.filter(pk=pk).delete() return Response(status=HTTP_204_NO_CONTENT) + + +class ListCreateRulesView(APIView): + """View for listing and creating rules.""" + + def get(self, request): + """List all rules.""" + queryset = Rule.objects.all().order_by("-priority", "created_at") + serializer = serializers.RuleSerializer(queryset, many=True) + return Response(serializer.data) + + def post(self, request): + """Create a new rule.""" + data = request.data + if not isinstance(data, dict): + return Response( + {"error": "Expected a JSON object."}, status=HTTP_400_BAD_REQUEST + ) + + try: + serializer = serializers.RuleSerializer(data=data) + if serializer.is_valid(raise_exception=True): + serializer.save() + except IntegrityError: + return Response( + {"error": "Invalid rule data."}, status=HTTP_400_BAD_REQUEST + ) + + return Response(serializer.data, status=HTTP_201_CREATED) + + +class RuleDetailView(APIView): + """View for getting, updating, or deleting a single rule.""" + + def get(self, request, pk): + """Get a rule by ID.""" + try: + rule = Rule.objects.get(pk=pk) + except (Rule.DoesNotExist, ValueError): + return Response(status=HTTP_404_NOT_FOUND) + return Response(rule.to_dict()) + + def put(self, request, pk): + """Update a rule by ID.""" + try: + rule = Rule.objects.get(pk=pk) + serializer = serializers.RuleSerializer(rule) + except (Rule.DoesNotExist, ValueError): + return Response(status=HTTP_404_NOT_FOUND) + + data = request.data + if not isinstance(data, dict): + return Response( + {"error": "Expected a JSON object."}, status=HTTP_400_BAD_REQUEST + ) + 
# Update fields if provided + if "kind" in data: + rule.kind = data["kind"] + if "value" in data: + rule.value = data["value"] + if "action" in data: + rule.action = data["action"] + if "priority" in data: + rule.priority = data["priority"] + if "comment" in data: + rule.comment = data["comment"] + + try: + rule.full_clean() + rule.save() + except ValidationError as e: + return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) + + return Response(serializer.data) + + def delete(self, request, pk): + """Delete a rule by ID.""" + Rule.objects.filter(pk=pk).delete() + return Response(status=HTTP_204_NO_CONTENT) From 4a066ea4467b7e7ad6425133cf30da8b0fa9d619 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 027/201] update migration --- ...alter_backendrequest_hostname_acls_and_more.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py index 92ff9a4a1..d6f1a1444 100644 --- a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py @@ -25,19 +25,4 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True)), ], ), - migrations.AlterField( - model_name='backendrequest', - name='hostname_acls', - field=models.JSONField(blank=True, default=list, validators=[policy.db_models.validate_hostname_acls]), - ), - migrations.AlterField( - model_name='backendrequest', - name='id', - field=models.BigAutoField(primary_key=True, serialize=False), - ), - migrations.AlterField( - model_name='backendrequest', - name='paths', - field=models.JSONField(blank=True, default=list), - ), ] From 01e6da20c2941d1edd103e84e6af1716353be23f Mon Sep 17 00:00:00 2001 From: 
tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 028/201] update view --- haproxy-route-policy/policy/views.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index a08a996e3..6c599cb82 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -132,22 +132,25 @@ def put(self, request, pk): {"error": "Expected a JSON object."}, status=HTTP_400_BAD_REQUEST ) # Update fields if provided - if "kind" in data: - rule.kind = data["kind"] - if "value" in data: - rule.value = data["value"] - if "action" in data: - rule.action = data["action"] - if "priority" in data: - rule.priority = data["priority"] - if "comment" in data: - rule.comment = data["comment"] - + if kind := data.get("kind"): + rule.kind = kind + if value := data.get("value"): + rule.value = value + if action := data.get("action"): + rule.action = action + if priority := data.get("priority"): + rule.priority = priority + if comment := data.get("comment"): + rule.comment = comment try: rule.full_clean() rule.save() except ValidationError as e: return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) + except IntegrityError: + return Response( + {"error": "Invalid rule data."}, status=HTTP_400_BAD_REQUEST + ) return Response(serializer.data) From fc6ecc696b97b50a39e1a689f88d573187e54652 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 029/201] fix lint --- ...r_backendrequest_hostname_acls_and_more.py | 41 +++++++++++++------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py index d6f1a1444..84e403e52 100644 --- 
a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py @@ -1,28 +1,45 @@ # Generated by Django 6.0.3 on 2026-03-17 20:05 -import policy.db_models import uuid from django.db import migrations, models class Migration(migrations.Migration): - dependencies = [ - ('policy', '0001_initial'), + ("policy", "0001_initial"), ] operations = [ migrations.CreateModel( - name='Rule', + name="Rule", fields=[ - ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ('kind', models.TextField(choices=[('hostname_and_path_match', 'hostname_and_path_match'), ('match_request_id', 'match_request_id')])), - ('value', models.JSONField()), - ('action', models.TextField(choices=[('allow', 'allow'), ('deny', 'deny')])), - ('priority', models.IntegerField(default=0)), - ('comment', models.TextField(blank=True, default='')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ( + "id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), + ( + "kind", + models.TextField( + choices=[ + ("hostname_and_path_match", "hostname_and_path_match"), + ("match_request_id", "match_request_id"), + ] + ), + ), + ("value", models.JSONField()), + ( + "action", + models.TextField(choices=[("allow", "allow"), ("deny", "deny")]), + ), + ("priority", models.IntegerField(default=0)), + ("comment", models.TextField(blank=True, default="")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] From 570def06c46a2c473eb780175cacfed3e6d4bfa6 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 030/201] remove extra tests --- .../policy/tests/test_models.py | 60 ------------------- 
.../policy/tests/test_views.py | 6 -- 2 files changed, 66 deletions(-) diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 708e66554..736262c4d 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -103,66 +103,6 @@ def test_to_dict(self): self.assertIn("created_at", data) self.assertIn("updated_at", data) - def test_validate_hostname_and_path_match_requires_dict(self): - """Test that hostname_and_path_match rules require a dict value.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value="not-a-dict", - action=db_models.RULE_ACTION_DENY, - ) - with self.assertRaises(ValidationError): - rule.full_clean() - - def test_validate_hostname_and_path_match_requires_hostnames_key(self): - """Test that hostname_and_path_match rules require 'hostnames' key.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"paths": []}, - action=db_models.RULE_ACTION_DENY, - ) - with self.assertRaises(ValidationError): - rule.full_clean() - - def test_validate_hostname_and_path_match_requires_paths_key(self): - """Test that hostname_and_path_match rules require 'paths' key.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"]}, - action=db_models.RULE_ACTION_DENY, - ) - with self.assertRaises(ValidationError): - rule.full_clean() - - def test_validate_hostname_and_path_match_hostnames_must_be_list(self): - """Test that 'hostnames' must be a list.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": "example.com", "paths": []}, - action=db_models.RULE_ACTION_DENY, - ) - with self.assertRaises(ValidationError): - rule.full_clean() - - def test_validate_hostname_and_path_match_paths_must_be_list(self): - """Test that 'paths' must be a list.""" - rule = db_models.Rule( - 
kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": "/api"}, - action=db_models.RULE_ACTION_DENY, - ) - with self.assertRaises(ValidationError): - rule.full_clean() - - def test_validate_rule_value_rejects_list(self): - """Test that the value field rejects list types.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value=["invalid"], - action=db_models.RULE_ACTION_DENY, - ) - with self.assertRaises(ValidationError): - rule.full_clean() - def test_invalid_kind_rejected(self): """Test that an invalid kind value is rejected.""" rule = db_models.Rule( diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 340b8651f..54962f4a7 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -214,12 +214,6 @@ def test_create_rule_with_defaults(self): self.assertEqual(data["priority"], 0) self.assertEqual(data["comment"], "") - def test_create_rule_missing_required_fields(self): - """POST returns 400 when required fields are missing.""" - payload = {"kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH} - response = self.client.post("/api/v1/rules", data=payload, format="json") - self.assertEqual(response.status_code, 400) - def test_create_rule_invalid_kind(self): """POST returns 400 when kind is invalid.""" payload = { From b5efa1807a0cc1d60bc2ede398ab7be8a67fb986 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 031/201] add validation and update tests --- haproxy-route-policy/policy/db_models.py | 25 +++ .../policy/tests/test_models.py | 154 ++++++++++++++++++ 2 files changed, 179 insertions(+) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index d5919af42..ae35815b6 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -82,6 +82,11 @@ 
class BackendRequest(models.Model): updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) +def is_valid_path(value: typing.Any): + """Validate that the value is a list of valid URL paths.""" + return not isinstance(value, str) or not value.startswith("/") + + class Rule(models.Model): """A rule used to evaluate backend requests. @@ -126,3 +131,23 @@ def to_dict(self) -> dict: if self.updated_at else None, } + + def clean(self) -> None: + """Custom validation logic for the Rule model.""" + if self.kind == RULE_KIND_HOSTNAME_AND_PATH_MATCH: + if not isinstance(self.value, dict): + raise ValidationError("The value field must be a JSON object.") + + if hostnames := self.value.get("hostnames"): + if invalid_hostnames := [ + hostname for hostname in hostnames if not domain(hostname) + ]: + raise ValidationError( + f"Invalid hostname(s) in rule: {', '.join(invalid_hostnames)}" + ) + + if paths := self.value.get("paths"): + if invalid_paths := [path for path in paths if is_valid_path(path)]: + raise ValidationError( + f"Invalid path(s) in rule: {', '.join([str(path) for path in invalid_paths])}" + ) diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 736262c4d..eaff743b4 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -112,3 +112,157 @@ def test_invalid_kind_rejected(self): ) with self.assertRaises(ValidationError): rule.full_clean() + + def test_invalid_action_rejected(self): + """Test that an invalid action value is rejected.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": []}, + action="invalid_action", + ) + with self.assertRaises(ValidationError): + rule.full_clean() + + def test_hostname_and_path_match_value_must_be_dict(self): + """Test that hostname_and_path_match rules require a dict value.""" + rule = db_models.Rule( + 
kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value="not-a-dict", + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + self.assertIn("value field must be a JSON object", str(ctx.exception)) + + def test_hostname_and_path_match_value_list_rejected(self): + """Test that hostname_and_path_match rules reject a list value.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value=["not", "a", "dict"], + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + self.assertIn("value field must be a JSON object", str(ctx.exception)) + + def test_hostname_and_path_match_value_int_rejected(self): + """Test that hostname_and_path_match rules reject an integer value.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value=42, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + self.assertIn("value field must be a JSON object", str(ctx.exception)) + + def test_hostname_and_path_match_invalid_hostname(self): + """Test that invalid hostnames are rejected in hostname_and_path_match rules.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["not a valid hostname!!!"], "paths": []}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + self.assertIn("Invalid hostname", str(ctx.exception)) + + def test_hostname_and_path_match_multiple_invalid_hostnames(self): + """Test that multiple invalid hostnames are reported.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["valid.com", "bad host", "also bad!"], "paths": []}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + msg = str(ctx.exception) + self.assertIn("bad host", msg) + 
self.assertIn("also bad!", msg) + + def test_hostname_and_path_match_valid_hostnames_accepted(self): + """Test that valid hostnames pass validation.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com", "sub.example.org"], "paths": []}, + action=db_models.RULE_ACTION_ALLOW, + ) + rule.full_clean() # Should not raise + + def test_hostname_and_path_match_empty_hostnames_accepted(self): + """Test that an empty hostnames list passes validation.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": [], "paths": []}, + action=db_models.RULE_ACTION_ALLOW, + ) + rule.full_clean() # Should not raise + + def test_hostname_and_path_match_invalid_path_not_starting_with_slash(self): + """Test that paths not starting with / are rejected.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": ["api/v1"]}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + self.assertIn("Invalid path", str(ctx.exception)) + + def test_hostname_and_path_match_invalid_path_non_string(self): + """Test that non-string paths are rejected.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": [123]}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + self.assertIn("Invalid path", str(ctx.exception)) + + def test_hostname_and_path_match_valid_paths_accepted(self): + """Test that valid paths starting with / pass validation.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": ["/api", "/health"]}, + action=db_models.RULE_ACTION_ALLOW, + ) + rule.full_clean() # Should not raise + + def test_hostname_and_path_match_empty_paths_accepted(self): + """Test that 
an empty paths list passes validation.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": []}, + action=db_models.RULE_ACTION_ALLOW, + ) + rule.full_clean() # Should not raise + + def test_hostname_and_path_match_multiple_invalid_paths(self): + """Test that multiple invalid paths are reported.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": [], "paths": ["no-slash", "also-bad"]}, + action=db_models.RULE_ACTION_DENY, + ) + with self.assertRaises(ValidationError) as ctx: + rule.full_clean() + msg = str(ctx.exception) + self.assertIn("no-slash", msg) + self.assertIn("also-bad", msg) + + def test_hostname_and_path_match_both_valid_hostnames_and_paths(self): + """Test that a rule with both valid hostnames and paths passes.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={ + "hostnames": ["example.com", "app.example.com"], + "paths": ["/api", "/v1/health"], + }, + action=db_models.RULE_ACTION_DENY, + priority=3, + comment="Block specific routes", + ) + rule.full_clean() + rule.save() + self.assertIsNotNone(rule.id) From 96350a13bfd943df5fdb060ccd91936bff11bcb0 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 032/201] update view --- haproxy-route-policy/policy/serializers.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py index 27139efb3..a031064c9 100644 --- a/haproxy-route-policy/policy/serializers.py +++ b/haproxy-route-policy/policy/serializers.py @@ -6,6 +6,7 @@ from rest_framework import serializers from policy.db_models import ( BackendRequest, + Rule, ) @@ -13,3 +14,9 @@ class BackendRequestSerializer(serializers.ModelSerializer): class Meta: # pyright: ignore[reportIncompatibleVariableOverride] model = BackendRequest fields = "__all__" + + +class 
RuleSerializer(serializers.ModelSerializer): + class Meta: # pyright: ignore[reportIncompatibleVariableOverride] + model = Rule + fields = "__all__" From 30eb7fb4a3cb7136c9a6aa4a1178c0e01dbfd0fc Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 033/201] remove to_dict --- haproxy-route-policy/policy/db_models.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index ae35815b6..17789744e 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -115,23 +115,6 @@ class Rule(models.Model): created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - def to_dict(self) -> dict: - """Serialize to a JSON-compatible dict.""" - return { - "id": str(self.id), - "kind": self.kind, - "value": self.value, - "action": self.action, - "priority": self.priority, - "comment": self.comment, - "created_at": typing.cast(datetime, self.created_at).isoformat() - if self.created_at - else None, - "updated_at": typing.cast(datetime, self.updated_at).isoformat() - if self.updated_at - else None, - } - def clean(self) -> None: """Custom validation logic for the Rule model.""" if self.kind == RULE_KIND_HOSTNAME_AND_PATH_MATCH: From c5fdbee9d6c74ece8acea02377929ea560189ddc Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 034/201] use serializer for get --- haproxy-route-policy/policy/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 6c599cb82..0d186e368 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -114,9 +114,10 @@ def get(self, request, pk): """Get a rule by ID.""" try: rule = Rule.objects.get(pk=pk) + serializer = 
serializers.RuleSerializer(rule) except (Rule.DoesNotExist, ValueError): return Response(status=HTTP_404_NOT_FOUND) - return Response(rule.to_dict()) + return Response(serializer.data) def put(self, request, pk): """Update a rule by ID.""" From 8d34c55b977671e7938c531760e76f14683a952b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 035/201] use serializer --- haproxy-route-policy/policy/views.py | 76 ++++++++-------------------- 1 file changed, 21 insertions(+), 55 deletions(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 0d186e368..55fc49260 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -12,6 +12,7 @@ HTTP_404_NOT_FOUND, HTTP_204_NO_CONTENT, ) +from django.http import Http404 from django.core.exceptions import ValidationError from django.db.utils import IntegrityError from django.db import transaction @@ -89,73 +90,38 @@ def get(self, request): def post(self, request): """Create a new rule.""" - data = request.data - if not isinstance(data, dict): - return Response( - {"error": "Expected a JSON object."}, status=HTTP_400_BAD_REQUEST - ) - - try: - serializer = serializers.RuleSerializer(data=data) - if serializer.is_valid(raise_exception=True): - serializer.save() - except IntegrityError: - return Response( - {"error": "Invalid rule data."}, status=HTTP_400_BAD_REQUEST - ) - - return Response(serializer.data, status=HTTP_201_CREATED) + serializer = serializers.RuleSerializer(data=request.data) + if serializer.is_valid(raise_exception=True): + serializer.save() + return Response(serializer.data, status=HTTP_201_CREATED) + return Response(serializer.errors, status=HTTP_400_BAD_REQUEST) class RuleDetailView(APIView): """View for getting, updating, or deleting a single rule.""" + def get_object(self, pk): + try: + return Rule.objects.get(pk=pk) + except Rule.DoesNotExist: + raise Http404 + def get(self, request, pk): """Get a 
rule by ID.""" - try: - rule = Rule.objects.get(pk=pk) - serializer = serializers.RuleSerializer(rule) - except (Rule.DoesNotExist, ValueError): - return Response(status=HTTP_404_NOT_FOUND) + rule = self.get_object(pk) + serializer = serializers.RuleSerializer(rule) return Response(serializer.data) def put(self, request, pk): """Update a rule by ID.""" - try: - rule = Rule.objects.get(pk=pk) - serializer = serializers.RuleSerializer(rule) - except (Rule.DoesNotExist, ValueError): - return Response(status=HTTP_404_NOT_FOUND) - - data = request.data - if not isinstance(data, dict): - return Response( - {"error": "Expected a JSON object."}, status=HTTP_400_BAD_REQUEST - ) - # Update fields if provided - if kind := data.get("kind"): - rule.kind = kind - if value := data.get("value"): - rule.value = value - if action := data.get("action"): - rule.action = action - if priority := data.get("priority"): - rule.priority = priority - if comment := data.get("comment"): - rule.comment = comment - try: - rule.full_clean() - rule.save() - except ValidationError as e: - return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) - except IntegrityError: - return Response( - {"error": "Invalid rule data."}, status=HTTP_400_BAD_REQUEST - ) - - return Response(serializer.data) + rule = self.get_object(pk) + serializer = serializers.RuleSerializer(rule, data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data) + return Response(serializer.errors, status=HTTP_400_BAD_REQUEST) def delete(self, request, pk): """Delete a rule by ID.""" - Rule.objects.filter(pk=pk).delete() + Rule.objects.get(pk=pk).delete() return Response(status=HTTP_204_NO_CONTENT) From de688810453190299a17ee3004c8e1f0685f7187 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 036/201] remove unused tests --- .../policy/tests/test_models.py | 22 ------------------- 1 file changed, 22 deletions(-) diff --git 
a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index eaff743b4..7130982bd 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -81,28 +81,6 @@ def test_create_rule_defaults(self): self.assertEqual(rule.priority, 0) self.assertEqual(rule.comment, "") - def test_to_dict(self): - """Test serialisation to a JSON-compatible dict.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": []}, - action=db_models.RULE_ACTION_DENY, - priority=5, - comment="Test rule", - ) - rule.full_clean() - rule.save() - - data = rule.to_dict() - self.assertEqual(data["id"], str(rule.id)) - self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) - self.assertEqual(data["value"], {"hostnames": ["example.com"], "paths": []}) - self.assertEqual(data["action"], db_models.RULE_ACTION_DENY) - self.assertEqual(data["priority"], 5) - self.assertEqual(data["comment"], "Test rule") - self.assertIn("created_at", data) - self.assertIn("updated_at", data) - def test_invalid_kind_rejected(self): """Test that an invalid kind value is rejected.""" rule = db_models.Rule( From d5684632521d11d99e8dea6692e65723d3e16e37 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 037/201] use filter for delete query --- haproxy-route-policy/policy/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 55fc49260..de6e3c890 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -123,5 +123,5 @@ def put(self, request, pk): def delete(self, request, pk): """Delete a rule by ID.""" - Rule.objects.get(pk=pk).delete() + Rule.objects.filter(pk=pk).delete() return Response(status=HTTP_204_NO_CONTENT) From 
cdfde2c03b289acbcfe7572b4bc2e7a5eddf50b3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 038/201] update tests and move validation to serializer class --- haproxy-route-policy/policy/db_models.py | 25 -- haproxy-route-policy/policy/serializers.py | 35 ++- .../policy/tests/test_models.py | 287 ++++++++++-------- haproxy-route-policy/policy/views.py | 2 +- 4 files changed, 198 insertions(+), 151 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 17789744e..ba96854cc 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -82,11 +82,6 @@ class BackendRequest(models.Model): updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) -def is_valid_path(value: typing.Any): - """Validate that the value is a list of valid URL paths.""" - return not isinstance(value, str) or not value.startswith("/") - - class Rule(models.Model): """A rule used to evaluate backend requests. 
@@ -114,23 +109,3 @@ class Rule(models.Model): comment: models.TextField = models.TextField(default="", blank=True) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - - def clean(self) -> None: - """Custom validation logic for the Rule model.""" - if self.kind == RULE_KIND_HOSTNAME_AND_PATH_MATCH: - if not isinstance(self.value, dict): - raise ValidationError("The value field must be a JSON object.") - - if hostnames := self.value.get("hostnames"): - if invalid_hostnames := [ - hostname for hostname in hostnames if not domain(hostname) - ]: - raise ValidationError( - f"Invalid hostname(s) in rule: {', '.join(invalid_hostnames)}" - ) - - if paths := self.value.get("paths"): - if invalid_paths := [path for path in paths if is_valid_path(path)]: - raise ValidationError( - f"Invalid path(s) in rule: {', '.join([str(path) for path in invalid_paths])}" - ) diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py index a031064c9..590b2d223 100644 --- a/haproxy-route-policy/policy/serializers.py +++ b/haproxy-route-policy/policy/serializers.py @@ -4,10 +4,14 @@ """Serializers for the haproxy-route-policy application.""" from rest_framework import serializers -from policy.db_models import ( - BackendRequest, - Rule, -) +from policy.db_models import BackendRequest, Rule, RULE_KIND_HOSTNAME_AND_PATH_MATCH +import typing +from validators import domain + + +def is_valid_path(value: typing.Any): + """Validate that the value is a list of valid URL paths.""" + return not isinstance(value, str) or not value.startswith("/") class BackendRequestSerializer(serializers.ModelSerializer): @@ -20,3 +24,26 @@ class RuleSerializer(serializers.ModelSerializer): class Meta: # pyright: ignore[reportIncompatibleVariableOverride] model = Rule fields = "__all__" + + def validate(self, attrs): + """Custom validation logic for the Rule model.""" + if 
attrs.get("kind") == RULE_KIND_HOSTNAME_AND_PATH_MATCH: + if not isinstance(attrs.get("value"), dict): + raise serializers.ValidationError( + "The value field must be a JSON object." + ) + + if hostnames := typing.cast(dict, attrs.get("value")).get("hostnames"): + if invalid_hostnames := [ + hostname for hostname in hostnames if not domain(hostname) + ]: + raise serializers.ValidationError( + f"Invalid hostname(s) in rule: {', '.join(invalid_hostnames)}" + ) + + if paths := typing.cast(dict, attrs.get("value")).get("paths"): + if invalid_paths := [path for path in paths if is_valid_path(path)]: + raise serializers.ValidationError( + f"Invalid path(s) in rule: {', '.join([str(path) for path in invalid_paths])}" + ) + return attrs diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 7130982bd..e73181f5c 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -4,13 +4,12 @@ """Unit tests for the BackendRequest and Rule models.""" from django.test import TestCase -from django.core.exceptions import ValidationError -from policy import db_models +from policy import db_models, serializers class TestBackendRequestModel(TestCase): - """Tests for BackendRequest model creation and serialisation.""" + """Tests for BackendRequest model creation and serialization.""" def test_create_with_defaults(self): """Test creating a request with minimal required fields.""" @@ -49,15 +48,17 @@ class TestRuleModel(TestCase): def test_create_hostname_and_path_match_rule(self): """Test creating a hostname_and_path_match rule with valid data.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": ["/api"]}, - action=db_models.RULE_ACTION_DENY, - priority=1, - comment="Deny example.com/api", + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, 
+ "value": {"hostnames": ["example.com"], "paths": ["/api"]}, + "action": db_models.RULE_ACTION_DENY, + "priority": 1, + "comment": "Deny example.com/api", + } ) - rule.full_clean() - rule.save() + self.assertTrue(serializer.is_valid(), serializer.errors) + rule = serializer.save() self.assertIsNotNone(rule.id) self.assertEqual(rule.kind, db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) @@ -70,177 +71,221 @@ def test_create_hostname_and_path_match_rule(self): def test_create_rule_defaults(self): """Test that default values are set correctly.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["test.com"], "paths": []}, - action=db_models.RULE_ACTION_ALLOW, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["test.com"], "paths": []}, + "action": db_models.RULE_ACTION_ALLOW, + } ) - rule.full_clean() - rule.save() + self.assertTrue(serializer.is_valid(), serializer.errors) + rule = serializer.save() self.assertEqual(rule.priority, 0) self.assertEqual(rule.comment, "") def test_invalid_kind_rejected(self): """Test that an invalid kind value is rejected.""" - rule = db_models.Rule( - kind="invalid_kind", - value=1, - action=db_models.RULE_ACTION_ALLOW, + serializer = serializers.RuleSerializer( + data={ + "kind": "invalid_kind", + "value": 1, + "action": db_models.RULE_ACTION_ALLOW, + } ) - with self.assertRaises(ValidationError): - rule.full_clean() + self.assertFalse(serializer.is_valid()) + self.assertIn("kind", serializer.errors) def test_invalid_action_rejected(self): """Test that an invalid action value is rejected.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": []}, - action="invalid_action", + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": []}, + 
"action": "invalid_action", + } ) - with self.assertRaises(ValidationError): - rule.full_clean() + self.assertFalse(serializer.is_valid()) + self.assertIn("action", serializer.errors) def test_hostname_and_path_match_value_must_be_dict(self): """Test that hostname_and_path_match rules require a dict value.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value="not-a-dict", - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": "not-a-dict", + "action": db_models.RULE_ACTION_DENY, + } + ) + self.assertFalse(serializer.is_valid()) + self.assertIn("non_field_errors", serializer.errors) + self.assertIn( + "value field must be a JSON object", + str(serializer.errors["non_field_errors"]), ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - self.assertIn("value field must be a JSON object", str(ctx.exception)) def test_hostname_and_path_match_value_list_rejected(self): """Test that hostname_and_path_match rules reject a list value.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value=["not", "a", "dict"], - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": ["not", "a", "dict"], + "action": db_models.RULE_ACTION_DENY, + } + ) + self.assertFalse(serializer.is_valid()) + self.assertIn("non_field_errors", serializer.errors) + self.assertIn( + "value field must be a JSON object", + str(serializer.errors["non_field_errors"]), ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - self.assertIn("value field must be a JSON object", str(ctx.exception)) def test_hostname_and_path_match_value_int_rejected(self): """Test that hostname_and_path_match rules reject an integer value.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value=42, - 
action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": 42, + "action": db_models.RULE_ACTION_DENY, + } + ) + self.assertFalse(serializer.is_valid()) + self.assertIn("non_field_errors", serializer.errors) + self.assertIn( + "value field must be a JSON object", + str(serializer.errors["non_field_errors"]), ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - self.assertIn("value field must be a JSON object", str(ctx.exception)) def test_hostname_and_path_match_invalid_hostname(self): """Test that invalid hostnames are rejected in hostname_and_path_match rules.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["not a valid hostname!!!"], "paths": []}, - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["not a valid hostname!!!"], "paths": []}, + "action": db_models.RULE_ACTION_DENY, + } ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - self.assertIn("Invalid hostname", str(ctx.exception)) + self.assertFalse(serializer.is_valid()) + self.assertIn("Invalid hostname", str(serializer.errors)) def test_hostname_and_path_match_multiple_invalid_hostnames(self): """Test that multiple invalid hostnames are reported.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["valid.com", "bad host", "also bad!"], "paths": []}, - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": { + "hostnames": ["valid.com", "bad host", "also bad!"], + "paths": [], + }, + "action": db_models.RULE_ACTION_DENY, + } ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - msg = str(ctx.exception) - self.assertIn("bad host", msg) 
- self.assertIn("also bad!", msg) + self.assertFalse(serializer.is_valid()) + errors_str = str(serializer.errors) + self.assertIn("bad host", errors_str) + self.assertIn("also bad!", errors_str) def test_hostname_and_path_match_valid_hostnames_accepted(self): """Test that valid hostnames pass validation.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com", "sub.example.org"], "paths": []}, - action=db_models.RULE_ACTION_ALLOW, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": { + "hostnames": ["example.com", "sub.example.org"], + "paths": [], + }, + "action": db_models.RULE_ACTION_ALLOW, + } ) - rule.full_clean() # Should not raise + self.assertTrue(serializer.is_valid(), serializer.errors) def test_hostname_and_path_match_empty_hostnames_accepted(self): """Test that an empty hostnames list passes validation.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": [], "paths": []}, - action=db_models.RULE_ACTION_ALLOW, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": [], "paths": []}, + "action": db_models.RULE_ACTION_ALLOW, + } ) - rule.full_clean() # Should not raise + self.assertTrue(serializer.is_valid(), serializer.errors) def test_hostname_and_path_match_invalid_path_not_starting_with_slash(self): """Test that paths not starting with / are rejected.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": ["api/v1"]}, - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": ["api/v1"]}, + "action": db_models.RULE_ACTION_DENY, + } ) - with self.assertRaises(ValidationError) as ctx: - 
rule.full_clean() - self.assertIn("Invalid path", str(ctx.exception)) + self.assertFalse(serializer.is_valid()) + self.assertIn("Invalid path", str(serializer.errors)) def test_hostname_and_path_match_invalid_path_non_string(self): """Test that non-string paths are rejected.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": [123]}, - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": [123]}, + "action": db_models.RULE_ACTION_DENY, + } ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - self.assertIn("Invalid path", str(ctx.exception)) + self.assertFalse(serializer.is_valid()) def test_hostname_and_path_match_valid_paths_accepted(self): """Test that valid paths starting with / pass validation.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": ["/api", "/health"]}, - action=db_models.RULE_ACTION_ALLOW, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": { + "hostnames": ["example.com"], + "paths": ["/api", "/health"], + }, + "action": db_models.RULE_ACTION_ALLOW, + } ) - rule.full_clean() # Should not raise + self.assertTrue(serializer.is_valid(), serializer.errors) def test_hostname_and_path_match_empty_paths_accepted(self): """Test that an empty paths list passes validation.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": []}, - action=db_models.RULE_ACTION_ALLOW, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": []}, + "action": db_models.RULE_ACTION_ALLOW, + } ) - rule.full_clean() # Should not raise 
+ self.assertTrue(serializer.is_valid(), serializer.errors) def test_hostname_and_path_match_multiple_invalid_paths(self): """Test that multiple invalid paths are reported.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": [], "paths": ["no-slash", "also-bad"]}, - action=db_models.RULE_ACTION_DENY, + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": [], "paths": ["no-slash", "also-bad"]}, + "action": db_models.RULE_ACTION_DENY, + } ) - with self.assertRaises(ValidationError) as ctx: - rule.full_clean() - msg = str(ctx.exception) - self.assertIn("no-slash", msg) - self.assertIn("also-bad", msg) + self.assertFalse(serializer.is_valid()) + errors_str = str(serializer.errors) + self.assertIn("no-slash", errors_str) + self.assertIn("also-bad", errors_str) def test_hostname_and_path_match_both_valid_hostnames_and_paths(self): """Test that a rule with both valid hostnames and paths passes.""" - rule = db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={ - "hostnames": ["example.com", "app.example.com"], - "paths": ["/api", "/v1/health"], - }, - action=db_models.RULE_ACTION_DENY, - priority=3, - comment="Block specific routes", - ) - rule.full_clean() - rule.save() + serializer = serializers.RuleSerializer( + data={ + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": { + "hostnames": ["example.com", "app.example.com"], + "paths": ["/api", "/v1/health"], + }, + "action": db_models.RULE_ACTION_DENY, + "priority": 3, + "comment": "Block specific routes", + } + ) + self.assertTrue(serializer.is_valid(), serializer.errors) + rule = serializer.save() self.assertIsNotNone(rule.id) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index de6e3c890..03d73a79f 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -115,7 +115,7 @@ def 
get(self, request, pk): def put(self, request, pk): """Update a rule by ID.""" rule = self.get_object(pk) - serializer = serializers.RuleSerializer(rule, data=request.data) + serializer = serializers.RuleSerializer(rule, data=request.data, partial=True) if serializer.is_valid(): serializer.save() return Response(serializer.data) From d9ad271a0db730a58a090b5f9d3c09f0dd992310 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 039/201] Apply suggestion from @github-actions[bot] Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .../0002_rule_alter_backendrequest_hostname_acls_and_more.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py index 84e403e52..ebbc42eeb 100644 --- a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py @@ -1,3 +1,6 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ # Generated by Django 6.0.3 on 2026-03-17 20:05 import uuid From bff1c28fcb4e47e8505a425193ea715c5b2c174c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 040/201] remove license header from generated files --- .../0002_rule_alter_backendrequest_hostname_acls_and_more.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py index ebbc42eeb..84e403e52 100644 --- a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py @@ -1,6 +1,3 @@ -# Copyright 2026 Canonical Ltd. -# See LICENSE file for licensing details. - # Generated by Django 6.0.3 on 2026-03-17 20:05 import uuid From 1fad230159993ebb45191fcc1ccb7df890f2f538 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 041/201] Update haproxy-route-policy/policy/migrations/0001_initial.py Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- haproxy-route-policy/policy/migrations/0001_initial.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 26cc6998e..1b55c35b4 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -1,3 +1,6 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ # Generated by Django 6.0.3 on 2026-03-17 20:21 import policy.db_models From b5c511ad112e77054a9caf459693ec2a9dbeb8d5 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 042/201] Revert "Update haproxy-route-policy/policy/migrations/0001_initial.py" This reverts commit 10a2708834d797cfbe036bfdfc33f26fe0c7eed2. --- haproxy-route-policy/policy/migrations/0001_initial.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 1b55c35b4..26cc6998e 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -1,6 +1,3 @@ -# Copyright 2026 Canonical Ltd. -# See LICENSE file for licensing details. - # Generated by Django 6.0.3 on 2026-03-17 20:21 import policy.db_models From 9bd2821846543b38d176400f5a8f2005efca2983 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 043/201] ignore migration files for license header --- .licenserc.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.licenserc.yaml b/.licenserc.yaml index 6a24006db..58e40ca11 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -38,4 +38,5 @@ header: - '.readthedocs.yaml' - 'docs/**' - '.lycheeignore' + - 'haproxy-route-policy/policy/migrations/*.py' comment: on-failure From 79510f9cc04c239d2f062eb04fd23ddee190ad2c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 044/201] add change artifact --- docs/release-notes/artifacts/pr0400.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0400.yaml diff --git a/docs/release-notes/artifacts/pr0400.yaml b/docs/release-notes/artifacts/pr0400.yaml new file mode 100644 index 000000000..67c834051 --- /dev/null +++ b/docs/release-notes/artifacts/pr0400.yaml @@ -0,0 +1,21 @@ +version_schema: 2 + 
+changes: + - title: Added rules management REST API for haproxy-route-policy app + author: tphan025 + type: minor + description: > + Added the Rule model with UUID primary key and fields for kind, value, action, + priority, and comment. Implemented REST API endpoints for rules: GET /api/v1/rules + (list ordered by descending priority), POST /api/v1/rules (create with validation), + GET /api/v1/rules/ (retrieve by ID), PUT /api/v1/rules/ (partial update), + and DELETE /api/v1/rules/ (idempotent delete). Added RuleSerializer with + custom validation for hostname_and_path_match rules including hostname and path + checks. Included unit and integration tests for the Rule model and API views. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/400 + related_doc: + related_issue: + visibility: public + highlight: false From 64b47921b139f5347414738b8337c8a00c8eba6e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 045/201] add envlist to tox commands --- haproxy-route-policy/tox.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index d86392900..26924a975 100644 --- a/haproxy-route-policy/tox.toml +++ b/haproxy-route-policy/tox.toml @@ -5,6 +5,7 @@ skipsdist = true skip_missing_interpreters = true requires = ["tox>=4.21"] no_package = true +envlist = [ "lint", "unit", "static", "coverage-report" ] [env_run_base] passenv = ["PYTHONPATH"] From 2c56b269d94981f06935e30cbab7a30ec7512128 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 046/201] update envlist --- haproxy-route-policy/tox.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index 26924a975..d08f465fc 100644 --- a/haproxy-route-policy/tox.toml +++ b/haproxy-route-policy/tox.toml @@ -5,7 +5,7 @@ skipsdist = true skip_missing_interpreters = true requires = ["tox>=4.21"] 
no_package = true -envlist = [ "lint", "unit", "static", "coverage-report" ] +envlist = [ "lint", "unit"] [env_run_base] passenv = ["PYTHONPATH"] From 65d04cbc712eff4251fc12d0563bfbd705355e25 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:49:54 +0200 Subject: [PATCH 047/201] convert pk to uuid for requests --- haproxy-route-policy/policy/db_models.py | 7 +++++-- .../policy/migrations/0001_initial.py | 13 +++++++++++-- haproxy-route-policy/policy/tests/test_views.py | 8 ++++---- haproxy-route-policy/policy/urls.py | 2 +- 4 files changed, 21 insertions(+), 9 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index ba96854cc..344b44320 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -8,6 +8,7 @@ from django.db import models from validators import domain from django.core.exceptions import ValidationError +import uuid REQUEST_STATUS_PENDING = "pending" REQUEST_STATUS_ACCEPTED = "accepted" @@ -54,7 +55,7 @@ class BackendRequest(models.Model): """A backend request submitted via the haproxy-route relation. Attrs: - id: Auto-incrementing primary key. + id: Request UUID. relation_id: The Juju relation ID this request originated from. hostname_acls: Hostnames requested for routing. backend_name: The name of the backend in the HAProxy config. @@ -65,7 +66,9 @@ class BackendRequest(models.Model): updated_at: Timestamp when the request was last updated. 
""" - id: models.BigAutoField = models.BigAutoField(primary_key=True) + id: models.UUIDField = models.UUIDField( + primary_key=True, default=uuid.uuid4, editable=False + ) relation_id: models.IntegerField = models.IntegerField() hostname_acls: models.JSONField = models.JSONField( default=list, validators=[validate_hostname_acls], blank=True diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 26cc6998e..7d7a16bcd 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -1,6 +1,7 @@ -# Generated by Django 6.0.3 on 2026-03-17 20:21 +# Generated by Django 6.0.3 on 2026-03-19 12:58 import policy.db_models +import uuid from django.db import migrations, models @@ -13,7 +14,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="BackendRequest", fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False)), + ( + "id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), ("relation_id", models.IntegerField()), ( "hostname_acls", diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 54962f4a7..20331c95c 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -7,7 +7,7 @@ from django.test import TestCase from rest_framework.test import APIClient - +import uuid from policy import db_models @@ -117,12 +117,12 @@ def test_get_existing(self): response = self.client.get(f"/api/v1/requests/{self.backend_request.pk}") self.assertEqual(response.status_code, 200) data = response.json() - self.assertEqual(data["id"], self.backend_request.pk) + self.assertEqual(data["id"], str(self.backend_request.pk)) self.assertEqual(data["backend_name"], "detail-backend") def test_get_not_found(self): """GET returns 404 for a non-existent 
ID.""" - response = self.client.get("/api/v1/requests/99999") + response = self.client.get(f"/api/v1/requests/{uuid.uuid4()}") self.assertEqual(response.status_code, 404) def test_delete_existing(self): @@ -134,7 +134,7 @@ def test_delete_existing(self): def test_delete_nonexistent(self): """DELETE on a non-existent ID still returns 204 (idempotent).""" - response = self.client.delete("/api/v1/requests/99999") + response = self.client.delete(f"/api/v1/requests/{uuid.uuid4()}") self.assertEqual(response.status_code, 204) diff --git a/haproxy-route-policy/policy/urls.py b/haproxy-route-policy/policy/urls.py index 9cb4e203c..95c4da997 100644 --- a/haproxy-route-policy/policy/urls.py +++ b/haproxy-route-policy/policy/urls.py @@ -14,7 +14,7 @@ name="api-requests", ), path( - "api/v1/requests/", + "api/v1/requests/", views.RequestDetailView.as_view(), name="api-request-detail", ), From 4bdca17da7203f9d6ddecec8db65d04e6c1ac84f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:17 +0200 Subject: [PATCH 048/201] Add guard against mal-formed uuid and parameter. Add logging configs, Add middleware to guard against db connection errors --- .../haproxy_route_policy/settings.py | 39 ++++++++++++++++++- haproxy-route-policy/policy/middleware.py | 38 ++++++++++++++++++ haproxy-route-policy/policy/views.py | 25 ++++++++---- haproxy-route-policy/pyproject.toml | 1 + haproxy-route-policy/uv.lock | 11 ++++++ 5 files changed, 105 insertions(+), 9 deletions(-) create mode 100644 haproxy-route-policy/policy/middleware.py diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index df4f9c1a7..7c3d2c341 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -15,14 +15,14 @@ from pathlib import Path import os +import json # Build paths inside the project like this: BASE_DIR / 'subdir'. 
BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") DEBUG = os.environ.get("DJANGO_DEBUG", "").lower() == "true" -ALLOWED_HOSTS = [] - +ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS", "[]")) # Application definition @@ -47,6 +47,8 @@ "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", + "whitenoise.middleware.WhiteNoiseMiddleware", + "policy.middleware.DatabaseErrorMiddleware", ] ROOT_URLCONF = "haproxy_route_policy.urls" @@ -115,3 +117,36 @@ # https://docs.djangoproject.com/en/6.0/howto/static-files/ STATIC_URL = "static/" +STATIC_ROOT = Path(BASE_DIR, "static/") + +# Default primary key field type +# https://docs.djangoproject.com/en/5.1/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +env_log_level = os.getenv("DJANGO_LOG_LEVEL", "INFO").upper() +if env_log_level not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]: + env_log_level = "INFO" + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "handlers": { + "console": { + "class": "logging.StreamHandler", + }, + }, + "root": { + "handlers": ["console"], + "level": "WARNING", + }, + "loggers": { + "django": { + "handlers": ["console"], + "level": env_log_level, + "propagate": False, + }, + }, +} + +DATA_UPLOAD_MAX_MEMORY_SIZE = 32 * 1024 * 1024 diff --git a/haproxy-route-policy/policy/middleware.py b/haproxy-route-policy/policy/middleware.py new file mode 100644 index 000000000..cdd6fe22b --- /dev/null +++ b/haproxy-route-policy/policy/middleware.py @@ -0,0 +1,38 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Middleware for handling database connection errors.""" + +import logging + +from django.db import OperationalError, DatabaseError +from django.http import JsonResponse +from rest_framework.status import HTTP_500_INTERNAL_SERVER_ERROR + +logger = logging.getLogger(__name__) + + +class DatabaseErrorMiddleware: + """Catch database connection errors and return a generic 503 response. + + This prevents the application's stack trace from being exposed to the client + when the database is unreachable or encounters a connection-level error. + """ + + def __init__(self, get_response): + """Initialize the middleware.""" + self.get_response = get_response + + def __call__(self, request): + """Process the request.""" + return self.get_response(request) + + def process_exception(self, _request, exception): + """Handle database errors raised during view processing.""" + if isinstance(exception, (OperationalError, DatabaseError)): + logger.error("Database error: %s", exception) + return JsonResponse( + {"error": "A database error occurred. 
Please try again later."}, + status=HTTP_500_INTERNAL_SERVER_ERROR, + ) + return None diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 03d73a79f..551f5a4c1 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -17,6 +17,7 @@ from django.db.utils import IntegrityError from django.db import transaction from policy import serializers +from .db_models import REQUEST_STATUSES class ListCreateRequestsView(APIView): @@ -24,10 +25,13 @@ class ListCreateRequestsView(APIView): def get(self, request): """List all requests, optionally filtered by status.""" - filter = ( - {"status": request.GET.get("status")} if request.GET.get("status") else {} - ) - queryset = BackendRequest.objects.all().filter(**filter) + status = request.GET.get("status") + if status and status not in REQUEST_STATUSES: + return Response( + {"error": "Invalid status filter."}, status=HTTP_400_BAD_REQUEST + ) + filters = {"status": status} if status else {} + queryset = BackendRequest.objects.all().filter(**filters) serializer = serializers.BackendRequestSerializer(queryset, many=True) return Response(serializer.data) @@ -67,15 +71,22 @@ class RequestDetailView(APIView): def get(self, _request, pk): """Get a request by ID.""" try: - backend_request = BackendRequest.objects.get(pk=pk) + backend_request = BackendRequest.objects.get(pk=uuid_primary_key(pk)) serializer = serializers.BackendRequestSerializer(backend_request) except BackendRequest.DoesNotExist: return Response(status=HTTP_404_NOT_FOUND) + except (ValueError, AttributeError): + return Response( + {"error": "Invalid request ID."}, status=HTTP_400_BAD_REQUEST + ) return Response(serializer.data) - def delete(self, request, pk): + def delete(self, _request, pk): """Delete a request by ID.""" - BackendRequest.objects.filter(pk=pk).delete() + try: + BackendRequest.objects.filter(pk=uuid_primary_key(pk)).delete() + except (AttributeError, ValueError): + 
logger.warning(f"Attempted to delete request with invalid UUID: {pk}") return Response(status=HTTP_204_NO_CONTENT) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index 1ef141aee..8664dfd08 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -8,6 +8,7 @@ dependencies = [ "django>=6.0.3", "djangorestframework>=3.16.1", "validators>=0.35.0", + "whitenoise>=6.12.0", ] [dependency-groups] diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 17c0cdf21..218acc61c 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -117,6 +117,7 @@ dependencies = [ { name = "django" }, { name = "djangorestframework" }, { name = "validators" }, + { name = "whitenoise" }, ] [package.dev-dependencies] @@ -135,6 +136,7 @@ requires-dist = [ { name = "django", specifier = ">=6.0.3" }, { name = "djangorestframework", specifier = ">=3.16.1" }, { name = "validators", specifier = ">=0.35.0" }, + { name = "whitenoise", specifier = ">=6.12.0" }, ] [package.metadata.requires-dev] @@ -337,3 +339,12 @@ sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f0 wheels = [ { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, ] + +[[package]] +name = "whitenoise" +version = "6.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/2a/55b3f3a4ec326cd077c1c3defeee656b9298372a69229134d930151acd01/whitenoise-6.12.0.tar.gz", hash = "sha256:f723ebb76a112e98816ff80fcea0a6c9b8ecde835f8ddda25df7a30a3c2db6ad", size = 26841, upload-time = "2026-02-27T00:05:42.028Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/db/eb/d5583a11486211f3ebd4b385545ae787f32363d453c19fffd81106c9c138/whitenoise-6.12.0-py3-none-any.whl", hash = "sha256:fc5e8c572e33ebf24795b47b6a7da8da3c00cff2349f5b04c02f28d0cc5a3cc2", size = 20302, upload-time = "2026-02-27T00:05:40.086Z" }, +] From 97b865810eece3913ded65218c3d9dd5b910f82d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:18 +0200 Subject: [PATCH 049/201] add validators for port and paths --- haproxy-route-policy/policy/db_models.py | 26 ++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 344b44320..83d324f86 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -51,6 +51,26 @@ def validate_hostname_acls(value: typing.Any): raise ValidationError(f"Invalid hostnames: {', '.join(invalid_hostnames)}") +def validate_port(value: typing.Any): + """Validate that the value is a valid TCP port number.""" + if not isinstance(value, int) or not (1 <= value <= 65535): + raise ValidationError("port must be an integer between 1 and 65535.") + + +def validate_paths(value: typing.Any): + """Validate that the value is a list of valid URL paths.""" + if not isinstance(value, list): + raise ValidationError("paths must be a list.") + if invalid_paths := [ + path + for path in typing.cast(list, value) + if not isinstance(path, str) or not path.startswith("/") + ]: + raise ValidationError( + f"Invalid paths: {', '.join(str(path) for path in invalid_paths)}" + ) + + class BackendRequest(models.Model): """A backend request submitted via the haproxy-route relation. 
@@ -74,8 +94,10 @@ class BackendRequest(models.Model): default=list, validators=[validate_hostname_acls], blank=True ) backend_name: models.TextField = models.TextField() - paths: models.JSONField = models.JSONField(default=list, blank=True) - port: models.IntegerField = models.IntegerField() + paths: models.JSONField = models.JSONField( + default=list, validators=[validate_paths], blank=True + ) + port: models.IntegerField = models.IntegerField(validators=[validate_port]) status: models.TextField = models.TextField( choices=REQUEST_STATUS_CHOICES, default=REQUEST_STATUS_PENDING, From a187dd47874983111d0198e40d9f761383fb3f4a Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:36 +0200 Subject: [PATCH 050/201] add tests for validators --- haproxy-route-policy/policy/tests/test_models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index e73181f5c..417e59887 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -3,6 +3,7 @@ """Unit tests for the BackendRequest and Rule models.""" +from django.core.exceptions import ValidationError from django.test import TestCase from policy import db_models, serializers From f48aed559597bef3725d12b724d38dba04c4ba1c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:41 +0200 Subject: [PATCH 051/201] add note for migration --- haproxy-route-policy/policy/db_models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 83d324f86..1a6d05b59 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -14,6 +14,7 @@ REQUEST_STATUS_ACCEPTED = "accepted" REQUEST_STATUS_REJECTED = "rejected" +# Note: changing these values will require a data migration to update the database schema. 
REQUEST_STATUSES = [ REQUEST_STATUS_PENDING, REQUEST_STATUS_ACCEPTED, From 3d5387a1ddc3a317867632cc1f1037903404686e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:41 +0200 Subject: [PATCH 052/201] remove unused imports --- haproxy-route-policy/policy/db_models.py | 1 - haproxy-route-policy/policy/tests/test_views.py | 1 - 2 files changed, 2 deletions(-) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 1a6d05b59..caf23e64c 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -8,7 +8,6 @@ from django.db import models from validators import domain from django.core.exceptions import ValidationError -import uuid REQUEST_STATUS_PENDING = "pending" REQUEST_STATUS_ACCEPTED = "accepted" diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 20331c95c..676b059b3 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -7,7 +7,6 @@ from django.test import TestCase from rest_framework.test import APIClient -import uuid from policy import db_models From 0ac06b0a96457f8e473cb20b5af156d724f3e571 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:52 +0200 Subject: [PATCH 053/201] add static tests --- haproxy-route-policy/pyproject.toml | 3 + haproxy-route-policy/tox.toml | 6 ++ haproxy-route-policy/uv.lock | 126 ++++++++++++++++++++++++++++ 3 files changed, 135 insertions(+) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index 8664dfd08..3eab72619 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -21,3 +21,6 @@ lint = [ "mypy>=1.19.1", "ruff>=0.15.6", ] +static = [ + "bandit[toml]>=1.9.4", +] diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index d08f465fc..15b8d1214 100644 --- a/haproxy-route-policy/tox.toml 
+++ b/haproxy-route-policy/tox.toml @@ -65,6 +65,12 @@ commands = [ ] dependency_groups = ["lint"] + +[env.static] +description = "Run static analysis tests" +commands = [ [ "bandit", "-c", "{toxinidir}/pyproject.toml", "-r", "{[vars]src_path}", "{[vars]tst_path}" ] ] +dependency_groups = [ "static" ] + [vars] src_path = "{toxinidir}/policy/" tst_path = "{toxinidir}/policy/tests" diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 218acc61c..5e705cf7f 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -11,6 +11,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/0a/a72d10ed65068e115044937873362e6e32fab1b7dce0046aeb224682c989/asgiref-3.11.1-py3-none-any.whl", hash = "sha256:e8667a091e69529631969fd45dc268fa79b99c92c5fcdda727757e52146ec133", size = 24345, upload-time = "2026-02-03T13:30:13.039Z" }, ] +[[package]] +name = "bandit" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "stevedore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/c3/0cb80dfe0f3076e5da7e4c5ad8e57bac6ac357ff4a6406205501cade4965/bandit-1.9.4.tar.gz", hash = "sha256:b589e5de2afe70bd4d53fa0c1da6199f4085af666fde00e8a034f152a52cd628", size = 4242677, upload-time = "2026-02-25T06:44:15.503Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/a4/a26d5b25671d27e03afb5401a0be5899d94ff8fab6a698b1ac5be3ec29ef/bandit-1.9.4-py3-none-any.whl", hash = "sha256:f89ffa663767f5a0585ea075f01020207e966a9c0f2b9ef56a57c7963a3f6f8e", size = 134741, upload-time = "2026-02-25T06:44:13.694Z" }, +] + [[package]] name = "codespell" version = "2.4.2" @@ -20,6 +35,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/42/a1/52fa05533e95fe45bcc09bcf8a503874b1c08f221a4e35608017e0938f55/codespell-2.4.2-py3-none-any.whl", hash = 
"sha256:97e0c1060cf46bd1d5db89a936c98db8c2b804e1fdd4b5c645e82a1ec6b1f886", size = 353715, upload-time = "2026-03-05T18:10:41.398Z" }, ] +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + [[package]] name = "django" version = "6.0.3" @@ -130,6 +154,9 @@ lint = [ { name = "mypy" }, { name = "ruff" }, ] +static = [ + { name = "bandit" }, +] [package.metadata] requires-dist = [ @@ -149,6 +176,7 @@ lint = [ { name = "mypy", specifier = ">=1.19.1" }, { name = "ruff", specifier = ">=0.15.6" }, ] +static = [{ name = "bandit", extras = ["toml"], specifier = ">=1.9.4" }] [[package]] name = "librt" @@ -210,6 +238,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = 
"2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + [[package]] name = "mypy" version = "1.19.1" @@ -261,6 +310,74 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, ] +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = 
"sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + 
[[package]] name = "ruff" version = "0.15.6" @@ -295,6 +412,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/49/4b/359f28a903c13438ef59ebeee215fb25da53066db67b305c125f1c6d2a25/sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba", size = 46138, upload-time = "2025-12-19T07:17:46.573Z" }, ] +[[package]] +name = "stevedore" +version = "5.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6d/90764092216fa560f6587f83bb70113a8ba510ba436c6476a2b47359057c/stevedore-5.7.0.tar.gz", hash = "sha256:31dd6fe6b3cbe921e21dcefabc9a5f1cf848cf538a1f27543721b8ca09948aa3", size = 516200, upload-time = "2026-02-20T13:27:06.765Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/06/36d260a695f383345ab5bbc3fd447249594ae2fa8dfd19c533d5ae23f46b/stevedore-5.7.0-py3-none-any.whl", hash = "sha256:fd25efbb32f1abb4c9e502f385f0018632baac11f9ee5d1b70f88cc5e22ad4ed", size = 54483, upload-time = "2026-02-20T13:27:05.561Z" }, +] + [[package]] name = "types-psycopg2" version = "2.9.21.20260223" From 1020b7068376a55310da6195ba024d5ea8f95636 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:50:52 +0200 Subject: [PATCH 054/201] guard rules API against pk --- haproxy-route-policy/policy/views.py | 35 ++++++++++++++++++++-------- 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 551f5a4c1..c1f3af15d 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -111,7 +111,7 @@ def post(self, request): class RuleDetailView(APIView): """View for getting, updating, or deleting a single rule.""" - def get_object(self, pk): + def get_object(self, pk: str): try: return Rule.objects.get(pk=pk) except Rule.DoesNotExist: @@ -119,20 +119,35 @@ def get_object(self, pk): def get(self, request, pk): """Get a rule by 
ID.""" - rule = self.get_object(pk) - serializer = serializers.RuleSerializer(rule) - return Response(serializer.data) + try: + rule = self.get_object(uuid_primary_key(pk)) + serializer = serializers.RuleSerializer(rule) + return Response(data=serializer.data) + except (ValueError, AttributeError): + return Response( + {"error": "Invalid request ID."}, status=HTTP_400_BAD_REQUEST + ) def put(self, request, pk): """Update a rule by ID.""" - rule = self.get_object(pk) - serializer = serializers.RuleSerializer(rule, data=request.data, partial=True) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data) + try: + rule = self.get_object(uuid_primary_key(pk)) + serializer = serializers.RuleSerializer( + rule, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data) + except (ValueError, AttributeError): + return Response( + {"error": "Invalid request ID."}, status=HTTP_400_BAD_REQUEST + ) return Response(serializer.errors, status=HTTP_400_BAD_REQUEST) def delete(self, request, pk): """Delete a rule by ID.""" - Rule.objects.filter(pk=pk).delete() + try: + Rule.objects.filter(pk=uuid_primary_key(pk)).delete() + except (AttributeError, ValueError): + logger.warning(f"Attempted to delete request with invalid UUID: {pk}") return Response(status=HTTP_204_NO_CONTENT) From 9aee5c16c6a480d79740102d8a23d29b43139016 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:01 +0200 Subject: [PATCH 055/201] update view, middle wares and tests --- haproxy-route-policy/policy/middleware.py | 16 ++++--- haproxy-route-policy/policy/urls.py | 2 +- haproxy-route-policy/policy/views.py | 56 +++++------------------ 3 files changed, 23 insertions(+), 51 deletions(-) diff --git a/haproxy-route-policy/policy/middleware.py b/haproxy-route-policy/policy/middleware.py index cdd6fe22b..ee01870a3 100644 --- a/haproxy-route-policy/policy/middleware.py +++ b/haproxy-route-policy/policy/middleware.py @@ 
-12,12 +12,8 @@ logger = logging.getLogger(__name__) -class DatabaseErrorMiddleware: - """Catch database connection errors and return a generic 503 response. - - This prevents the application's stack trace from being exposed to the client - when the database is unreachable or encounters a connection-level error. - """ +class BaseMiddleware: + """Base middleware class to provide common structure for all middleware.""" def __init__(self, get_response): """Initialize the middleware.""" @@ -27,6 +23,14 @@ def __call__(self, request): """Process the request.""" return self.get_response(request) + +class DatabaseErrorMiddleware(BaseMiddleware): + """Catch database connection errors and return a generic 503 response. + + This prevents the application's stack trace from being exposed to the client + when the database is unreachable or encounters a connection-level error. + """ + def process_exception(self, _request, exception): """Handle database errors raised during view processing.""" if isinstance(exception, (OperationalError, DatabaseError)): diff --git a/haproxy-route-policy/policy/urls.py b/haproxy-route-policy/policy/urls.py index 95c4da997..3229cb919 100644 --- a/haproxy-route-policy/policy/urls.py +++ b/haproxy-route-policy/policy/urls.py @@ -14,7 +14,7 @@ name="api-requests", ), path( - "api/v1/requests/", + "api/v1/requests/", views.RequestDetailView.as_view(), name="api-request-detail", ), diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index c1f3af15d..16cfc874a 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -9,7 +9,6 @@ from rest_framework.status import ( HTTP_201_CREATED, HTTP_400_BAD_REQUEST, - HTTP_404_NOT_FOUND, HTTP_204_NO_CONTENT, ) from django.http import Http404 @@ -70,23 +69,13 @@ class RequestDetailView(APIView): def get(self, _request, pk): """Get a request by ID.""" - try: - backend_request = BackendRequest.objects.get(pk=uuid_primary_key(pk)) - serializer = 
serializers.BackendRequestSerializer(backend_request) - except BackendRequest.DoesNotExist: - return Response(status=HTTP_404_NOT_FOUND) - except (ValueError, AttributeError): - return Response( - {"error": "Invalid request ID."}, status=HTTP_400_BAD_REQUEST - ) + backend_request = get_object(BackendRequest, pk) + serializer = serializers.BackendRequestSerializer(backend_request) return Response(serializer.data) def delete(self, _request, pk): """Delete a request by ID.""" - try: - BackendRequest.objects.filter(pk=uuid_primary_key(pk)).delete() - except (AttributeError, ValueError): - logger.warning(f"Attempted to delete request with invalid UUID: {pk}") + BackendRequest.objects.filter(pk=pk).delete() return Response(status=HTTP_204_NO_CONTENT) @@ -111,43 +100,22 @@ def post(self, request): class RuleDetailView(APIView): """View for getting, updating, or deleting a single rule.""" - def get_object(self, pk: str): - try: - return Rule.objects.get(pk=pk) - except Rule.DoesNotExist: - raise Http404 - def get(self, request, pk): """Get a rule by ID.""" - try: - rule = self.get_object(uuid_primary_key(pk)) - serializer = serializers.RuleSerializer(rule) - return Response(data=serializer.data) - except (ValueError, AttributeError): - return Response( - {"error": "Invalid request ID."}, status=HTTP_400_BAD_REQUEST - ) + rule = get_object(Rule, pk) + serializer = serializers.RuleSerializer(rule) + return Response(data=serializer.data) def put(self, request, pk): """Update a rule by ID.""" - try: - rule = self.get_object(uuid_primary_key(pk)) - serializer = serializers.RuleSerializer( - rule, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data) - except (ValueError, AttributeError): - return Response( - {"error": "Invalid request ID."}, status=HTTP_400_BAD_REQUEST - ) + rule = get_object(Rule, pk) + serializer = serializers.RuleSerializer(rule, data=request.data, partial=True) + if serializer.is_valid(): + 
serializer.save() + return Response(serializer.data) return Response(serializer.errors, status=HTTP_400_BAD_REQUEST) def delete(self, request, pk): """Delete a rule by ID.""" - try: - Rule.objects.filter(pk=uuid_primary_key(pk)).delete() - except (AttributeError, ValueError): - logger.warning(f"Attempted to delete request with invalid UUID: {pk}") + Rule.objects.filter(pk=pk).delete() return Response(status=HTTP_204_NO_CONTENT) From 1328a474466f23a06fb522b8f78c033359f72b66 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:01 +0200 Subject: [PATCH 056/201] chore(deps): update dependency haproxy-spoe-auth to v75 (#406) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- terraform/tests/main.tftest.hcl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/terraform/tests/main.tftest.hcl b/terraform/tests/main.tftest.hcl index 66bd372c6..2cb3f83ad 100644 --- a/terraform/tests/main.tftest.hcl +++ b/terraform/tests/main.tftest.hcl @@ -30,7 +30,7 @@ run "basic_deploy" { hostname = "one.example.com" haproxy_spoe_auth = { # renovate: depName="haproxy-spoe-auth" - revision = 63 + revision = 75 } oauth_external_idp_integrator = { # renovate: depName="oauth-external-idp-integrator" @@ -53,7 +53,7 @@ run "basic_deploy" { haproxy_spoe_auth = { channel = "latest/edge" # renovate: depName="haproxy-spoe-auth" - revision = 63 + revision = 75 } } ] From 58149f4dd48a5c7b3c41b4117b2676249f7f4907 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:10 +0200 Subject: [PATCH 057/201] refactor tests by parametrizing --- .../policy/tests/test_models.py | 259 ++++-------------- 1 file changed, 51 insertions(+), 208 deletions(-) diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 417e59887..17e70ded4 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -47,29 +47,6 @@ def 
test_create_with_all_fields(self): class TestRuleModel(TestCase): """Tests for Rule model creation, serialisation, and validation.""" - def test_create_hostname_and_path_match_rule(self): - """Test creating a hostname_and_path_match rule with valid data.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": ["/api"]}, - "action": db_models.RULE_ACTION_DENY, - "priority": 1, - "comment": "Deny example.com/api", - } - ) - self.assertTrue(serializer.is_valid(), serializer.errors) - rule = serializer.save() - - self.assertIsNotNone(rule.id) - self.assertEqual(rule.kind, db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) - self.assertEqual(rule.value, {"hostnames": ["example.com"], "paths": ["/api"]}) - self.assertEqual(rule.action, db_models.RULE_ACTION_DENY) - self.assertEqual(rule.priority, 1) - self.assertEqual(rule.comment, "Deny example.com/api") - self.assertIsNotNone(rule.created_at) - self.assertIsNotNone(rule.updated_at) - def test_create_rule_defaults(self): """Test that default values are set correctly.""" serializer = serializers.RuleSerializer( @@ -85,202 +62,68 @@ def test_create_rule_defaults(self): self.assertEqual(rule.priority, 0) self.assertEqual(rule.comment, "") - def test_invalid_kind_rejected(self): - """Test that an invalid kind value is rejected.""" - serializer = serializers.RuleSerializer( - data={ - "kind": "invalid_kind", - "value": 1, - "action": db_models.RULE_ACTION_ALLOW, - } - ) - self.assertFalse(serializer.is_valid()) - self.assertIn("kind", serializer.errors) - - def test_invalid_action_rejected(self): - """Test that an invalid action value is rejected.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": []}, - "action": "invalid_action", - } - ) - self.assertFalse(serializer.is_valid()) - self.assertIn("action", 
serializer.errors) - - def test_hostname_and_path_match_value_must_be_dict(self): - """Test that hostname_and_path_match rules require a dict value.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": "not-a-dict", - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - self.assertIn("non_field_errors", serializer.errors) - self.assertIn( - "value field must be a JSON object", - str(serializer.errors["non_field_errors"]), - ) - - def test_hostname_and_path_match_value_list_rejected(self): - """Test that hostname_and_path_match rules reject a list value.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": ["not", "a", "dict"], - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - self.assertIn("non_field_errors", serializer.errors) - self.assertIn( - "value field must be a JSON object", - str(serializer.errors["non_field_errors"]), - ) - - def test_hostname_and_path_match_value_int_rejected(self): - """Test that hostname_and_path_match rules reject an integer value.""" + def test_create_hostname_and_path_match_rule(self): + """Test creating a hostname_and_path_match rule with valid data.""" serializer = serializers.RuleSerializer( data={ "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": 42, + "value": {"hostnames": ["example.com"], "paths": ["/api"]}, "action": db_models.RULE_ACTION_DENY, + "priority": 1, + "comment": "Deny example.com/api", } ) - self.assertFalse(serializer.is_valid()) - self.assertIn("non_field_errors", serializer.errors) - self.assertIn( - "value field must be a JSON object", - str(serializer.errors["non_field_errors"]), - ) + self.assertTrue(serializer.is_valid(), serializer.errors) + rule = serializer.save() - def test_hostname_and_path_match_invalid_hostname(self): - """Test that invalid hostnames are rejected in 
hostname_and_path_match rules.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["not a valid hostname!!!"], "paths": []}, - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - self.assertIn("Invalid hostname", str(serializer.errors)) + self.assertIsNotNone(rule.id) + self.assertEqual(rule.kind, db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) + self.assertEqual(rule.value, {"hostnames": ["example.com"], "paths": ["/api"]}) + self.assertEqual(rule.action, db_models.RULE_ACTION_DENY) + self.assertEqual(rule.priority, 1) + self.assertEqual(rule.comment, "Deny example.com/api") + self.assertIsNotNone(rule.created_at) + self.assertIsNotNone(rule.updated_at) - def test_hostname_and_path_match_multiple_invalid_hostnames(self): - """Test that multiple invalid hostnames are reported.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": { - "hostnames": ["valid.com", "bad host", "also bad!"], - "paths": [], + def test_valid_rule_data_accepted(self): + """Valid rule data should pass serializer validation.""" + valid_cases = [ + ( + "valid hostnames", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": { + "hostnames": ["example.com", "sub.example.org"], + "paths": [], + }, + "action": db_models.RULE_ACTION_ALLOW, }, - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - errors_str = str(serializer.errors) - self.assertIn("bad host", errors_str) - self.assertIn("also bad!", errors_str) - - def test_hostname_and_path_match_valid_hostnames_accepted(self): - """Test that valid hostnames pass validation.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": { - "hostnames": ["example.com", "sub.example.org"], - "paths": [], + ), + ( + "empty hostnames", + { + "kind": 
db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": [], "paths": []}, + "action": db_models.RULE_ACTION_ALLOW, }, - "action": db_models.RULE_ACTION_ALLOW, - } - ) - self.assertTrue(serializer.is_valid(), serializer.errors) - - def test_hostname_and_path_match_empty_hostnames_accepted(self): - """Test that an empty hostnames list passes validation.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": [], "paths": []}, - "action": db_models.RULE_ACTION_ALLOW, - } - ) - self.assertTrue(serializer.is_valid(), serializer.errors) - - def test_hostname_and_path_match_invalid_path_not_starting_with_slash(self): - """Test that paths not starting with / are rejected.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": ["api/v1"]}, - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - self.assertIn("Invalid path", str(serializer.errors)) - - def test_hostname_and_path_match_invalid_path_non_string(self): - """Test that non-string paths are rejected.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": [123]}, - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - - def test_hostname_and_path_match_valid_paths_accepted(self): - """Test that valid paths starting with / pass validation.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": { - "hostnames": ["example.com"], - "paths": ["/api", "/health"], + ), + ( + "valid paths", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": { + "hostnames": ["example.com"], + "paths": ["/api", "/health"], + }, + "action": db_models.RULE_ACTION_ALLOW, }, - "action": 
db_models.RULE_ACTION_ALLOW, - } - ) - self.assertTrue(serializer.is_valid(), serializer.errors) - - def test_hostname_and_path_match_empty_paths_accepted(self): - """Test that an empty paths list passes validation.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": []}, - "action": db_models.RULE_ACTION_ALLOW, - } - ) - self.assertTrue(serializer.is_valid(), serializer.errors) - - def test_hostname_and_path_match_multiple_invalid_paths(self): - """Test that multiple invalid paths are reported.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": [], "paths": ["no-slash", "also-bad"]}, - "action": db_models.RULE_ACTION_DENY, - } - ) - self.assertFalse(serializer.is_valid()) - errors_str = str(serializer.errors) - self.assertIn("no-slash", errors_str) - self.assertIn("also-bad", errors_str) - - def test_hostname_and_path_match_both_valid_hostnames_and_paths(self): - """Test that a rule with both valid hostnames and paths passes.""" - serializer = serializers.RuleSerializer( - data={ - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": { - "hostnames": ["example.com", "app.example.com"], - "paths": ["/api", "/v1/health"], + ), + ( + "empty paths", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": {"hostnames": ["example.com"], "paths": []}, + "action": db_models.RULE_ACTION_ALLOW, }, "action": db_models.RULE_ACTION_DENY, "priority": 3, From 9207755b4b2e7817995897ff7cb4c4e1857b74df Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:10 +0200 Subject: [PATCH 058/201] chore(deps): update dependency haproxy-spoe-auth to v77 (#408) * chore(deps): update dependency haproxy-spoe-auth to v77 * fix(tests): Fix root integration tests by ignoring route-policy specific tests. 
* chore: fmt files --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Ali Ugur --- terraform/tests/main.tftest.hcl | 4 ++-- tests/integration/conftest.py | 3 ++- tox.toml | 1 + 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/terraform/tests/main.tftest.hcl b/terraform/tests/main.tftest.hcl index 2cb3f83ad..74e141dde 100644 --- a/terraform/tests/main.tftest.hcl +++ b/terraform/tests/main.tftest.hcl @@ -30,7 +30,7 @@ run "basic_deploy" { hostname = "one.example.com" haproxy_spoe_auth = { # renovate: depName="haproxy-spoe-auth" - revision = 75 + revision = 77 } oauth_external_idp_integrator = { # renovate: depName="oauth-external-idp-integrator" @@ -53,7 +53,7 @@ run "basic_deploy" { haproxy_spoe_auth = { channel = "latest/edge" # renovate: depName="haproxy-spoe-auth" - revision = 75 + revision = 77 } } ] diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 9448fbfb5..51f57ee88 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -277,7 +277,8 @@ def deploy_iam_bundle_fixture(k8s_juju: jubilant.Juju): k8s_juju.integrate("traefik-public:traefik-route", "hydra:public-route") k8s_juju.integrate("traefik-public:traefik-route", "kratos:public-route") k8s_juju.integrate( - "traefik-public:traefik-route", "identity-platform-login-ui-operator:public-route" + "traefik-public:traefik-route", + "identity-platform-login-ui-operator:public-route", ) k8s_juju.config("kratos", {"enforce_mfa": False}) diff --git a/tox.toml b/tox.toml index 21dfc5cee..9badf371d 100644 --- a/tox.toml +++ b/tox.toml @@ -87,6 +87,7 @@ commands = [ "--ignore={toxinidir}/haproxy-ddos-protection-configurator", "--ignore={toxinidir}/haproxy-spoe-auth-operator", "--ignore={toxinidir}/haproxy-operator", + "--ignore={toxinidir}/haproxy-route-policy", "--log-cli-level=INFO", "-s", { replace = "posargs", extend = "true" }, From 8ea78d96fd2d6819deb9ea830e35ca958d6b692a Mon Sep 17 00:00:00 
2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:10 +0200 Subject: [PATCH 059/201] group tests by parameterizing --- .../policy/tests/test_views.py | 56 ++++++++++--------- 1 file changed, 30 insertions(+), 26 deletions(-) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 676b059b3..864d9bd60 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -213,32 +213,36 @@ def test_create_rule_with_defaults(self): self.assertEqual(data["priority"], 0) self.assertEqual(data["comment"], "") - def test_create_rule_invalid_kind(self): - """POST returns 400 when kind is invalid.""" - payload = { - "kind": "invalid_kind", - "value": 1, - "action": db_models.RULE_ACTION_ALLOW, - } - response = self.client.post("/api/v1/rules", data=payload, format="json") - self.assertEqual(response.status_code, 400) - - def test_create_rule_invalid_value_for_kind(self): - """POST returns 400 when value doesn't match kind requirements.""" - payload = { - "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": "not-a-dict", - "action": db_models.RULE_ACTION_DENY, - } - response = self.client.post("/api/v1/rules", data=payload, format="json") - self.assertEqual(response.status_code, 400) - - def test_create_rule_rejects_non_dict(self): - """POST returns 400 when the body is not a JSON object.""" - response = self.client.post( - "/api/v1/rules", data=[{"kind": "test"}], format="json" - ) - self.assertEqual(response.status_code, 400) + def test_create_rule_invalid_payload(self): + """POST returns 400 for invalid rule payloads.""" + invalid_payloads = [ + ( + "invalid kind", + { + "kind": "invalid_kind", + "value": 1, + "action": db_models.RULE_ACTION_ALLOW, + }, + ), + ( + "value doesn't match kind", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "value": "not-a-dict", + "action": db_models.RULE_ACTION_DENY, + }, + ), + ( + "body is not a JSON object", 
+ [{"kind": "test"}], + ), + ] + for label, payload in invalid_payloads: + with self.subTest(label=label): + response = self.client.post( + "/api/v1/rules", data=payload, format="json" + ) + self.assertEqual(response.status_code, 400) class TestRuleDetailView(TestCase): From 8268b3d69d0e520c87a6ead939eea0082b3682b7 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 060/201] refactor Rule model to rename attribute from "value" to "parameters" --- haproxy-route-policy/policy/db_models.py | 4 ++-- ...le_alter_backendrequest_paths_and_more.py} | 24 +++++++++++++------ haproxy-route-policy/policy/serializers.py | 8 +++---- .../policy/tests/test_models.py | 16 +++++++------ .../policy/tests/test_views.py | 20 ++++++++-------- 5 files changed, 42 insertions(+), 30 deletions(-) rename haproxy-route-policy/policy/migrations/{0002_rule_alter_backendrequest_hostname_acls_and_more.py => 0002_rule_alter_backendrequest_paths_and_more.py} (59%) diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index caf23e64c..7eb294f6d 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -116,7 +116,7 @@ class Rule(models.Model): Attrs: id: UUID primary key. kind: The type of rule (e.g. hostname_and_path_match, match_request_id). - value: The rule value, structure depends on kind. + parameters: The rule parameters, structure depends on kind. action: Whether the rule allows or denies matching requests. priority: Rule priority (higher = evaluated first, deny wins on tie). comment: Optional human-readable comment. 
@@ -128,7 +128,7 @@ class Rule(models.Model): primary_key=True, default=uuid.uuid4, editable=False ) kind: models.TextField = models.TextField(choices=RULE_KIND_CHOICES) - value: models.JSONField = models.JSONField() + parameters: models.JSONField = models.JSONField() action: models.TextField = models.TextField(choices=RULE_ACTION_CHOICES) priority: models.IntegerField = models.IntegerField(default=0, blank=True) comment: models.TextField = models.TextField(default="", blank=True) diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py similarity index 59% rename from haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py rename to haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py index 84e403e52..0a7b61e4f 100644 --- a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_hostname_acls_and_more.py +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py @@ -1,5 +1,6 @@ -# Generated by Django 6.0.3 on 2026-03-17 20:05 +# Generated by Django 6.0.3 on 2026-03-23 21:53 +import policy.db_models import uuid from django.db import migrations, models @@ -25,21 +26,30 @@ class Migration(migrations.Migration): ( "kind", models.TextField( - choices=[ - ("hostname_and_path_match", "hostname_and_path_match"), - ("match_request_id", "match_request_id"), - ] + choices=[("hostname_and_path_match", "hostname_and_path_match")] ), ), - ("value", models.JSONField()), + ("parameters", models.JSONField()), ( "action", models.TextField(choices=[("allow", "allow"), ("deny", "deny")]), ), - ("priority", models.IntegerField(default=0)), + ("priority", models.IntegerField(blank=True, default=0)), ("comment", models.TextField(blank=True, default="")), ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", 
models.DateTimeField(auto_now=True)), ], ), + migrations.AlterField( + model_name="backendrequest", + name="paths", + field=models.JSONField( + blank=True, default=list, validators=[policy.db_models.validate_paths] + ), + ), + migrations.AlterField( + model_name="backendrequest", + name="port", + field=models.IntegerField(validators=[policy.db_models.validate_port]), + ), ] diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py index 590b2d223..a71d371a7 100644 --- a/haproxy-route-policy/policy/serializers.py +++ b/haproxy-route-policy/policy/serializers.py @@ -28,12 +28,12 @@ class Meta: # pyright: ignore[reportIncompatibleVariableOverride] def validate(self, attrs): """Custom validation logic for the Rule model.""" if attrs.get("kind") == RULE_KIND_HOSTNAME_AND_PATH_MATCH: - if not isinstance(attrs.get("value"), dict): + if not isinstance(attrs.get("parameters"), dict): raise serializers.ValidationError( - "The value field must be a JSON object." + "The parameters field must be a JSON object." 
) - if hostnames := typing.cast(dict, attrs.get("value")).get("hostnames"): + if hostnames := typing.cast(dict, attrs.get("parameters")).get("hostnames"): if invalid_hostnames := [ hostname for hostname in hostnames if not domain(hostname) ]: @@ -41,7 +41,7 @@ def validate(self, attrs): f"Invalid hostname(s) in rule: {', '.join(invalid_hostnames)}" ) - if paths := typing.cast(dict, attrs.get("value")).get("paths"): + if paths := typing.cast(dict, attrs.get("parameters")).get("paths"): if invalid_paths := [path for path in paths if is_valid_path(path)]: raise serializers.ValidationError( f"Invalid path(s) in rule: {', '.join([str(path) for path in invalid_paths])}" diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 17e70ded4..8592df98d 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -52,7 +52,7 @@ def test_create_rule_defaults(self): serializer = serializers.RuleSerializer( data={ "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["test.com"], "paths": []}, + "parameters": {"hostnames": ["test.com"], "paths": []}, "action": db_models.RULE_ACTION_ALLOW, } ) @@ -67,7 +67,7 @@ def test_create_hostname_and_path_match_rule(self): serializer = serializers.RuleSerializer( data={ "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": ["/api"]}, + "parameters": {"hostnames": ["example.com"], "paths": ["/api"]}, "action": db_models.RULE_ACTION_DENY, "priority": 1, "comment": "Deny example.com/api", @@ -78,7 +78,9 @@ def test_create_hostname_and_path_match_rule(self): self.assertIsNotNone(rule.id) self.assertEqual(rule.kind, db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) - self.assertEqual(rule.value, {"hostnames": ["example.com"], "paths": ["/api"]}) + self.assertEqual( + rule.parameters, {"hostnames": ["example.com"], "paths": ["/api"]} + ) 
self.assertEqual(rule.action, db_models.RULE_ACTION_DENY) self.assertEqual(rule.priority, 1) self.assertEqual(rule.comment, "Deny example.com/api") @@ -92,7 +94,7 @@ def test_valid_rule_data_accepted(self): "valid hostnames", { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": { + "parameters": { "hostnames": ["example.com", "sub.example.org"], "paths": [], }, @@ -103,7 +105,7 @@ def test_valid_rule_data_accepted(self): "empty hostnames", { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": [], "paths": []}, + "parameters": {"hostnames": [], "paths": []}, "action": db_models.RULE_ACTION_ALLOW, }, ), @@ -111,7 +113,7 @@ def test_valid_rule_data_accepted(self): "valid paths", { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": { + "parameters": { "hostnames": ["example.com"], "paths": ["/api", "/health"], }, @@ -122,7 +124,7 @@ def test_valid_rule_data_accepted(self): "empty paths", { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": []}, + "parameters": {"hostnames": ["example.com"], "paths": []}, "action": db_models.RULE_ACTION_ALLOW, }, "action": db_models.RULE_ACTION_DENY, diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 864d9bd60..2f0254a7b 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -154,7 +154,7 @@ def test_list_returns_all_ordered_by_priority(self): """GET returns all rules ordered by descending priority.""" rule_low = db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": ["/api"]}, + parameters={"hostnames": ["example.com"], "paths": ["/api"]}, action=db_models.RULE_ACTION_ALLOW, priority=0, ) @@ -162,7 +162,7 @@ def test_list_returns_all_ordered_by_priority(self): rule_low.save() rule_high = db_models.Rule( 
kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.org"], "paths": ["/admin"]}, + parameters={"hostnames": ["example.org"], "paths": ["/admin"]}, action=db_models.RULE_ACTION_DENY, priority=10, ) @@ -181,7 +181,7 @@ def test_create_hostname_and_path_match_rule(self): """POST creates a hostname_and_path_match rule.""" payload = { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": ["/api"]}, + "parameters": {"hostnames": ["example.com"], "paths": ["/api"]}, "action": db_models.RULE_ACTION_DENY, "priority": 5, "comment": "Block example.com/api", @@ -191,7 +191,7 @@ def test_create_hostname_and_path_match_rule(self): data = response.json() self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) self.assertEqual( - data["value"], {"hostnames": ["example.com"], "paths": ["/api"]} + data["parameters"], {"hostnames": ["example.com"], "paths": ["/api"]} ) self.assertEqual(data["action"], db_models.RULE_ACTION_DENY) self.assertEqual(data["priority"], 5) @@ -204,7 +204,7 @@ def test_create_rule_with_defaults(self): """POST creates a rule with default priority and comment.""" payload = { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": {"hostnames": ["example.com"], "paths": ["/api"]}, + "parameters": {"hostnames": ["example.com"], "paths": ["/api"]}, "action": db_models.RULE_ACTION_DENY, } response = self.client.post("/api/v1/rules", data=payload, format="json") @@ -220,15 +220,15 @@ def test_create_rule_invalid_payload(self): "invalid kind", { "kind": "invalid_kind", - "value": 1, + "parameters": 1, "action": db_models.RULE_ACTION_ALLOW, }, ), ( - "value doesn't match kind", + "parameters doesn't match kind", { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - "value": "not-a-dict", + "parameters": "not-a-dict", "action": db_models.RULE_ACTION_DENY, }, ), @@ -253,7 +253,7 @@ def setUp(self): self.client = APIClient() self.rule = db_models.Rule( 
kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": ["/api"]}, + parameters={"hostnames": ["example.com"], "paths": ["/api"]}, action=db_models.RULE_ACTION_DENY, priority=1, comment="Test rule", @@ -293,7 +293,7 @@ def test_update_rule(self): # Unchanged fields remain the same self.assertEqual(data["kind"], db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH) self.assertEqual( - data["value"], {"hostnames": ["example.com"], "paths": ["/api"]} + data["parameters"], {"hostnames": ["example.com"], "paths": ["/api"]} ) def test_update_nonexistent(self): From 99b30ffcf74726f204372b241986bb78d218d25c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 061/201] update test name --- haproxy-route-policy/policy/tests/test_models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 8592df98d..5ceccbb28 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -47,7 +47,7 @@ def test_create_with_all_fields(self): class TestRuleModel(TestCase): """Tests for Rule model creation, serialisation, and validation.""" - def test_create_rule_defaults(self): + def test_create_rule_set_default_priority_and_comment(self): """Test that default values are set correctly.""" serializer = serializers.RuleSerializer( data={ From f424e46478099f2d4779af075252af0b6ef92a5e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 062/201] update naming --- haproxy-route-policy/policy/tests/test_views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 2f0254a7b..44dc2e12c 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py 
@@ -200,7 +200,7 @@ def test_create_hostname_and_path_match_rule(self): self.assertIn("created_at", data) self.assertEqual(db_models.Rule.objects.count(), 1) - def test_create_rule_with_defaults(self): + def test_create_rule_set_default_priority_and_comment(self): """POST creates a rule with default priority and comment.""" payload = { "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, From b83802dc5d5ddd09bba59518793ba74d8d615efe Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 063/201] Add coverage-report as part of unit test suite --- haproxy-route-policy/pyproject.toml | 6 ++ haproxy-route-policy/tox.toml | 8 ++- haproxy-route-policy/uv.lock | 92 +++++++++++++++++++++++++++++ 3 files changed, 105 insertions(+), 1 deletion(-) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index 3eab72619..e1443b556 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -12,6 +12,9 @@ dependencies = [ ] [dependency-groups] +coverage-report = [ + "coverage[toml]>=7.13.5", +] lint = [ "codespell>=2.4.2", "django-stubs>=6.0.0", @@ -24,3 +27,6 @@ lint = [ static = [ "bandit[toml]>=1.9.4", ] +unit = [ + "coverage[toml]>=7.13.5", +] diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index 15b8d1214..327c78402 100644 --- a/haproxy-route-policy/tox.toml +++ b/haproxy-route-policy/tox.toml @@ -20,7 +20,7 @@ PY_COLORS = "1" description = "Run unit tests" commands = [ [ - "uv", + "coverage", "run", "manage.py", "test", @@ -29,6 +29,7 @@ commands = [ "-v2", ], ] +dependency_groups = ["unit"] [env.lint] description = "Check code against coding style standards" @@ -75,3 +76,8 @@ dependency_groups = [ "static" ] src_path = "{toxinidir}/policy/" tst_path = "{toxinidir}/policy/tests" all_path = ["{toxinidir}/policy/"] + +[env.coverage-report] +description = "Create test coverage report" +commands = [ [ "coverage", "report" ] ] +dependency_groups = [ 
"coverage-report" ] diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 5e705cf7f..8caecae5a 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -44,6 +44,90 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "coverage" +version = "7.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = 
"2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = "https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, 
upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = "https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, 
upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = "2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, 
upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = "https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, 
upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = "2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, +] + [[package]] name = "django" version = "6.0.3" @@ -145,6 +229,9 @@ dependencies = [ ] [package.dev-dependencies] +coverage-report = [ + { name = "coverage" }, +] lint = [ { name = "codespell" }, { name = "django-stubs" }, @@ -157,6 +244,9 @@ lint = [ static = [ { name = "bandit" }, ] +unit = [ + { name = "coverage" }, +] [package.metadata] requires-dist = [ @@ -167,6 +257,7 @@ requires-dist = [ ] [package.metadata.requires-dev] +coverage-report = [{ name = "coverage", extras = ["toml"], specifier = ">=7.13.5" }] lint = [ { name = "codespell", specifier = ">=2.4.2" }, { name = "django-stubs", specifier = ">=6.0.0" }, @@ -177,6 +268,7 @@ lint = [ { name = "ruff", specifier = ">=0.15.6" }, ] static = [{ name = "bandit", extras = ["toml"], specifier = ">=1.9.4" }] +unit = [{ name = "coverage", extras = ["toml"], specifier = ">=7.13.5" }] [[package]] name = "librt" From 0f4ab50600363c6424cbb25532b233402679050d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 064/201] update env list --- haproxy-route-policy/tox.toml | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index 327c78402..40fa551cc 100644 --- a/haproxy-route-policy/tox.toml +++ b/haproxy-route-policy/tox.toml @@ -5,7 +5,7 @@ skipsdist = true skip_missing_interpreters = true requires = ["tox>=4.21"] no_package = true -envlist = [ "lint", "unit"] +envlist = ["lint", "unit", "static", "coverage-report"] [env_run_base] passenv = ["PYTHONPATH"] @@ -69,8 +69,17 @@ dependency_groups = ["lint"] [env.static] description = "Run static analysis tests" -commands = [ [ "bandit", "-c", 
"{toxinidir}/pyproject.toml", "-r", "{[vars]src_path}", "{[vars]tst_path}" ] ] -dependency_groups = [ "static" ] +commands = [ + [ + "bandit", + "-c", + "{toxinidir}/pyproject.toml", + "-r", + "{[vars]src_path}", + "{[vars]tst_path}", + ], +] +dependency_groups = ["static"] [vars] src_path = "{toxinidir}/policy/" @@ -79,5 +88,5 @@ all_path = ["{toxinidir}/policy/"] [env.coverage-report] description = "Create test coverage report" -commands = [ [ "coverage", "report" ] ] -dependency_groups = [ "coverage-report" ] +commands = [["coverage", "report"]] +dependency_groups = ["coverage-report"] From fb49d5827c1fd6c9438a87c48bb1944e0ca5a6da Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 065/201] implement rule evaluation --- haproxy-route-policy/policy/rule_engine.py | 116 +++++++ .../policy/tests/test_rule_engine.py | 325 ++++++++++++++++++ .../policy/tests/test_views.py | 93 +++++ haproxy-route-policy/policy/views.py | 14 +- 4 files changed, 545 insertions(+), 3 deletions(-) create mode 100644 haproxy-route-policy/policy/rule_engine.py create mode 100644 haproxy-route-policy/policy/tests/test_rule_engine.py diff --git a/haproxy-route-policy/policy/rule_engine.py b/haproxy-route-policy/policy/rule_engine.py new file mode 100644 index 000000000..5d9d38499 --- /dev/null +++ b/haproxy-route-policy/policy/rule_engine.py @@ -0,0 +1,116 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Rule matching engine for evaluating backend requests against rules. + +Rules are evaluated following these principles: + P1: Rules are grouped by priority and evaluated starting from the highest + priority group. + P2: Within the same priority group, "deny" rules take precedence over + "allow" rules. + +If no rules match a request, its status remains "pending". 
+""" + +import logging +from itertools import groupby +from policy.db_models import ( + BackendRequest, + Rule, + RULE_ACTION_ALLOW, + RULE_ACTION_DENY, + RULE_KIND_HOSTNAME_AND_PATH_MATCH, + REQUEST_STATUS_ACCEPTED, + REQUEST_STATUS_REJECTED, + REQUEST_STATUS_PENDING, +) + +logger = logging.getLogger(__name__) + + +def _hostname_and_path_match(rule: Rule, request: BackendRequest) -> bool: + """Check if a hostname_and_path_match rule matches a backend request. + + A rule matches if: + 1. Any of the rule's `hostnames` appear in the request's `hostname_acls` + if `hostnames` is not empty. + 2. Any of the rule's `paths` appear in the request's `paths` + if `paths` is not empty.. + + Args: + rule: The rule to check. + request: The backend request to evaluate. + + Returns: + True if the rule matches the request, False otherwise. + """ + rule_hostnames: list = rule.value.get("hostnames", []) + rule_paths: list = rule.value.get("paths", []) + + hostname_matched = set(request.hostname_acls).intersection(set(rule_hostnames)) + path_matched = set(request.paths).intersection(set(rule_paths)) + if not rule_hostnames and not rule_paths: + return False + if not rule_hostnames: + return bool(path_matched) + if not rule_paths: + return bool(hostname_matched) + return bool(hostname_matched) and bool(path_matched) + + +def evaluate_request(request: BackendRequest) -> str: + """Evaluate a backend request against all rules and return the resulting status. + + Rules are fetched from the database, ordered by descending priority. + They are grouped by priority level and evaluated from highest to lowest. + + Within the same priority group: + - If any "deny" rule matches, the request is rejected. + - If any "allow" rule matches (and no deny matched), the request is accepted. + - If no rules match at this priority level, move to the next group. + + If no rules match at any priority level, the request stays "pending". + + Args: + request: The backend request to evaluate. 
+ + Returns: + The resulting status string: "accepted", "rejected", or "pending". + """ + rules = Rule.objects.all().order_by("-priority") + + for _priority, group in groupby(rules, key=lambda rule: rule.priority): + allow_matched = False + deny_matched = False + + for rule in group: + if not _matches(rule, request): + continue + + if rule.action == RULE_ACTION_DENY: + deny_matched = True + elif rule.action == RULE_ACTION_ALLOW: + allow_matched = True + + # P2: deny rules have priority over allow rules within the same priority level + if deny_matched: + return REQUEST_STATUS_REJECTED + if allow_matched: + return REQUEST_STATUS_ACCEPTED + + return REQUEST_STATUS_PENDING + + +def _matches(rule: Rule, request: BackendRequest) -> bool: + """Dispatch matching logic based on the rule kind. + + Args: + rule: The rule to evaluate. + request: The backend request to evaluate against. + + Returns: + True if the rule matches the request. + """ + if rule.kind == RULE_KIND_HOSTNAME_AND_PATH_MATCH: + return _hostname_and_path_match(rule, request) + return False diff --git a/haproxy-route-policy/policy/tests/test_rule_engine.py b/haproxy-route-policy/policy/tests/test_rule_engine.py new file mode 100644 index 000000000..edf3d11ed --- /dev/null +++ b/haproxy-route-policy/policy/tests/test_rule_engine.py @@ -0,0 +1,325 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Unit tests for the rule matching engine.""" + +from django.test import TestCase + +from policy import db_models +from policy.rule_engine import evaluate_request, _hostname_and_path_match + + +class TestHostnameAndPathMatch(TestCase): + """Tests for the _hostname_and_path_match matching function.""" + + def _make_request(self, hostname_acls=None, paths=None): + """Create and save a BackendRequest with the given hostnames and paths.""" + return db_models.BackendRequest.objects.create( + relation_id=1, + backend_name="test-backend", + hostname_acls=hostname_acls or [], + paths=paths or [], + port=443, + ) + + def _make_rule(self, hostnames=None, paths=None, action="deny", priority=0): + """Create and save a Rule with hostname_and_path_match kind.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": hostnames or [], "paths": paths or []}, + action=action, + priority=priority, + ) + rule.save() + return rule + + def test_exact_hostname_match(self): + """Rule matches when hostnames overlap exactly.""" + rule = self._make_rule(hostnames=["example.com"]) + request = self._make_request(hostname_acls=["example.com"]) + self.assertTrue(_hostname_and_path_match(rule, request)) + + def test_hostname_no_overlap(self): + """Rule does not match when hostnames don't overlap.""" + rule = self._make_rule(hostnames=["example.com"]) + request = self._make_request(hostname_acls=["other.com"]) + self.assertFalse(_hostname_and_path_match(rule, request)) + + def test_hostname_partial_overlap(self): + """Rule matches when at least one hostname overlaps.""" + rule = self._make_rule(hostnames=["example.com", "other.com"]) + request = self._make_request(hostname_acls=["example.com", "third.com"]) + self.assertTrue(_hostname_and_path_match(rule, request)) + + def test_empty_rule_hostnames_no_match(self): + """Rule with empty hostnames never matches.""" + rule = self._make_rule(hostnames=[]) + request = 
self._make_request(hostname_acls=["example.com"]) + self.assertFalse(_hostname_and_path_match(rule, request)) + + def test_empty_request_hostnames_no_match(self): + """Request with empty hostname_acls doesn't match a hostname rule.""" + rule = self._make_rule(hostnames=["example.com"]) + request = self._make_request(hostname_acls=[]) + self.assertFalse(_hostname_and_path_match(rule, request)) + + def test_empty_rule_paths_matches_all_paths(self): + """Rule with empty paths list matches any request paths (wildcard).""" + rule = self._make_rule(hostnames=["example.com"], paths=[]) + request = self._make_request( + hostname_acls=["example.com"], paths=["/api", "/health"] + ) + self.assertTrue(_hostname_and_path_match(rule, request)) + + def test_empty_rule_paths_matches_empty_request_paths(self): + """Rule with empty paths matches requests with no paths.""" + rule = self._make_rule(hostnames=["example.com"], paths=[]) + request = self._make_request(hostname_acls=["example.com"], paths=[]) + self.assertTrue(_hostname_and_path_match(rule, request)) + + def test_path_overlap(self): + """Rule matches when paths overlap.""" + rule = self._make_rule(hostnames=["example.com"], paths=["/api"]) + request = self._make_request( + hostname_acls=["example.com"], paths=["/api", "/health"] + ) + self.assertTrue(_hostname_and_path_match(rule, request)) + + def test_path_no_overlap(self): + """Rule does not match when paths don't overlap.""" + rule = self._make_rule(hostnames=["example.com"], paths=["/admin"]) + request = self._make_request( + hostname_acls=["example.com"], paths=["/api", "/health"] + ) + self.assertFalse(_hostname_and_path_match(rule, request)) + + def test_rule_paths_set_but_request_paths_empty(self): + """Rule with specific paths does not match request with no paths.""" + rule = self._make_rule(hostnames=["example.com"], paths=["/api"]) + request = self._make_request(hostname_acls=["example.com"], paths=[]) + self.assertFalse(_hostname_and_path_match(rule, 
request)) + + def test_hostname_match_but_path_mismatch(self): + """Rule doesn't match when hostnames match but paths don't.""" + rule = self._make_rule(hostnames=["example.com"], paths=["/admin"]) + request = self._make_request( + hostname_acls=["example.com"], paths=["/api"] + ) + self.assertFalse(_hostname_and_path_match(rule, request)) + + def test_multiple_hostnames_and_paths(self): + """Rule matches with multiple hostnames and paths that overlap.""" + rule = self._make_rule( + hostnames=["example.com", "other.com"], + paths=["/api", "/v2"], + ) + request = self._make_request( + hostname_acls=["other.com"], paths=["/v2", "/health"] + ) + self.assertTrue(_hostname_and_path_match(rule, request)) + + +class TestEvaluateRequest(TestCase): + """Tests for the evaluate_request function.""" + + def _make_request(self, hostname_acls=None, paths=None): + """Create and save a BackendRequest.""" + return db_models.BackendRequest.objects.create( + relation_id=1, + backend_name="test-backend", + hostname_acls=hostname_acls or [], + paths=paths or [], + port=443, + ) + + def _make_rule(self, hostnames=None, paths=None, action="deny", priority=0): + """Create and save a hostname_and_path_match Rule.""" + rule = db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": hostnames or [], "paths": paths or []}, + action=action, + priority=priority, + ) + rule.save() + return rule + + def test_no_rules_returns_pending(self): + """Request stays pending when no rules exist.""" + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_PENDING) + + def test_no_matching_rules_returns_pending(self): + """Request stays pending when no rules match.""" + self._make_rule(hostnames=["other.com"], action=db_models.RULE_ACTION_DENY) + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_PENDING) + + def 
test_single_allow_rule_accepts(self): + """Request is accepted when a single allow rule matches.""" + self._make_rule( + hostnames=["example.com"], action=db_models.RULE_ACTION_ALLOW + ) + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) + + def test_single_deny_rule_rejects(self): + """Request is rejected when a single deny rule matches.""" + self._make_rule( + hostnames=["example.com"], action=db_models.RULE_ACTION_DENY + ) + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_REJECTED) + + def test_deny_wins_over_allow_at_same_priority(self): + """Deny rule takes precedence over allow rule at the same priority.""" + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_ALLOW, + priority=0, + ) + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_DENY, + priority=0, + ) + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_REJECTED) + + def test_higher_priority_evaluated_first(self): + """Higher priority rules are evaluated before lower priority ones.""" + # Priority 1: allow example.com/client + self._make_rule( + hostnames=["example.com"], + paths=["/client"], + action=db_models.RULE_ACTION_ALLOW, + priority=1, + ) + # Priority 0: deny example.com (all paths) + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_DENY, + priority=0, + ) + request = self._make_request( + hostname_acls=["example.com"], paths=["/client"] + ) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) + + def test_spec_example_client_allowed(self): + """Spec example: request for example.com/client is allowed. 
+ + Rules: + Rule 1: deny example.com (all paths), priority=0 + Rule 2: allow example.com /api, priority=0 + Rule 3: allow example.com /client, priority=1 + """ + # Rule 1 + self._make_rule( + hostnames=["example.com"], + paths=[], + action=db_models.RULE_ACTION_DENY, + priority=0, + ) + # Rule 2 + self._make_rule( + hostnames=["example.com"], + paths=["/api"], + action=db_models.RULE_ACTION_ALLOW, + priority=0, + ) + # Rule 3 + self._make_rule( + hostnames=["example.com"], + paths=["/client"], + action=db_models.RULE_ACTION_ALLOW, + priority=1, + ) + request = self._make_request( + hostname_acls=["example.com"], paths=["/client"] + ) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) + + def test_spec_example_api_denied(self): + """Spec example: request for example.com/api is denied. + + Rules: + Rule 1: deny example.com (all paths), priority=0 + Rule 2: allow example.com /api, priority=0 + Rule 3: allow example.com /client, priority=1 + """ + # Rule 1 + self._make_rule( + hostnames=["example.com"], + paths=[], + action=db_models.RULE_ACTION_DENY, + priority=0, + ) + # Rule 2 + self._make_rule( + hostnames=["example.com"], + paths=["/api"], + action=db_models.RULE_ACTION_ALLOW, + priority=0, + ) + # Rule 3 + self._make_rule( + hostnames=["example.com"], + paths=["/client"], + action=db_models.RULE_ACTION_ALLOW, + priority=1, + ) + request = self._make_request( + hostname_acls=["example.com"], paths=["/api"] + ) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_REJECTED) + + def test_lower_priority_not_reached_if_higher_matches(self): + """If a higher priority group matches, lower priority groups are skipped.""" + # Priority 5: allow example.com + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_ALLOW, + priority=5, + ) + # Priority 0: deny example.com + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_DENY, + priority=0, + ) + request = 
self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) + + def test_only_matching_rules_affect_outcome(self): + """Non-matching rules at the same priority don't affect the result.""" + # Deny other.com at priority 0 + self._make_rule( + hostnames=["other.com"], + action=db_models.RULE_ACTION_DENY, + priority=0, + ) + # Allow example.com at priority 0 + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_ALLOW, + priority=0, + ) + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) + + def test_multiple_priority_groups_fallthrough(self): + """If highest priority group has no match, fall through to next.""" + # Priority 10: deny other.com (doesn't match) + self._make_rule( + hostnames=["other.com"], + action=db_models.RULE_ACTION_DENY, + priority=10, + ) + # Priority 0: allow example.com + self._make_rule( + hostnames=["example.com"], + action=db_models.RULE_ACTION_ALLOW, + priority=0, + ) + request = self._make_request(hostname_acls=["example.com"]) + self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 44dc2e12c..06d0f384c 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -88,6 +88,99 @@ def test_bulk_create(self): self.assertEqual(data[1]["port"], 443) self.assertEqual(db_models.BackendRequest.objects.count(), 2) + def test_bulk_create_evaluates_rules_on_creation(self): + """POST evaluates rules and sets status accordingly.""" + # Create a deny rule for example.com + db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": []}, + action=db_models.RULE_ACTION_DENY, + ).save() + payload = [ + { + "relation_id": 1, 
+ "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "paths": ["/api"], + "port": 443, + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertEqual(data[0]["status"], db_models.REQUEST_STATUS_REJECTED) + # Verify DB is updated too + self.assertEqual( + db_models.BackendRequest.objects.get(pk=data[0]["id"]).status, + db_models.REQUEST_STATUS_REJECTED, + ) + + def test_bulk_create_accepted_by_allow_rule(self): + """POST sets status to accepted when an allow rule matches.""" + db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["example.com"], "paths": []}, + action=db_models.RULE_ACTION_ALLOW, + ).save() + payload = [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "port": 443, + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json()[0]["status"], db_models.REQUEST_STATUS_ACCEPTED) + + def test_bulk_create_pending_when_no_rules_match(self): + """POST leaves status as pending when no rules match.""" + # Rule for other.com, request for example.com + db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": ["other.com"], "paths": []}, + action=db_models.RULE_ACTION_DENY, + ).save() + payload = [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "port": 443, + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json()[0]["status"], db_models.REQUEST_STATUS_PENDING) + + def test_bulk_create_mixed_statuses(self): + """POST evaluates each request independently against rules.""" + db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + value={"hostnames": 
["example.com"], "paths": []}, + action=db_models.RULE_ACTION_DENY, + ).save() + payload = [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "port": 443, + }, + { + "relation_id": 2, + "hostname_acls": ["other.com"], + "backend_name": "backend-2", + "port": 443, + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + data = response.json() + self.assertEqual(data[0]["status"], db_models.REQUEST_STATUS_REJECTED) + self.assertEqual(data[1]["status"], db_models.REQUEST_STATUS_PENDING) + def test_bulk_create_rejects_non_list(self): """POST returns 400 when the body is not a list.""" response = self.client.post( diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 16cfc874a..62fcc96dc 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -17,6 +17,7 @@ from django.db import transaction from policy import serializers from .db_models import REQUEST_STATUSES +from policy.rule_engine import evaluate_request class ListCreateRequestsView(APIView): @@ -37,7 +38,9 @@ def get(self, request): def post(self, request): """Bulk create backend requests. - All new requests are set to 'pending' (evaluation logic is deferred). + Each new request is evaluated against existing rules immediately. + If a matching rule is found, the request status is set accordingly. + If no rules match, the request stays as 'pending'. 
""" if not isinstance(request.data, list): return Response( @@ -53,8 +56,13 @@ def post(self, request): data=backend_request ) if serializer.is_valid(raise_exception=True): - serializer.save() - created.append(serializer.data) + instance = BackendRequest(**serializer.validated_data) + # Evaluate rules and update status + instance.status = evaluate_request(instance) + instance.save() + created.append( + serializers.BackendRequestSerializer(instance).data + ) except ValidationError as e: return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) except IntegrityError: From cd413be184a5c216d8659aec7977b981a5641028 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 066/201] add change artifact --- docs/release-notes/artifacts/pr0401.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0401.yaml diff --git a/docs/release-notes/artifacts/pr0401.yaml b/docs/release-notes/artifacts/pr0401.yaml new file mode 100644 index 000000000..490a4332b --- /dev/null +++ b/docs/release-notes/artifacts/pr0401.yaml @@ -0,0 +1,20 @@ +version_schema: 2 + +changes: + - title: Added rule matching engine and request evaluation on creation + author: tphan025 + type: minor + description: > + Added a rule matching engine that evaluates backend requests against rules + ordered by descending priority. Within the same priority group, deny rules + take precedence over allow rules. Integrated the engine into the bulk create + endpoint so that each new request is evaluated immediately and its status is + set to accepted, rejected, or pending accordingly. Included unit tests for the + matching logic and integration tests for rule evaluation during request creation. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/401 + related_doc: + related_issue: + visibility: public + highlight: false From fa372c1b4a42347695ebc4da64778e731d6bc7ca Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 067/201] update imports --- haproxy-route-policy/policy/views.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 62fcc96dc..aab89efb8 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -18,6 +18,7 @@ from policy import serializers from .db_models import REQUEST_STATUSES from policy.rule_engine import evaluate_request +from .serializers import BackendRequestSerializer, RuleSerializer class ListCreateRequestsView(APIView): @@ -52,17 +53,13 @@ def post(self, request): try: with transaction.atomic(): for backend_request in request.data: - serializer = serializers.BackendRequestSerializer( - data=backend_request - ) + serializer = BackendRequestSerializer(data=backend_request) if serializer.is_valid(raise_exception=True): instance = BackendRequest(**serializer.validated_data) # Evaluate rules and update status instance.status = evaluate_request(instance) instance.save() - created.append( - serializers.BackendRequestSerializer(instance).data - ) + created.append(BackendRequestSerializer(instance).data) except ValidationError as e: return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) except IntegrityError: @@ -93,12 +90,12 @@ class ListCreateRulesView(APIView): def get(self, request): """List all rules.""" queryset = Rule.objects.all().order_by("-priority", "created_at") - serializer = serializers.RuleSerializer(queryset, many=True) + serializer = RuleSerializer(queryset, many=True) return Response(serializer.data) def post(self, request): """Create a new rule.""" - serializer = serializers.RuleSerializer(data=request.data) + 
serializer = RuleSerializer(data=request.data) if serializer.is_valid(raise_exception=True): serializer.save() return Response(serializer.data, status=HTTP_201_CREATED) From f21a12825d61fc91d0cd9dc62b2e61a70ab588d7 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 068/201] update naming --- haproxy-route-policy/policy/rule_engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haproxy-route-policy/policy/rule_engine.py b/haproxy-route-policy/policy/rule_engine.py index 5d9d38499..1305de417 100644 --- a/haproxy-route-policy/policy/rule_engine.py +++ b/haproxy-route-policy/policy/rule_engine.py @@ -44,8 +44,8 @@ def _hostname_and_path_match(rule: Rule, request: BackendRequest) -> bool: Returns: True if the rule matches the request, False otherwise. """ - rule_hostnames: list = rule.value.get("hostnames", []) - rule_paths: list = rule.value.get("paths", []) + rule_hostnames: list = rule.parameters.get("hostnames", []) + rule_paths: list = rule.parameters.get("paths", []) hostname_matched = set(request.hostname_acls).intersection(set(rule_hostnames)) path_matched = set(request.paths).intersection(set(rule_paths)) From f4ad042b802b3a1c3a62b469200862f4aa0dc3cc Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 069/201] update rules matching logic --- haproxy-route-policy/policy/db_models.py | 2 +- .../policy/migrations/0001_initial.py | 16 +++++- .../policy/migrations/0002_rule.py | 27 +++++++++ ...ule_alter_backendrequest_paths_and_more.py | 55 ------------------- haproxy-route-policy/policy/rule_engine.py | 4 +- haproxy-route-policy/policy/views.py | 18 +++++- 6 files changed, 58 insertions(+), 64 deletions(-) create mode 100644 haproxy-route-policy/policy/migrations/0002_rule.py delete mode 100644 haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py diff --git a/haproxy-route-policy/policy/db_models.py 
b/haproxy-route-policy/policy/db_models.py index 7eb294f6d..03b178c39 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -93,7 +93,7 @@ class BackendRequest(models.Model): hostname_acls: models.JSONField = models.JSONField( default=list, validators=[validate_hostname_acls], blank=True ) - backend_name: models.TextField = models.TextField() + backend_name: models.TextField = models.TextField(unique=True) paths: models.JSONField = models.JSONField( default=list, validators=[validate_paths], blank=True ) diff --git a/haproxy-route-policy/policy/migrations/0001_initial.py b/haproxy-route-policy/policy/migrations/0001_initial.py index 7d7a16bcd..17c86d4be 100644 --- a/haproxy-route-policy/policy/migrations/0001_initial.py +++ b/haproxy-route-policy/policy/migrations/0001_initial.py @@ -32,9 +32,19 @@ class Migration(migrations.Migration): validators=[policy.db_models.validate_hostname_acls], ), ), - ("backend_name", models.TextField()), - ("paths", models.JSONField(blank=True, default=list)), - ("port", models.IntegerField()), + ("backend_name", models.TextField(unique=True)), + ( + "paths", + models.JSONField( + blank=True, + default=list, + validators=[policy.db_models.validate_paths], + ), + ), + ( + "port", + models.IntegerField(validators=[policy.db_models.validate_port]), + ), ( "status", models.TextField( diff --git a/haproxy-route-policy/policy/migrations/0002_rule.py b/haproxy-route-policy/policy/migrations/0002_rule.py new file mode 100644 index 000000000..b428ddbaa --- /dev/null +++ b/haproxy-route-policy/policy/migrations/0002_rule.py @@ -0,0 +1,27 @@ +# Generated by Django 6.0.3 on 2026-03-24 14:42 + +import uuid +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('policy', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='Rule', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, 
serialize=False)), + ('kind', models.TextField(choices=[('hostname_and_path_match', 'hostname_and_path_match')])), + ('parameters', models.JSONField()), + ('action', models.TextField(choices=[('allow', 'allow'), ('deny', 'deny')])), + ('priority', models.IntegerField(blank=True, default=0)), + ('comment', models.TextField(blank=True, default='')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + ] diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py deleted file mode 100644 index 0a7b61e4f..000000000 --- a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py +++ /dev/null @@ -1,55 +0,0 @@ -# Generated by Django 6.0.3 on 2026-03-23 21:53 - -import policy.db_models -import uuid -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("policy", "0001_initial"), - ] - - operations = [ - migrations.CreateModel( - name="Rule", - fields=[ - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - serialize=False, - ), - ), - ( - "kind", - models.TextField( - choices=[("hostname_and_path_match", "hostname_and_path_match")] - ), - ), - ("parameters", models.JSONField()), - ( - "action", - models.TextField(choices=[("allow", "allow"), ("deny", "deny")]), - ), - ("priority", models.IntegerField(blank=True, default=0)), - ("comment", models.TextField(blank=True, default="")), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ], - ), - migrations.AlterField( - model_name="backendrequest", - name="paths", - field=models.JSONField( - blank=True, default=list, validators=[policy.db_models.validate_paths] - ), - ), - migrations.AlterField( - model_name="backendrequest", - name="port", - 
field=models.IntegerField(validators=[policy.db_models.validate_port]), - ), - ] diff --git a/haproxy-route-policy/policy/rule_engine.py b/haproxy-route-policy/policy/rule_engine.py index 1305de417..467d2a577 100644 --- a/haproxy-route-policy/policy/rule_engine.py +++ b/haproxy-route-policy/policy/rule_engine.py @@ -47,8 +47,8 @@ def _hostname_and_path_match(rule: Rule, request: BackendRequest) -> bool: rule_hostnames: list = rule.parameters.get("hostnames", []) rule_paths: list = rule.parameters.get("paths", []) - hostname_matched = set(request.hostname_acls).intersection(set(rule_hostnames)) - path_matched = set(request.paths).intersection(set(rule_paths)) + hostname_matched = set(request.hostname_acls).issubset(rule_hostnames) + path_matched = set(request.paths).issubset(rule_paths) if not rule_hostnames and not rule_paths: return False if not rule_hostnames: diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index aab89efb8..1e71ba0c9 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -24,6 +24,13 @@ class ListCreateRequestsView(APIView): """View for listing and bulk-creating backend requests.""" + def get_request_by_backend_name(self, backend_name: str) -> BackendRequest | None: + """Get a backend request by its backend name.""" + try: + return BackendRequest.objects.get(backend_name=backend_name) + except BackendRequest.DoesNotExist: + return None + def get(self, request): """List all requests, optionally filtered by status.""" status = request.GET.get("status") @@ -53,7 +60,11 @@ def post(self, request): try: with transaction.atomic(): for backend_request in request.data: - serializer = BackendRequestSerializer(data=backend_request) + # Get the request with the same backend_name if it exists and update it, otherwise create a new one + req = self.get_request_by_backend_name( + backend_request.get("backend_name") + ) + serializer = BackendRequestSerializer(req, 
data=backend_request) if serializer.is_valid(raise_exception=True): instance = BackendRequest(**serializer.validated_data) # Evaluate rules and update status @@ -62,9 +73,10 @@ def post(self, request): created.append(BackendRequestSerializer(instance).data) except ValidationError as e: return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) - except IntegrityError: + except IntegrityError as e: return Response( - {"error": "Invalid request data."}, status=HTTP_400_BAD_REQUEST + {"error": f"Invalid request data: {str(e)}"}, + status=HTTP_400_BAD_REQUEST, ) return Response(created, status=HTTP_201_CREATED) From 334ca69d257a32eb1f5ad3713bb72ec9f0de3879 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:15 +0200 Subject: [PATCH 070/201] update tests --- haproxy-route-policy/policy/rule_engine.py | 22 +++++++++++++------ .../policy/tests/test_rule_engine.py | 4 ++-- .../policy/tests/test_views.py | 8 +++---- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/haproxy-route-policy/policy/rule_engine.py b/haproxy-route-policy/policy/rule_engine.py index 467d2a577..7856fbbbf 100644 --- a/haproxy-route-policy/policy/rule_engine.py +++ b/haproxy-route-policy/policy/rule_engine.py @@ -47,15 +47,23 @@ def _hostname_and_path_match(rule: Rule, request: BackendRequest) -> bool: rule_hostnames: list = rule.parameters.get("hostnames", []) rule_paths: list = rule.parameters.get("paths", []) - hostname_matched = set(request.hostname_acls).issubset(rule_hostnames) - path_matched = set(request.paths).issubset(rule_paths) - if not rule_hostnames and not rule_paths: - return False + # A rule with no hostnames can never match. if not rule_hostnames: - return bool(path_matched) + return False + + # At least one rule hostname must appear in the request's hostname_acls. 
+ hostname_matched = bool( + set(rule_hostnames).intersection(request.hostname_acls) + ) + if not hostname_matched: + return False + + # Empty rule paths means "match all paths" (wildcard). if not rule_paths: - return bool(hostname_matched) - return bool(hostname_matched) and bool(path_matched) + return True + + # At least one rule path must appear in the request's paths. + return bool(set(rule_paths).intersection(request.paths)) def evaluate_request(request: BackendRequest) -> str: diff --git a/haproxy-route-policy/policy/tests/test_rule_engine.py b/haproxy-route-policy/policy/tests/test_rule_engine.py index edf3d11ed..b484a00bf 100644 --- a/haproxy-route-policy/policy/tests/test_rule_engine.py +++ b/haproxy-route-policy/policy/tests/test_rule_engine.py @@ -26,7 +26,7 @@ def _make_rule(self, hostnames=None, paths=None, action="deny", priority=0): """Create and save a Rule with hostname_and_path_match kind.""" rule = db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": hostnames or [], "paths": paths or []}, + parameters={"hostnames": hostnames or [], "paths": paths or []}, action=action, priority=priority, ) @@ -136,7 +136,7 @@ def _make_rule(self, hostnames=None, paths=None, action="deny", priority=0): """Create and save a hostname_and_path_match Rule.""" rule = db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": hostnames or [], "paths": paths or []}, + parameters={"hostnames": hostnames or [], "paths": paths or []}, action=action, priority=priority, ) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 06d0f384c..943ca7969 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -93,7 +93,7 @@ def test_bulk_create_evaluates_rules_on_creation(self): # Create a deny rule for example.com db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - 
value={"hostnames": ["example.com"], "paths": []}, + parameters={"hostnames": ["example.com"], "paths": []}, action=db_models.RULE_ACTION_DENY, ).save() payload = [ @@ -119,7 +119,7 @@ def test_bulk_create_accepted_by_allow_rule(self): """POST sets status to accepted when an allow rule matches.""" db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": []}, + parameters={"hostnames": ["example.com"], "paths": []}, action=db_models.RULE_ACTION_ALLOW, ).save() payload = [ @@ -139,7 +139,7 @@ def test_bulk_create_pending_when_no_rules_match(self): # Rule for other.com, request for example.com db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["other.com"], "paths": []}, + parameters={"hostnames": ["other.com"], "paths": []}, action=db_models.RULE_ACTION_DENY, ).save() payload = [ @@ -158,7 +158,7 @@ def test_bulk_create_mixed_statuses(self): """POST evaluates each request independently against rules.""" db_models.Rule( kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - value={"hostnames": ["example.com"], "paths": []}, + parameters={"hostnames": ["example.com"], "paths": []}, action=db_models.RULE_ACTION_DENY, ).save() payload = [ From 37095116e5806bb603687f367ca3442ba4561d17 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:21 +0200 Subject: [PATCH 071/201] save request using serializer with the correct instace --- haproxy-route-policy/policy/views.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 1e71ba0c9..54ceec66c 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -66,11 +66,13 @@ def post(self, request): ) serializer = BackendRequestSerializer(req, data=backend_request) if serializer.is_valid(raise_exception=True): - instance = BackendRequest(**serializer.validated_data) # Evaluate rules and 
update status - instance.status = evaluate_request(instance) - instance.save() - created.append(BackendRequestSerializer(instance).data) + serializer.save( + status=evaluate_request( + BackendRequest(**serializer.validated_data) + ) + ) + created.append(serializer.data) except ValidationError as e: return Response({"error": str(e)}, status=HTTP_400_BAD_REQUEST) except IntegrityError as e: From c6d78706e4b5da33fe8f7723cb9ebfe79bb6fdf7 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 072/201] Haproxy route policy rules api (#400) --- ...ule_alter_backendrequest_paths_and_more.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py new file mode 100644 index 000000000..0a7b61e4f --- /dev/null +++ b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py @@ -0,0 +1,55 @@ +# Generated by Django 6.0.3 on 2026-03-23 21:53 + +import policy.db_models +import uuid +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("policy", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="Rule", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), + ( + "kind", + models.TextField( + choices=[("hostname_and_path_match", "hostname_and_path_match")] + ), + ), + ("parameters", models.JSONField()), + ( + "action", + models.TextField(choices=[("allow", "allow"), ("deny", "deny")]), + ), + ("priority", models.IntegerField(blank=True, default=0)), + ("comment", models.TextField(blank=True, default="")), + ("created_at", models.DateTimeField(auto_now_add=True)), + 
("updated_at", models.DateTimeField(auto_now=True)), + ], + ), + migrations.AlterField( + model_name="backendrequest", + name="paths", + field=models.JSONField( + blank=True, default=list, validators=[policy.db_models.validate_paths] + ), + ), + migrations.AlterField( + model_name="backendrequest", + name="port", + field=models.IntegerField(validators=[policy.db_models.validate_port]), + ), + ] From 7a2ddf0a986125e2b9b4c02495acae7c9638ee3e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 073/201] group tests --- .../policy/tests/test_views.py | 175 +++++++++--------- 1 file changed, 86 insertions(+), 89 deletions(-) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 943ca7969..9b987a090 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -88,98 +88,95 @@ def test_bulk_create(self): self.assertEqual(data[1]["port"], 443) self.assertEqual(db_models.BackendRequest.objects.count(), 2) - def test_bulk_create_evaluates_rules_on_creation(self): - """POST evaluates rules and sets status accordingly.""" - # Create a deny rule for example.com - db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - parameters={"hostnames": ["example.com"], "paths": []}, - action=db_models.RULE_ACTION_DENY, - ).save() - payload = [ - { - "relation_id": 1, - "hostname_acls": ["example.com"], - "backend_name": "backend-1", - "paths": ["/api"], - "port": 443, - }, - ] - response = self.client.post("/api/v1/requests", data=payload, format="json") - self.assertEqual(response.status_code, 201) - data = response.json() - self.assertEqual(data[0]["status"], db_models.REQUEST_STATUS_REJECTED) - # Verify DB is updated too - self.assertEqual( - db_models.BackendRequest.objects.get(pk=data[0]["id"]).status, - db_models.REQUEST_STATUS_REJECTED, - ) - - def test_bulk_create_accepted_by_allow_rule(self): - """POST sets 
status to accepted when an allow rule matches.""" - db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - parameters={"hostnames": ["example.com"], "paths": []}, - action=db_models.RULE_ACTION_ALLOW, - ).save() - payload = [ - { - "relation_id": 1, - "hostname_acls": ["example.com"], - "backend_name": "backend-1", - "port": 443, - }, + def test_evaluate_requests(self): + """POST evaluates rules and sets status accordingly for each request.""" + cases = [ + ( + "denied by matching deny rule", + {"hostnames": ["example.com"], "paths": []}, + db_models.RULE_ACTION_DENY, + [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "paths": ["/api"], + "port": 443, + }, + ], + [db_models.REQUEST_STATUS_REJECTED], + ), + ( + "accepted by matching allow rule", + {"hostnames": ["example.com"], "paths": []}, + db_models.RULE_ACTION_ALLOW, + [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "port": 443, + }, + ], + [db_models.REQUEST_STATUS_ACCEPTED], + ), + ( + "pending when no rules match", + {"hostnames": ["other.com"], "paths": []}, + db_models.RULE_ACTION_DENY, + [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "port": 443, + }, + ], + [db_models.REQUEST_STATUS_PENDING], + ), + ( + "mixed statuses per request", + {"hostnames": ["example.com"], "paths": []}, + db_models.RULE_ACTION_DENY, + [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "port": 443, + }, + { + "relation_id": 2, + "hostname_acls": ["other.com"], + "backend_name": "backend-2", + "port": 443, + }, + ], + [ + db_models.REQUEST_STATUS_REJECTED, + db_models.REQUEST_STATUS_PENDING, + ], + ), ] - response = self.client.post("/api/v1/requests", data=payload, format="json") - self.assertEqual(response.status_code, 201) - self.assertEqual(response.json()[0]["status"], db_models.REQUEST_STATUS_ACCEPTED) + for label, rule_params, 
rule_action, payload, expected_statuses in cases: + with self.subTest(label=label): + # Clean slate for each sub-test + db_models.Rule.objects.all().delete() + db_models.BackendRequest.objects.all().delete() - def test_bulk_create_pending_when_no_rules_match(self): - """POST leaves status as pending when no rules match.""" - # Rule for other.com, request for example.com - db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - parameters={"hostnames": ["other.com"], "paths": []}, - action=db_models.RULE_ACTION_DENY, - ).save() - payload = [ - { - "relation_id": 1, - "hostname_acls": ["example.com"], - "backend_name": "backend-1", - "port": 443, - }, - ] - response = self.client.post("/api/v1/requests", data=payload, format="json") - self.assertEqual(response.status_code, 201) - self.assertEqual(response.json()[0]["status"], db_models.REQUEST_STATUS_PENDING) + db_models.Rule( + kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + parameters=rule_params, + action=rule_action, + ).save() - def test_bulk_create_mixed_statuses(self): - """POST evaluates each request independently against rules.""" - db_models.Rule( - kind=db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, - parameters={"hostnames": ["example.com"], "paths": []}, - action=db_models.RULE_ACTION_DENY, - ).save() - payload = [ - { - "relation_id": 1, - "hostname_acls": ["example.com"], - "backend_name": "backend-1", - "port": 443, - }, - { - "relation_id": 2, - "hostname_acls": ["other.com"], - "backend_name": "backend-2", - "port": 443, - }, - ] - response = self.client.post("/api/v1/requests", data=payload, format="json") - self.assertEqual(response.status_code, 201) - data = response.json() - self.assertEqual(data[0]["status"], db_models.REQUEST_STATUS_REJECTED) - self.assertEqual(data[1]["status"], db_models.REQUEST_STATUS_PENDING) + response = self.client.post( + "/api/v1/requests", data=payload, format="json" + ) + self.assertEqual(response.status_code, 201) + data = response.json() + 
actual_statuses = [r["status"] for r in data] + self.assertEqual(actual_statuses, expected_statuses) def test_bulk_create_rejects_non_list(self): """POST returns 400 when the body is not a list.""" From d77225f1652a96aea0a15fa937a6b22b82bcbc7b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 074/201] update formatting --- .../policy/migrations/0002_rule.py | 37 +++++++++++++------ haproxy-route-policy/policy/rule_engine.py | 4 +- .../policy/tests/test_rule_engine.py | 24 +++--------- 3 files changed, 33 insertions(+), 32 deletions(-) diff --git a/haproxy-route-policy/policy/migrations/0002_rule.py b/haproxy-route-policy/policy/migrations/0002_rule.py index b428ddbaa..4f3fa0ae6 100644 --- a/haproxy-route-policy/policy/migrations/0002_rule.py +++ b/haproxy-route-policy/policy/migrations/0002_rule.py @@ -5,23 +5,38 @@ class Migration(migrations.Migration): - dependencies = [ - ('policy', '0001_initial'), + ("policy", "0001_initial"), ] operations = [ migrations.CreateModel( - name='Rule', + name="Rule", fields=[ - ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ('kind', models.TextField(choices=[('hostname_and_path_match', 'hostname_and_path_match')])), - ('parameters', models.JSONField()), - ('action', models.TextField(choices=[('allow', 'allow'), ('deny', 'deny')])), - ('priority', models.IntegerField(blank=True, default=0)), - ('comment', models.TextField(blank=True, default='')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('updated_at', models.DateTimeField(auto_now=True)), + ( + "id", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), + ( + "kind", + models.TextField( + choices=[("hostname_and_path_match", "hostname_and_path_match")] + ), + ), + ("parameters", models.JSONField()), + ( + "action", + models.TextField(choices=[("allow", "allow"), ("deny", "deny")]), + ), + ("priority", 
models.IntegerField(blank=True, default=0)), + ("comment", models.TextField(blank=True, default="")), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), ], ), ] diff --git a/haproxy-route-policy/policy/rule_engine.py b/haproxy-route-policy/policy/rule_engine.py index 7856fbbbf..c59a27535 100644 --- a/haproxy-route-policy/policy/rule_engine.py +++ b/haproxy-route-policy/policy/rule_engine.py @@ -52,9 +52,7 @@ def _hostname_and_path_match(rule: Rule, request: BackendRequest) -> bool: return False # At least one rule hostname must appear in the request's hostname_acls. - hostname_matched = bool( - set(rule_hostnames).intersection(request.hostname_acls) - ) + hostname_matched = bool(set(rule_hostnames).intersection(request.hostname_acls)) if not hostname_matched: return False diff --git a/haproxy-route-policy/policy/tests/test_rule_engine.py b/haproxy-route-policy/policy/tests/test_rule_engine.py index b484a00bf..03666b170 100644 --- a/haproxy-route-policy/policy/tests/test_rule_engine.py +++ b/haproxy-route-policy/policy/tests/test_rule_engine.py @@ -102,9 +102,7 @@ def test_rule_paths_set_but_request_paths_empty(self): def test_hostname_match_but_path_mismatch(self): """Rule doesn't match when hostnames match but paths don't.""" rule = self._make_rule(hostnames=["example.com"], paths=["/admin"]) - request = self._make_request( - hostname_acls=["example.com"], paths=["/api"] - ) + request = self._make_request(hostname_acls=["example.com"], paths=["/api"]) self.assertFalse(_hostname_and_path_match(rule, request)) def test_multiple_hostnames_and_paths(self): @@ -156,17 +154,13 @@ def test_no_matching_rules_returns_pending(self): def test_single_allow_rule_accepts(self): """Request is accepted when a single allow rule matches.""" - self._make_rule( - hostnames=["example.com"], action=db_models.RULE_ACTION_ALLOW - ) + self._make_rule(hostnames=["example.com"], action=db_models.RULE_ACTION_ALLOW) request = 
self._make_request(hostname_acls=["example.com"]) self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) def test_single_deny_rule_rejects(self): """Request is rejected when a single deny rule matches.""" - self._make_rule( - hostnames=["example.com"], action=db_models.RULE_ACTION_DENY - ) + self._make_rule(hostnames=["example.com"], action=db_models.RULE_ACTION_DENY) request = self._make_request(hostname_acls=["example.com"]) self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_REJECTED) @@ -200,9 +194,7 @@ def test_higher_priority_evaluated_first(self): action=db_models.RULE_ACTION_DENY, priority=0, ) - request = self._make_request( - hostname_acls=["example.com"], paths=["/client"] - ) + request = self._make_request(hostname_acls=["example.com"], paths=["/client"]) self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) def test_spec_example_client_allowed(self): @@ -234,9 +226,7 @@ def test_spec_example_client_allowed(self): action=db_models.RULE_ACTION_ALLOW, priority=1, ) - request = self._make_request( - hostname_acls=["example.com"], paths=["/client"] - ) + request = self._make_request(hostname_acls=["example.com"], paths=["/client"]) self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_ACCEPTED) def test_spec_example_api_denied(self): @@ -268,9 +258,7 @@ def test_spec_example_api_denied(self): action=db_models.RULE_ACTION_ALLOW, priority=1, ) - request = self._make_request( - hostname_acls=["example.com"], paths=["/api"] - ) + request = self._make_request(hostname_acls=["example.com"], paths=["/api"]) self.assertEqual(evaluate_request(request), db_models.REQUEST_STATUS_REJECTED) def test_lower_priority_not_reached_if_higher_matches(self): From 9075f59de4a1f06981ae8c0728c1dad7f330b15f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 075/201] Add authentication configuration for django-restframework and adapt tests for auth --- 
.../haproxy_route_policy/settings.py | 12 +++ .../haproxy_route_policy/test_settings.py | 7 ++ .../test_settings_authenticated.py | 16 ++++ .../policy/tests/test_auth.py | 85 +++++++++++++++++++ haproxy-route-policy/pyproject.toml | 4 + haproxy-route-policy/tox.toml | 20 ++++- haproxy-route-policy/uv.lock | 29 +++++++ 7 files changed, 172 insertions(+), 1 deletion(-) create mode 100644 haproxy-route-policy/haproxy_route_policy/test_settings_authenticated.py create mode 100644 haproxy-route-policy/policy/tests/test_auth.py diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 7c3d2c341..0ef69592f 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -124,6 +124,18 @@ DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" +# django rest framework options +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": ( + "rest_framework.authentication.BasicAuthentication", + "rest_framework.authentication.SessionAuthentication", + "rest_framework_simplejwt.authentication.JWTAuthentication", + ), + "DEFAULT_PERMISSION_CLASSES": [ + "rest_framework.permissions.IsAuthenticated", + ], +} + env_log_level = os.getenv("DJANGO_LOG_LEVEL", "INFO").upper() if env_log_level not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]: env_log_level = "INFO" diff --git a/haproxy-route-policy/haproxy_route_policy/test_settings.py b/haproxy-route-policy/haproxy_route_policy/test_settings.py index dc1db7f24..91c46a6cc 100644 --- a/haproxy-route-policy/haproxy_route_policy/test_settings.py +++ b/haproxy-route-policy/haproxy_route_policy/test_settings.py @@ -14,3 +14,10 @@ "NAME": ":memory:", } } + +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": [], + "DEFAULT_PERMISSION_CLASSES": [ + "rest_framework.permissions.AllowAny", + ], +} diff --git a/haproxy-route-policy/haproxy_route_policy/test_settings_authenticated.py 
b/haproxy-route-policy/haproxy_route_policy/test_settings_authenticated.py new file mode 100644 index 000000000..c3090bfc0 --- /dev/null +++ b/haproxy-route-policy/haproxy_route_policy/test_settings_authenticated.py @@ -0,0 +1,16 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Django settings for running tests with SQLite in an authenticated setup.""" + +from haproxy_route_policy.settings import * # noqa: F401, F403 + +# Mock secret key for testing. +SECRET_KEY = "test-secret-key" + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + } +} diff --git a/haproxy-route-policy/policy/tests/test_auth.py b/haproxy-route-policy/policy/tests/test_auth.py new file mode 100644 index 000000000..d91bbc1cb --- /dev/null +++ b/haproxy-route-policy/policy/tests/test_auth.py @@ -0,0 +1,85 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Authentication tests.""" + +from django.test import TestCase, tag +from rest_framework.test import APIClient +from django.contrib.auth.models import User + + +@tag("auth") +class TestAuthenticationRequired(TestCase): + """Tests that endpoints require authentication.""" + + def setUp(self): + self.client = APIClient() + + def test_list_requests_unauthenticated(self): + """GET /api/v1/requests returns 401/403 without auth.""" + response = self.client.get("/api/v1/requests") + self.assertIn(response.status_code, [401, 403]) + + def test_create_requests_unauthenticated(self): + """POST /api/v1/requests returns 401/403 without auth.""" + response = self.client.post("/api/v1/requests", [], format="json") + self.assertIn(response.status_code, [401, 403]) + + def test_list_rules_unauthenticated(self): + """GET /api/v1/rules returns 401/403 without auth.""" + response = self.client.get("/api/v1/rules") + self.assertIn(response.status_code, [401, 403]) + + +@tag("auth") +class TestAuthenticated(TestCase): + """Tests endpoints as an 
authenticated user.""" + + def setUp(self): + self.user = User.objects.create_user("admin", "admin@example.com", "admin") + self.client = APIClient() + # Add nosec to ignore bandit warning as this is for testing. + self.client.login(username="admin", password="admin") # nosec + + def test_create_requests_authenticated(self): + """POST /api/v1/requests returns 201 with auth.""" + payload = [ + { + "relation_id": 1, + "hostname_acls": ["example.com"], + "backend_name": "backend-1", + "paths": ["/api"], + "port": 443, + }, + { + "relation_id": 2, + "backend_name": "backend-2", + "port": 443, + }, + ] + response = self.client.post("/api/v1/requests", data=payload, format="json") + self.assertEqual(response.status_code, 201) + + def test_create_rules_authenticated(self): + """POST /api/v1/rules returns 201 with auth.""" + payload = { + "name": "Test Rule", + "action": "allow", + "kind": "hostname_and_path_match", + "parameters": { + "hostnames": ["example.com"], + "paths": ["/api"], + }, + } + response = self.client.post("/api/v1/rules", data=payload, format="json") + self.assertEqual(response.status_code, 201) + + def test_list_requests_authenticated(self): + """GET /api/v1/requests returns 200 with auth.""" + response = self.client.get("/api/v1/requests") + self.assertEqual(response.status_code, 200) + + def test_list_rules_authenticated(self): + """GET /api/v1/rules returns 200 with auth.""" + response = self.client.get("/api/v1/rules") + self.assertEqual(response.status_code, 200) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index e1443b556..ad337e6a7 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -7,11 +7,15 @@ requires-python = ">=3.12" dependencies = [ "django>=6.0.3", "djangorestframework>=3.16.1", + "djangorestframework-simplejwt>=5.5.1", "validators>=0.35.0", "whitenoise>=6.12.0", ] [dependency-groups] +auth = [ + "djangorestframework-simplejwt>=5.5.1", +] coverage-report 
= [ "coverage[toml]>=7.13.5", ] diff --git a/haproxy-route-policy/tox.toml b/haproxy-route-policy/tox.toml index 40fa551cc..3923feee0 100644 --- a/haproxy-route-policy/tox.toml +++ b/haproxy-route-policy/tox.toml @@ -5,7 +5,7 @@ skipsdist = true skip_missing_interpreters = true requires = ["tox>=4.21"] no_package = true -envlist = ["lint", "unit", "static", "coverage-report"] +envlist = ["lint", "unit", "unit-auth", "static", "coverage-report"] [env_run_base] passenv = ["PYTHONPATH"] @@ -26,11 +26,29 @@ commands = [ "test", "policy", "--settings=haproxy_route_policy.test_settings", + "--exclude-tag=auth", "-v2", ], ] dependency_groups = ["unit"] + +[env.unit_auth] +description = "Run auth unit tests" +commands = [ + [ + "coverage", + "run", + "manage.py", + "test", + "policy", + "--settings=haproxy_route_policy.test_settings_authenticated", + "--tag=auth", + "-v2", + ], +] +dependency_groups = ["unit", "auth"] + [env.lint] description = "Check code against coding style standards" commands = [ diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 8caecae5a..45b26319a 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -194,6 +194,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/ce/bf8b9d3f415be4ac5588545b5fcdbbb841977db1c1d923f7568eeabe1689/djangorestframework-3.16.1-py3-none-any.whl", hash = "sha256:33a59f47fb9c85ede792cbf88bde71893bcda0667bc573f784649521f1102cec", size = 1080442, upload-time = "2025-08-06T17:50:50.667Z" }, ] +[[package]] +name = "djangorestframework-simplejwt" +version = "5.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "django" }, + { name = "djangorestframework" }, + { name = "pyjwt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/27/2874a325c11112066139769f7794afae238a07ce6adf96259f08fd37a9d7/djangorestframework_simplejwt-5.5.1.tar.gz", hash = "sha256:e72c5572f51d7803021288e2057afcbd03f17fe11d484096f40a460abc76e87f", 
size = 101265, upload-time = "2025-07-21T16:52:25.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/94/fdfb7b2f0b16cd3ed4d4171c55c1c07a2d1e3b106c5978c8ad0c15b4a48b/djangorestframework_simplejwt-5.5.1-py3-none-any.whl", hash = "sha256:2c30f3707053d384e9f315d11c2daccfcb548d4faa453111ca19a542b732e469", size = 107674, upload-time = "2025-07-21T16:52:07.493Z" }, +] + [[package]] name = "djangorestframework-stubs" version = "3.16.8" @@ -224,11 +238,15 @@ source = { virtual = "." } dependencies = [ { name = "django" }, { name = "djangorestframework" }, + { name = "djangorestframework-simplejwt" }, { name = "validators" }, { name = "whitenoise" }, ] [package.dev-dependencies] +auth = [ + { name = "djangorestframework-simplejwt" }, +] coverage-report = [ { name = "coverage" }, ] @@ -252,11 +270,13 @@ unit = [ requires-dist = [ { name = "django", specifier = ">=6.0.3" }, { name = "djangorestframework", specifier = ">=3.16.1" }, + { name = "djangorestframework-simplejwt", specifier = ">=5.5.1" }, { name = "validators", specifier = ">=0.35.0" }, { name = "whitenoise", specifier = ">=6.12.0" }, ] [package.metadata.requires-dev] +auth = [{ name = "djangorestframework-simplejwt", specifier = ">=5.5.1" }] coverage-report = [{ name = "coverage", extras = ["toml"], specifier = ">=7.13.5" }] lint = [ { name = "codespell", specifier = ">=2.4.2" }, @@ -411,6 +431,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyjwt" +version = "2.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/27/a3b6e5bf6ff856d2509292e95c8f57f0df7017cf5394921fc4e4ef40308a/pyjwt-2.12.1.tar.gz", hash = 
"sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b", size = 102564, upload-time = "2026-03-13T19:27:37.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/7a/8dd906bd22e79e47397a61742927f6747fe93242ef86645ee9092e610244/pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c", size = 29726, upload-time = "2026-03-13T19:27:35.677Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" From 9770b2d083affd415f7d8ce17c0c610e10985d12 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 076/201] add change artifact --- docs/release-notes/artifacts/pr0412.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0412.yaml diff --git a/docs/release-notes/artifacts/pr0412.yaml b/docs/release-notes/artifacts/pr0412.yaml new file mode 100644 index 000000000..c90f99ce4 --- /dev/null +++ b/docs/release-notes/artifacts/pr0412.yaml @@ -0,0 +1,20 @@ +version_schema: 2 + +changes: + - title: Added authentication to haproxy-route-policy REST API + author: tphan025 + type: minor + description: > + Configured Django REST Framework with JWT and session-based authentication + as default authentication classes, requiring all API endpoints to be accessed + by authenticated users. Added test_settings_authenticated.py for auth-enabled + tests, a dedicated unit-auth tox environment, and integration tests verifying + that unauthenticated requests are rejected and authenticated requests succeed. + Added djangorestframework-simplejwt as a dependency. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/412 + related_doc: + related_issue: + visibility: public + highlight: false From bb5027756d2de7aa4cdbdbf9da05d45e1b474c89 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 077/201] Add token urls --- haproxy-route-policy/haproxy_route_policy/urls.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/haproxy-route-policy/haproxy_route_policy/urls.py b/haproxy-route-policy/haproxy_route_policy/urls.py index 0fb11c0d9..711c78c61 100644 --- a/haproxy-route-policy/haproxy_route_policy/urls.py +++ b/haproxy-route-policy/haproxy_route_policy/urls.py @@ -20,10 +20,18 @@ from django.contrib import admin from django.urls import include, path +from rest_framework_simplejwt.views import ( + TokenObtainPairView, + TokenRefreshView, + TokenVerifyView, +) from policy import urls as policy_urls urlpatterns = [ path("admin/", admin.site.urls), + path("api/token/", TokenObtainPairView.as_view(), name="token_obtain_pair"), + path("api/token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), + path("api/token/verify/", TokenVerifyView.as_view(), name="token_verify"), path("", include(policy_urls)), ] From f94c20d3cae9384072a187a5212a0a2da9249999 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 078/201] switch DB engine to postgres --- .../haproxy_route_policy/settings.py | 14 ++++-- haproxy-route-policy/pyproject.toml | 1 + haproxy-route-policy/uv.lock | 43 +++++++++++++++++++ 3 files changed, 55 insertions(+), 3 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 0ef69592f..05b70ee77 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -16,6 +16,7 @@ from pathlib import Path import os import json +import psycopg2.extensions # Build paths inside the 
project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent @@ -76,9 +77,16 @@ DATABASES = { "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": BASE_DIR / "db.sqlite3", - } + "ENGINE": "django.db.backends.postgresql", + "PASSWORD": os.getenv("DJANGO_DATABASE_PASSWORD", ""), + "HOST": os.getenv("DJANGO_DATABASE_HOST", "localhost"), + "PORT": os.getenv("DJANGO_DATABASE_PORT", 5432), + "USER": os.getenv("DJANGO_DATABASE_USER", "postgres"), + "NAME": os.getenv("DJANGO_DATABASE_NAME", "postgres"), + "OPTIONS": { + "isolation_level": psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE, + }, + }, } diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index ad337e6a7..886b7f29d 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -8,6 +8,7 @@ dependencies = [ "django>=6.0.3", "djangorestframework>=3.16.1", "djangorestframework-simplejwt>=5.5.1", + "psycopg2-binary>=2.9.11", "validators>=0.35.0", "whitenoise>=6.12.0", ] diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 45b26319a..924df5019 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -239,6 +239,7 @@ dependencies = [ { name = "django" }, { name = "djangorestframework" }, { name = "djangorestframework-simplejwt" }, + { name = "psycopg2-binary" }, { name = "validators" }, { name = "whitenoise" }, ] @@ -271,6 +272,7 @@ requires-dist = [ { name = "django", specifier = ">=6.0.3" }, { name = "djangorestframework", specifier = ">=3.16.1" }, { name = "djangorestframework-simplejwt", specifier = ">=5.5.1" }, + { name = "psycopg2-binary", specifier = ">=2.9.11" }, { name = "validators", specifier = ">=0.35.0" }, { name = "whitenoise", specifier = ">=6.12.0" }, ] @@ -422,6 +424,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = 
"sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, ] +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a8/a2709681b3ac11b0b1786def10006b8995125ba268c9a54bea6f5ae8bd3e/psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c", size = 3756572, upload-time = "2025-10-10T11:12:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/62/e1/c2b38d256d0dafd32713e9f31982a5b028f4a3651f446be70785f484f472/psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee", size = 3864529, upload-time = "2025-10-10T11:12:36.791Z" }, + { url = "https://files.pythonhosted.org/packages/11/32/b2ffe8f3853c181e88f0a157c5fb4e383102238d73c52ac6d93a5c8bffe6/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0", size = 4411242, upload-time = "2025-10-10T11:12:42.388Z" }, + { url = "https://files.pythonhosted.org/packages/10/04/6ca7477e6160ae258dc96f67c371157776564679aefd247b66f4661501a2/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766", size = 4468258, upload-time = "2025-10-10T11:12:48.654Z" }, + { url = "https://files.pythonhosted.org/packages/3c/7e/6a1a38f86412df101435809f225d57c1a021307dd0689f7a5e7fe83588b1/psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3", size = 4166295, upload-time = "2025-10-10T11:12:52.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7d/c07374c501b45f3579a9eb761cbf2604ddef3d96ad48679112c2c5aa9c25/psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f", size = 3983133, upload-time = "2025-10-30T02:55:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/82/56/993b7104cb8345ad7d4516538ccf8f0d0ac640b1ebd8c754a7b024e76878/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4", size = 3652383, upload-time = "2025-10-10T11:12:56.387Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ac/eaeb6029362fd8d454a27374d84c6866c82c33bfc24587b4face5a8e43ef/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c", size = 3298168, upload-time = "2025-10-10T11:13:00.403Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/50c3facc66bded9ada5cbc0de867499a703dc6bca6be03070b4e3b65da6c/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60", size = 3044712, upload-time = "2025-10-30T02:55:27.975Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8e/b7de019a1f562f72ada81081a12823d3c1590bedc48d7d2559410a2763fe/psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1", size = 3347549, upload-time = "2025-10-10T11:13:03.971Z" }, + { url = "https://files.pythonhosted.org/packages/80/2d/1bb683f64737bbb1f86c82b7359db1eb2be4e2c0c13b947f80efefa7d3e5/psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa", size = 2714215, upload-time = "2025-10-10T11:13:07.14Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/12/93ef0098590cf51d9732b4f139533732565704f45bdc1ffa741b7c95fb54/psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1", size = 3756567, upload-time = "2025-10-10T11:13:11.885Z" }, + { url = "https://files.pythonhosted.org/packages/7c/a9/9d55c614a891288f15ca4b5209b09f0f01e3124056924e17b81b9fa054cc/psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f", size = 3864755, upload-time = "2025-10-10T11:13:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/13/1e/98874ce72fd29cbde93209977b196a2edae03f8490d1bd8158e7f1daf3a0/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5", size = 4411646, upload-time = "2025-10-10T11:13:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/a335ce6645334fb8d758cc358810defca14a1d19ffbc8a10bd38a2328565/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8", size = 4468701, upload-time = "2025-10-10T11:13:29.266Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/c8b4f53f34e295e45709b7568bf9b9407a612ea30387d35eb9fa84f269b4/psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c", size = 4166293, upload-time = "2025-10-10T11:13:33.336Z" }, + { url = "https://files.pythonhosted.org/packages/4b/e0/f8cc36eadd1b716ab36bb290618a3292e009867e5c97ce4aba908cb99644/psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f", size = 3983184, upload-time = 
"2025-10-30T02:55:32.483Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/2a8fe18a4e61cfb3417da67b6318e12691772c0696d79434184a511906dc/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747", size = 3652650, upload-time = "2025-10-10T11:13:38.181Z" }, + { url = "https://files.pythonhosted.org/packages/76/36/03801461b31b29fe58d228c24388f999fe814dfc302856e0d17f97d7c54d/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f", size = 3298663, upload-time = "2025-10-10T11:13:44.878Z" }, + { url = "https://files.pythonhosted.org/packages/97/77/21b0ea2e1a73aa5fa9222b2a6b8ba325c43c3a8d54272839c991f2345656/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b", size = 3044737, upload-time = "2025-10-30T02:55:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/67/69/f36abe5f118c1dca6d3726ceae164b9356985805480731ac6712a63f24f0/psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d", size = 3347643, upload-time = "2025-10-10T11:13:53.499Z" }, + { url = "https://files.pythonhosted.org/packages/e1/36/9c0c326fe3a4227953dfb29f5d0c8ae3b8eb8c1cd2967aa569f50cb3c61f/psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316", size = 2803913, upload-time = "2025-10-10T11:13:57.058Z" }, +] + [[package]] name = "pygments" version = "2.19.2" From 04557c57b08539b7e2c7b4e3f367d15f89c27e73 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 079/201] add change artifact --- docs/release-notes/artifacts/pr0413.yaml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 
docs/release-notes/artifacts/pr0413.yaml diff --git a/docs/release-notes/artifacts/pr0413.yaml b/docs/release-notes/artifacts/pr0413.yaml new file mode 100644 index 000000000..dd9096d30 --- /dev/null +++ b/docs/release-notes/artifacts/pr0413.yaml @@ -0,0 +1,17 @@ +version_schema: 2 + +changes: + - title: Switched haproxy-route-policy database backend to PostgreSQL + author: tphan025 + type: minor + description: > + Changed the default database backend from SQLite to PostgreSQL with + environment-variable-based configuration for host, port, user, password, + and database name. Added psycopg2-binary as a project dependency. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/413 + related_doc: + related_issue: + visibility: public + highlight: false From 6e8989b5b69c04401d9b452a97d15a7918af926b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 080/201] Add snap files --- .../haproxy_route_policy/settings.py | 10 +++- haproxy-route-policy/pyproject.toml | 1 + haproxy-route-policy/snap/hooks/configure | 52 +++++++++++++++++++ haproxy-route-policy/snap/hooks/install | 13 +++++ .../snap/scripts/bin/gunicorn-start | 31 +++++++++++ haproxy-route-policy/snap/scripts/bin/manage | 25 +++++++++ haproxy-route-policy/snap/scripts/bin/prepare | 27 ++++++++++ haproxy-route-policy/snap/snapcraft.yaml | 51 ++++++++++++++++++ haproxy-route-policy/uv.lock | 23 ++++++++ 9 files changed, 232 insertions(+), 1 deletion(-) create mode 100644 haproxy-route-policy/snap/hooks/configure create mode 100755 haproxy-route-policy/snap/hooks/install create mode 100755 haproxy-route-policy/snap/scripts/bin/gunicorn-start create mode 100755 haproxy-route-policy/snap/scripts/bin/manage create mode 100755 haproxy-route-policy/snap/scripts/bin/prepare create mode 100644 haproxy-route-policy/snap/snapcraft.yaml diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 
05b70ee77..685f5412c 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -20,7 +20,15 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent -SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") +SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "") +if SECRET_KEY == "": + # Read the secret key from a file to have it persist between + # the manage.py script calls and the gunicorn process + # (it needs to be the same for the JWT tokens). + secret_path = Path(BASE_DIR, ".secret") + if secret_path.exists(): + with open(secret_path, "r", encoding="utf-8") as secretfile: + SECRET_KEY = secretfile.read().strip() DEBUG = os.environ.get("DJANGO_DEBUG", "").lower() == "true" ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS", "[]")) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index 886b7f29d..022999062 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -8,6 +8,7 @@ dependencies = [ "django>=6.0.3", "djangorestframework>=3.16.1", "djangorestframework-simplejwt>=5.5.1", + "gunicorn>=23.0.0", "psycopg2-binary>=2.9.11", "validators>=0.35.0", "whitenoise>=6.12.0", diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure new file mode 100644 index 000000000..75634aa49 --- /dev/null +++ b/haproxy-route-policy/snap/hooks/configure @@ -0,0 +1,52 @@ +#!/bin/sh + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +DJANGO_DEBUG="$(snapctl get debug)" +export DJANGO_DEBUG + +case "$DJANGO_DEBUG" in + "true") ;; + "false") ;; + *) + >&2 echo "'$DJANGO_DEBUG is not a supported value for django_debug. 
Possible values are true, false" + return 1 + ;; +esac + +DJANGO_LOG_LEVEL="$(snapctl get log-level)" +export DJANGO_LOG_LEVEL + +case "$DJANGO_LOG_LEVEL" in + "debug") ;; + "info") ;; + "warning") ;; + "error") ;; + "critical") ;; + "DEBUG") ;; + "INFO") ;; + "WARNING") ;; + "ERROR") ;; + "CRITICAL") ;; + *) + >&2 echo "'$DJANGO_LOG_LEVEL is not a supported value for debug. Possible values are debug, info, warning, error, critical" + return 1 + ;; +esac + +DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" +export DJANGO_ALLOWED_HOSTS +DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" +export DJANGO_DATABASE_PASSWORD +DJANGO_DATABASE_HOST="$(snapctl get database-host)" +export DJANGO_DATABASE_HOST +DJANGO_DATABASE_PORT="$(snapctl get database-port)" +export DJANGO_DATABASE_PORT +DJANGO_DATABASE_USER="$(snapctl get database-user)" +export DJANGO_DATABASE_USER +DJANGO_DATABASE_NAME="$(snapctl get database-name)" +export DJANGO_DATABASE_NAME + +snapctl stop "$SNAP_INSTANCE_NAME" +snapctl start "$SNAP_INSTANCE_NAME" diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install new file mode 100755 index 000000000..1a849ae74 --- /dev/null +++ b/haproxy-route-policy/snap/hooks/install @@ -0,0 +1,13 @@ +#!/bin/sh + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +set -xe + +# Create some directories +mkdir -p "$SNAP_DATA/app" + +# set default configuration values +snapctl set debug='false' +snapctl set log-level='INFO' diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start new file mode 100755 index 000000000..b6996ca1d --- /dev/null +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -0,0 +1,31 @@ +#!/bin/sh + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +set -xe + +DJANGO_DEBUG="$(snapctl get debug)" +export DJANGO_DEBUG +DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" +export DJANGO_ALLOWED_HOSTS +DJANGO_LOG_LEVEL="$(snapctl get log-level)" +export DJANGO_LOG_LEVEL +DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" +export DJANGO_DATABASE_PASSWORD +DJANGO_DATABASE_HOST="$(snapctl get database-host)" +export DJANGO_DATABASE_HOST +DJANGO_DATABASE_PORT="$(snapctl get database-port)" +export DJANGO_DATABASE_PORT +DJANGO_DATABASE_USER="$(snapctl get database-user)" +export DJANGO_DATABASE_USER +DJANGO_DATABASE_NAME="$(snapctl get database-name)" +export DJANGO_DATABASE_NAME + +LOG_LEVEL="info" +if [ "$DJANGO_DEBUG" = "true" ]; then + LOG_LEVEL="debug" +fi + +exec gunicorn --chdir "$SNAP_DATA/app" --bind 0.0.0.0:8080 haproxy_route_policy.wsgi \ + --capture-output --log-level="$LOG_LEVEL" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage new file mode 100755 index 000000000..3829fcfe2 --- /dev/null +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -0,0 +1,25 @@ +#!/bin/sh + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +set -e + +DJANGO_DEBUG="$(snapctl get debug)" +export DJANGO_DEBUG +DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" +export DJANGO_ALLOWED_HOSTS +DJANGO_LOG_LEVEL="$(snapctl get log-level)" +export DJANGO_LOG_LEVEL +DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" +export DJANGO_DATABASE_PASSWORD +DJANGO_DATABASE_HOST="$(snapctl get database-host)" +export DJANGO_DATABASE_HOST +DJANGO_DATABASE_PORT="$(snapctl get database-port)" +export DJANGO_DATABASE_PORT +DJANGO_DATABASE_USER="$(snapctl get database-user)" +export DJANGO_DATABASE_USER +DJANGO_DATABASE_NAME="$(snapctl get database-name)" +export DJANGO_DATABASE_NAME + +exec uv run "$SNAP_DATA/app/manage.py" "$@" diff --git a/haproxy-route-policy/snap/scripts/bin/prepare b/haproxy-route-policy/snap/scripts/bin/prepare new file mode 100755 index 000000000..650b93ea0 --- /dev/null +++ b/haproxy-route-policy/snap/scripts/bin/prepare @@ -0,0 +1,27 @@ +#!/bin/sh + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +# The goal of this script is to prepare the snap environment +# for the Django application. 
+ +set -xe + +# ---- +# API (django and gunicorn) +# The only thing that should be kept between refreshes is the database + +# Create the static directory for django +cp -r "$SNAP/app" "$SNAP_DATA/" +chmod -R 755 "$SNAP_DATA/app" + +# Prepare the django app +DJANGO_SECRET_KEY="$(python3 -c 'import secrets; print(secrets.token_urlsafe(50))')" +export DJANGO_SECRET_KEY +printf "%s" "$DJANGO_SECRET_KEY" > "$SNAP_DATA/app/.secret" +python3 "$SNAP_DATA/app/manage.py" collectstatic --noinput + +# Change ownership of some snap directories to allow snap_daemon to read/write +# https://snapcraft.io/docs/system-usernames +chown -R 584788:root "$SNAP_DATA/app" diff --git a/haproxy-route-policy/snap/snapcraft.yaml b/haproxy-route-policy/snap/snapcraft.yaml new file mode 100644 index 000000000..f3e782bd3 --- /dev/null +++ b/haproxy-route-policy/snap/snapcraft.yaml @@ -0,0 +1,51 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +name: haproxy-route-policy +base: core24 +version: "0.1" +license: Apache-2.0 +summary: HAProxy Route Policy API +description: | + This snap bundles the HAProxy Route Policy Django application to be included in the haproxy-route-policy-operator. +confinement: strict +platforms: + amd64: + build-on: [amd64] + build-for: [amd64] + +system-usernames: + _daemon_: shared + +parts: + haproxy-route-policy: + plugin: uv + source: . 
+ build-snaps: + - astral-uv + stage-packages: + - gunicorn + stage-snaps: + - astral-uv + + scripts: + plugin: dump + source: ./snap/scripts + override-prime: | + craftctl default + chmod -R +rx $CRAFT_PRIME/bin + +apps: + gunicorn: + command: bin/gunicorn-start + daemon: simple + restart-condition: always + plugs: + - network + - network-bind + + manage: + command: bin/manage + plugs: + - network + - network-bind diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 924df5019..932e3511b 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -231,6 +231,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/5f/d908ce938356b209d4d27a7fb159ab9100b8814396a69c0204bb66e38703/djangorestframework_types-0.9.0-py3-none-any.whl", hash = "sha256:5e4258fe43774d0a3d018780170bd702bf615407fe244453ea5ec6e6676b98c4", size = 54947, upload-time = "2024-10-10T00:42:02.311Z" }, ] +[[package]] +name = "gunicorn" +version = "25.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/13/dd3f8e40ea3ee907a6cbf3d1f1f81afcc3ecd0087d313baabfe95372f15c/gunicorn-25.2.0.tar.gz", hash = "sha256:10bd7adb36d44945d97d0a1fdf9a0fb086ae9c7b39e56b4dece8555a6bf4a09c", size = 632709, upload-time = "2026-03-24T22:49:54.433Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/53/fb024445837e02cd5cf989cf349bfac6f3f433c05184ea5d49c8ade751c6/gunicorn-25.2.0-py3-none-any.whl", hash = "sha256:88f5b444d0055bf298435384af7294f325e2273fd37ba9f9ff7b98e0a1e5dfdc", size = 211659, upload-time = "2026-03-24T22:49:52.528Z" }, +] + [[package]] name = "haproxy-route-policy" version = "0.1.0" @@ -239,6 +251,7 @@ dependencies = [ { name = "django" }, { name = "djangorestframework" }, { name = "djangorestframework-simplejwt" }, + { name = "gunicorn" }, { name = "psycopg2-binary" }, { name = "validators" }, { name = "whitenoise" }, @@ -272,6 
+285,7 @@ requires-dist = [ { name = "django", specifier = ">=6.0.3" }, { name = "djangorestframework", specifier = ">=3.16.1" }, { name = "djangorestframework-simplejwt", specifier = ">=5.5.1" }, + { name = "gunicorn", specifier = ">=23.0.0" }, { name = "psycopg2-binary", specifier = ">=2.9.11" }, { name = "validators", specifier = ">=0.35.0" }, { name = "whitenoise", specifier = ">=6.12.0" }, @@ -415,6 +429,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + [[package]] name = "pathspec" version = "1.0.4" From d48faf01d08e12a90de50348664e769b801eca9d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 081/201] fix build issue --- haproxy-route-policy/pyproject.toml | 7 +++++++ haproxy-route-policy/snap/scripts/bin/gunicorn-start | 2 +- haproxy-route-policy/snap/scripts/bin/manage | 2 +- haproxy-route-policy/snap/scripts/bin/prepare | 10 +++++----- haproxy-route-policy/snap/snapcraft.yaml | 2 -- haproxy-route-policy/uv.lock | 2 +- 6 files changed, 15 insertions(+), 10 
deletions(-) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index 022999062..210730f1c 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -14,6 +14,13 @@ dependencies = [ "whitenoise>=6.12.0", ] +[build-system] +requires = ["uv_build>=0.7.2,<1"] +build-backend = "uv_build" + +[tool.uv.build-backend] +module-root = "" + [dependency-groups] auth = [ "djangorestframework-simplejwt>=5.5.1", diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start index b6996ca1d..1faa7184c 100755 --- a/haproxy-route-policy/snap/scripts/bin/gunicorn-start +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -27,5 +27,5 @@ if [ "$DJANGO_DEBUG" = "true" ]; then LOG_LEVEL="debug" fi -exec gunicorn --chdir "$SNAP_DATA/app" --bind 0.0.0.0:8080 haproxy_route_policy.wsgi \ +exec gunicorn --bind 0.0.0.0:8080 haproxy_route_policy.wsgi \ --capture-output --log-level="$LOG_LEVEL" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 3829fcfe2..8a7ef2d65 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -22,4 +22,4 @@ export DJANGO_DATABASE_USER DJANGO_DATABASE_NAME="$(snapctl get database-name)" export DJANGO_DATABASE_NAME -exec uv run "$SNAP_DATA/app/manage.py" "$@" +exec python3 -m django "$@" --settings=haproxy_route_policy.settings diff --git a/haproxy-route-policy/snap/scripts/bin/prepare b/haproxy-route-policy/snap/scripts/bin/prepare index 650b93ea0..a05b5aa5b 100755 --- a/haproxy-route-policy/snap/scripts/bin/prepare +++ b/haproxy-route-policy/snap/scripts/bin/prepare @@ -10,17 +10,17 @@ set -xe # ---- # API (django and gunicorn) -# The only thing that should be kept between refreshes is the database -# Create the static directory for django -cp -r "$SNAP/app" "$SNAP_DATA/" -chmod -R 755 "$SNAP_DATA/app" +# Create the 
writable app directory for runtime data +mkdir -p "$SNAP_DATA/app" # Prepare the django app DJANGO_SECRET_KEY="$(python3 -c 'import secrets; print(secrets.token_urlsafe(50))')" export DJANGO_SECRET_KEY printf "%s" "$DJANGO_SECRET_KEY" > "$SNAP_DATA/app/.secret" -python3 "$SNAP_DATA/app/manage.py" collectstatic --noinput + +export DJANGO_STATIC_ROOT="$SNAP_DATA/app/static" +python3 -m django collectstatic --noinput --settings=haproxy_route_policy.settings # Change ownership of some snap directories to allow snap_daemon to read/write # https://snapcraft.io/docs/system-usernames diff --git a/haproxy-route-policy/snap/snapcraft.yaml b/haproxy-route-policy/snap/snapcraft.yaml index f3e782bd3..007fe1258 100644 --- a/haproxy-route-policy/snap/snapcraft.yaml +++ b/haproxy-route-policy/snap/snapcraft.yaml @@ -23,8 +23,6 @@ parts: source: . build-snaps: - astral-uv - stage-packages: - - gunicorn stage-snaps: - astral-uv diff --git a/haproxy-route-policy/uv.lock b/haproxy-route-policy/uv.lock index 932e3511b..c46a486cb 100644 --- a/haproxy-route-policy/uv.lock +++ b/haproxy-route-policy/uv.lock @@ -246,7 +246,7 @@ wheels = [ [[package]] name = "haproxy-route-policy" version = "0.1.0" -source = { virtual = "." } +source = { editable = "." 
} dependencies = [ { name = "django" }, { name = "djangorestframework" }, From 786efba7acade954b956ecc53f5899f6945253b3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 082/201] update module-name --- haproxy-route-policy/pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/haproxy-route-policy/pyproject.toml b/haproxy-route-policy/pyproject.toml index 210730f1c..e076f2a5e 100644 --- a/haproxy-route-policy/pyproject.toml +++ b/haproxy-route-policy/pyproject.toml @@ -20,6 +20,7 @@ build-backend = "uv_build" [tool.uv.build-backend] module-root = "" +module-name = ["haproxy_route_policy", "policy"] [dependency-groups] auth = [ From d24fab2e26b2c7e86681d46ee57550c433b1900e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 083/201] set secret key as default, drop fetching from file --- .../haproxy_route_policy/settings.py | 10 +------ haproxy-route-policy/snap/hooks/install | 6 ++--- .../snap/scripts/bin/gunicorn-start | 2 ++ haproxy-route-policy/snap/scripts/bin/manage | 5 ++++ haproxy-route-policy/snap/scripts/bin/prepare | 27 ------------------- 5 files changed, 11 insertions(+), 39 deletions(-) delete mode 100755 haproxy-route-policy/snap/scripts/bin/prepare diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 685f5412c..05b70ee77 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -20,15 +20,7 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent -SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "") -if SECRET_KEY == "": - # Read the secret key from a file to have it persist between - # the manage.py script calls and the gunicorn process - # (it needs to be the same for the JWT tokens). 
- secret_path = Path(BASE_DIR, ".secret") - if secret_path.exists(): - with open(secret_path, "r", encoding="utf-8") as secretfile: - SECRET_KEY = secretfile.read().strip() +SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") DEBUG = os.environ.get("DJANGO_DEBUG", "").lower() == "true" ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS", "[]")) diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install index 1a849ae74..94bcbfbef 100755 --- a/haproxy-route-policy/snap/hooks/install +++ b/haproxy-route-policy/snap/hooks/install @@ -5,9 +5,9 @@ set -xe -# Create some directories -mkdir -p "$SNAP_DATA/app" - # set default configuration values snapctl set debug='false' snapctl set log-level='INFO' +snapctl set allowed-hosts='["localhost", "127.0.0.1", "0.0.0.0"]' +SECRET_KEY=$(python3 -c 'import secrets; print(secrets.token_urlsafe(50))') +snapctl set secret-key="$SECRET_KEY" diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start index 1faa7184c..19de8ea5c 100755 --- a/haproxy-route-policy/snap/scripts/bin/gunicorn-start +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -5,6 +5,8 @@ set -xe +DJANGO_SECRET_KEY="$(snapctl get secret-key)" +export DJANGO_SECRET_KEY DJANGO_DEBUG="$(snapctl get debug)" export DJANGO_DEBUG DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 8a7ef2d65..fd5853fd6 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -5,9 +5,14 @@ set -e +DJANGO_SECRET_KEY="$(snapctl get secret-key)" +export DJANGO_SECRET_KEY DJANGO_DEBUG="$(snapctl get debug)" export DJANGO_DEBUG DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" +if [ -z "$DJANGO_ALLOWED_HOSTS" ]; then + DJANGO_ALLOWED_HOSTS="[]" +fi export DJANGO_ALLOWED_HOSTS DJANGO_LOG_LEVEL="$(snapctl get log-level)" 
export DJANGO_LOG_LEVEL diff --git a/haproxy-route-policy/snap/scripts/bin/prepare b/haproxy-route-policy/snap/scripts/bin/prepare deleted file mode 100755 index a05b5aa5b..000000000 --- a/haproxy-route-policy/snap/scripts/bin/prepare +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh - -# Copyright 2026 Canonical Ltd. -# See LICENSE file for licensing details. - -# The goal of this script is to prepare the snap environment -# for the Django application. - -set -xe - -# ---- -# API (django and gunicorn) - -# Create the writable app directory for runtime data -mkdir -p "$SNAP_DATA/app" - -# Prepare the django app -DJANGO_SECRET_KEY="$(python3 -c 'import secrets; print(secrets.token_urlsafe(50))')" -export DJANGO_SECRET_KEY -printf "%s" "$DJANGO_SECRET_KEY" > "$SNAP_DATA/app/.secret" - -export DJANGO_STATIC_ROOT="$SNAP_DATA/app/static" -python3 -m django collectstatic --noinput --settings=haproxy_route_policy.settings - -# Change ownership of some snap directories to allow snap_daemon to read/write -# https://snapcraft.io/docs/system-usernames -chown -R 584788:root "$SNAP_DATA/app" From f2c6f0aeabc254af7116641e3e93cd3a80f6c916 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 084/201] update readme, fix secret key generation --- haproxy-route-policy/README.md | 24 ++++++++++++++++++++ haproxy-route-policy/snap/hooks/install | 4 ++-- haproxy-route-policy/snap/scripts/bin/manage | 2 +- haproxy-route-policy/snap/snapcraft.yaml | 5 ++++ 4 files changed, 32 insertions(+), 3 deletions(-) diff --git a/haproxy-route-policy/README.md b/haproxy-route-policy/README.md index e69de29bb..f97e7ced4 100644 --- a/haproxy-route-policy/README.md +++ b/haproxy-route-policy/README.md @@ -0,0 +1,24 @@ +#### Basic setup + +Start a PostgreSQL database: + +``` +docker run -d --name postgres -p 127.0.0.1:5432:5432 -e POSTGRES_PASSWORD=postgres -e POSTGRES_USERNAME=postgres postgres:latest +``` + +Basic snap configurations: + +``` +sudo snap set 
haproxy-route-policy database-password=postgres +sudo snap set haproxy-route-policy database-host=localhost +sudo snap set haproxy-route-policy database-port=5432 +sudo snap set haproxy-route-policy database-user=postgres +sudo snap set haproxy-route-policy database-name=postgres +``` + +## Learn more +* [Read more](https://charmhub.io/haproxy-operator/docs) + +## Project and community +* [Issues](https://github.com/canonical/haproxy-operator/issues) +* [Matrix](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install index 94bcbfbef..81c5db0cf 100755 --- a/haproxy-route-policy/snap/hooks/install +++ b/haproxy-route-policy/snap/hooks/install @@ -8,6 +8,6 @@ set -xe # set default configuration values snapctl set debug='false' snapctl set log-level='INFO' -snapctl set allowed-hosts='["localhost", "127.0.0.1", "0.0.0.0"]' -SECRET_KEY=$(python3 -c 'import secrets; print(secrets.token_urlsafe(50))') +snapctl set allowed-hosts='["*"]' +SECRET_KEY="$(tr -dc a-zA-Z0-9 < /dev/urandom | head -c 50)" snapctl set secret-key="$SECRET_KEY" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index fd5853fd6..f25438206 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -27,4 +27,4 @@ export DJANGO_DATABASE_USER DJANGO_DATABASE_NAME="$(snapctl get database-name)" export DJANGO_DATABASE_NAME -exec python3 -m django "$@" --settings=haproxy_route_policy.settings +exec $SNAP/bin/uv run $SNAP/app/manage.py "$@" --settings=haproxy_route_policy.settings diff --git a/haproxy-route-policy/snap/snapcraft.yaml b/haproxy-route-policy/snap/snapcraft.yaml index 007fe1258..813eac1fb 100644 --- a/haproxy-route-policy/snap/snapcraft.yaml +++ b/haproxy-route-policy/snap/snapcraft.yaml @@ -25,6 +25,11 @@ parts: - astral-uv stage-snaps: - astral-uv + override-build: | + # Also copy the source code to 
the install directory for the manage.py script + cp -r . $SNAPCRAFT_PART_INSTALL/app + chown -R 584792:584792 $SNAPCRAFT_PART_INSTALL/app + craftctl default scripts: plugin: dump From e4c591a0c0ff61f752e2fd31ed1263bf416dd950 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 085/201] add change artifact --- docs/release-notes/artifacts/pr0415.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0415.yaml diff --git a/docs/release-notes/artifacts/pr0415.yaml b/docs/release-notes/artifacts/pr0415.yaml new file mode 100644 index 000000000..8bb7166c7 --- /dev/null +++ b/docs/release-notes/artifacts/pr0415.yaml @@ -0,0 +1,20 @@ +version_schema: 2 + +changes: + - title: Added snap packaging and runtime scripts for haproxy-route-policy + author: tphan025 + type: minor + description: > + Added snap packaging for the haproxy-route-policy app, including + `snap/snapcraft.yaml`, install/configure hooks, and helper scripts to run + Gunicorn and Django management commands with snap configuration values. + Added Gunicorn as a dependency and configured uv build metadata in + `pyproject.toml` for packaging. Updated the app README with a basic setup + flow for PostgreSQL and snap configuration. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/415 + related_doc: + related_issue: + visibility: public + highlight: false From 9da89700318194043d9c76ad83dc6c190496e842 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 086/201] add build snap workflow --- .github/workflows/test.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 23e5cc81d..551ace356 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -24,3 +24,17 @@ jobs: self-hosted-runner-image: "noble" working-directory: ${{ matrix.charm.working-directory }} with-uv: true + build-snap-haproxy-route-policy: + name: Build Snap + runs-on: ubuntu-latest + steps: + - name: Build Snap + id: snapcraft + uses: snapcore/action-build@v1 + with: + path: ./haproxy-route-policy + - name: Upload Snap Artifact + uses: actions/upload-artifact@v3 + with: + name: snap + path: ${{ steps.snapcraft.outputs.snap }} From eed68e3e06af557e4f6346eaacbaa011c598e939 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 087/201] use upload-artifact v4 --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 551ace356..66fba5126 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -34,7 +34,7 @@ jobs: with: path: ./haproxy-route-policy - name: Upload Snap Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: snap path: ${{ steps.snapcraft.outputs.snap }} From d04a0f56c12851fe1951affee7b2483061cca848 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 088/201] update path --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml 
index 66fba5126..978a7fd13 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -32,7 +32,7 @@ jobs: id: snapcraft uses: snapcore/action-build@v1 with: - path: ./haproxy-route-policy + path: haproxy-route-policy - name: Upload Snap Artifact uses: actions/upload-artifact@v4 with: From acb18014980f628f7b639aa69901ef90dbbbf8d7 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:29 +0200 Subject: [PATCH 089/201] add checkout step --- .github/workflows/test.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 978a7fd13..b106d68d7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -28,13 +28,14 @@ jobs: name: Build Snap runs-on: ubuntu-latest steps: + - uses: actions/checkout@v6 - name: Build Snap id: snapcraft uses: snapcore/action-build@v1 with: path: haproxy-route-policy - name: Upload Snap Artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: snap path: ${{ steps.snapcraft.outputs.snap }} From 8541e19330240ef0fd9e019b0cdc973e3a97d4cd Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 090/201] change working dir for build action --- .github/workflows/test.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b106d68d7..d3e8280c1 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -32,8 +32,6 @@ jobs: - name: Build Snap id: snapcraft uses: snapcore/action-build@v1 - with: - path: haproxy-route-policy - name: Upload Snap Artifact uses: actions/upload-artifact@v5 with: From ffa05144baada07b0db25a4ea7ad0b7695463821 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 091/201] sparse checkout the policy directory --- .github/workflows/test.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/.github/workflows/test.yaml b/.github/workflows/test.yaml index d3e8280c1..ad6bcf2bd 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -29,6 +29,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + with: + sparse-checkout: haproxy-route-policy - name: Build Snap id: snapcraft uses: snapcore/action-build@v1 From 2c4c6a125244c080d610aa6cdb9c5bf3ab81ef38 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 092/201] debug --- .github/workflows/test.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index ad6bcf2bd..0eba2ab41 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -31,6 +31,9 @@ jobs: - uses: actions/checkout@v6 with: sparse-checkout: haproxy-route-policy + - run: | + pwd + ls -la - name: Build Snap id: snapcraft uses: snapcore/action-build@v1 From 04852761a239163146e23f41ba59bc2d9404b3e3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 093/201] debug path --- .github/workflows/test.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 0eba2ab41..8c6ff04f3 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -34,9 +34,12 @@ jobs: - run: | pwd ls -la + ls -la haproxy-route-policy - name: Build Snap id: snapcraft uses: snapcore/action-build@v1 + with: + path: haproxy-route-policy - name: Upload Snap Artifact uses: actions/upload-artifact@v5 with: From e583aec110c8776678e06bbe690a8be4a614f9a9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 094/201] remove debug --- .github/workflows/test.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 8c6ff04f3..fcc1406d7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ 
-31,10 +31,6 @@ jobs: - uses: actions/checkout@v6 with: sparse-checkout: haproxy-route-policy - - run: | - pwd - ls -la - ls -la haproxy-route-policy - name: Build Snap id: snapcraft uses: snapcore/action-build@v1 From c142c200163f88a01c0287cd81a9aee2e3943ea6 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 095/201] update scripts to guard against empty DB config values and update allowed-hosts to be more restrictive --- haproxy-route-policy/snap/hooks/configure | 5 +++++ haproxy-route-policy/snap/hooks/install | 2 +- haproxy-route-policy/snap/scripts/bin/gunicorn-start | 5 +++++ haproxy-route-policy/snap/scripts/bin/manage | 5 +++++ 4 files changed, 16 insertions(+), 1 deletion(-) diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure index 75634aa49..ccc0f6441 100644 --- a/haproxy-route-policy/snap/hooks/configure +++ b/haproxy-route-policy/snap/hooks/configure @@ -48,5 +48,10 @@ export DJANGO_DATABASE_USER DJANGO_DATABASE_NAME="$(snapctl get database-name)" export DJANGO_DATABASE_NAME +if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJANGO_DATABASE_USER" ] || [ -z "$DJANGO_DATABASE_PASSWORD" ] || [ -z "$DJANGO_DATABASE_NAME" ]; then + >&2 echo "One or more database configuration values are missing. Please ensure database-host, database-port, database-user, database-password, and database-name are all set." 
+ return 1 +fi + snapctl stop "$SNAP_INSTANCE_NAME" snapctl start "$SNAP_INSTANCE_NAME" diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install index 81c5db0cf..60cc60e5a 100755 --- a/haproxy-route-policy/snap/hooks/install +++ b/haproxy-route-policy/snap/hooks/install @@ -8,6 +8,6 @@ set -xe # set default configuration values snapctl set debug='false' snapctl set log-level='INFO' -snapctl set allowed-hosts='["*"]' +snapctl set allowed-hosts='["localhost", "127.0.0.1"]' SECRET_KEY="$(tr -dc a-zA-Z0-9 < /dev/urandom | head -c 50)" snapctl set secret-key="$SECRET_KEY" diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start index 19de8ea5c..e42d3d2cd 100755 --- a/haproxy-route-policy/snap/scripts/bin/gunicorn-start +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -29,5 +29,10 @@ if [ "$DJANGO_DEBUG" = "true" ]; then LOG_LEVEL="debug" fi +if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJANGO_DATABASE_USER" ] || [ -z "$DJANGO_DATABASE_PASSWORD" ] || [ -z "$DJANGO_DATABASE_NAME" ]; then + >&2 echo "One or more database configuration values are missing. Please ensure database-host, database-port, database-user, database-password, and database-name are all set." 
+ return 1 +fi + exec gunicorn --bind 0.0.0.0:8080 haproxy_route_policy.wsgi \ --capture-output --log-level="$LOG_LEVEL" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index f25438206..f051b38c8 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -27,4 +27,9 @@ export DJANGO_DATABASE_USER DJANGO_DATABASE_NAME="$(snapctl get database-name)" export DJANGO_DATABASE_NAME +if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJANGO_DATABASE_USER" ] || [ -z "$DJANGO_DATABASE_PASSWORD" ] || [ -z "$DJANGO_DATABASE_NAME" ]; then + >&2 echo "One or more database configuration values are missing. Please ensure database-host, database-port, database-user, database-password, and database-name are all set." + return 1 +fi + exec $SNAP/bin/uv run $SNAP/app/manage.py "$@" --settings=haproxy_route_policy.settings From 46210081fff56b48ee019264846656f506e7ef67 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 096/201] update docs link --- haproxy-route-policy/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/README.md b/haproxy-route-policy/README.md index f97e7ced4..539f4ab85 100644 --- a/haproxy-route-policy/README.md +++ b/haproxy-route-policy/README.md @@ -17,7 +17,7 @@ sudo snap set haproxy-route-policy database-name=postgres ``` ## Learn more -* [Read more](https://charmhub.io/haproxy-operator/docs) +* [Read more](https://documentation.ubuntu.com/haproxy-charm/latest/) ## Project and community * [Issues](https://github.com/canonical/haproxy-operator/issues) From d1476c5dcd315d0ffe785001693c54ebd0db971b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 097/201] fix(deps): update all non-major dependencies (#381) * fix(deps): update all non-major dependencies * Test a fix for the RTD workflows * Update Python 
version to 3.12 * Revert change to uses key --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Erin Conley Co-authored-by: swetha1654 --- .github/workflows/docs_rtd.yaml | 1 + docs/requirements.txt | 12 ++++---- .../pyproject.toml | 2 +- haproxy-ddos-protection-configurator/uv.lock | 14 ++++----- haproxy-operator/pyproject.toml | 6 ++-- haproxy-operator/uv.lock | 30 +++++++++---------- haproxy-spoe-auth-operator/pyproject.toml | 2 +- haproxy-spoe-auth-operator/uv.lock | 14 ++++----- 8 files changed, 41 insertions(+), 40 deletions(-) diff --git a/.github/workflows/docs_rtd.yaml b/.github/workflows/docs_rtd.yaml index 07a1acb9c..99e5e1e46 100644 --- a/.github/workflows/docs_rtd.yaml +++ b/.github/workflows/docs_rtd.yaml @@ -7,4 +7,5 @@ jobs: secrets: inherit with: enable-sphinx-python-dependency-build-checks: false + python-version: '3.12' diff --git a/docs/requirements.txt b/docs/requirements.txt index 4ea51cd9c..4ec4c8b30 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,18 +1,18 @@ # Canonical theme (still needed for Furo theme and custom templates) -canonical-sphinx==0.5.2 +canonical-sphinx==0.6.0 # Extensions previously auto-loaded by canonical-sphinx myst-parser==4.0.1 sphinx-autobuild==2024.10.3 -sphinx-design==0.6.1 +sphinx-design==0.7.0 sphinx-notfound-page==1.1.0 sphinx-reredirects==0.1.6 -sphinx-tabs==3.4.7 +sphinx-tabs==3.5.0 sphinxcontrib-jquery==4.1 sphinxext-opengraph==0.13.0 # Extra extensions, previously bundled as canonical-sphinx-extensions -sphinx-config-options==0.1.0 +sphinx-config-options==0.1.1 sphinx-contributor-listing==0.1.0 sphinx-filtered-toctree==0.1.0 sphinx-related-links==0.1.2 @@ -23,7 +23,7 @@ sphinx-youtube-links==0.1.0 # Other dependencies packaging==26.0 -sphinxcontrib-svg2pdfconverter[CairoSVG]==2.0.0 +sphinxcontrib-svg2pdfconverter[CairoSVG]==2.1.0 sphinx-last-updated-by-git==0.3.8 sphinx-sitemap==2.9.0 @@ -32,4 +32,4 @@ rst2html==2020.7.4 vale==3.13.0.0 # 
Additional extensions -sphinxcontrib-mermaid==2.0.0 +sphinxcontrib-mermaid==2.0.1 diff --git a/haproxy-ddos-protection-configurator/pyproject.toml b/haproxy-ddos-protection-configurator/pyproject.toml index e76c8a304..ebdcd0e65 100644 --- a/haproxy-ddos-protection-configurator/pyproject.toml +++ b/haproxy-ddos-protection-configurator/pyproject.toml @@ -14,7 +14,7 @@ classifiers = [ "Programming Language :: Python :: 3.14", ] dependencies = [ - "ops==3.5.2", + "ops==3.6.0", "pydantic==2.12.5", ] diff --git a/haproxy-ddos-protection-configurator/uv.lock b/haproxy-ddos-protection-configurator/uv.lock index 206e5cb50..3568a315b 100644 --- a/haproxy-ddos-protection-configurator/uv.lock +++ b/haproxy-ddos-protection-configurator/uv.lock @@ -582,7 +582,7 @@ unit = [ [package.metadata] requires-dist = [ - { name = "ops", specifier = "==3.5.2" }, + { name = "ops", specifier = "==3.6.0" }, { name = "pydantic", specifier = "==2.12.5" }, ] @@ -1101,16 +1101,16 @@ wheels = [ [[package]] name = "ops" -version = "3.5.2" +version = "3.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "pyyaml" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/72/50bdb29831b8ed92034e9657fec89301d8df3aa8da3da1d37ecbdf1baab6/ops-3.5.2.tar.gz", hash = "sha256:849c9ed85eadf265b8a927d5e857cd112221dd71b35e4b13329ccb938c3afd18", size = 578181, upload-time = "2026-02-11T01:49:48.345Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/bb/79b7efdb1243cbad11b6568c51ba4fb7358cd2c4d13bfd971a77c0aa7440/ops-3.6.0.tar.gz", hash = "sha256:a1c3361049c66759840a436143b07c74c2a46dcc44cbfd1177a9051f849c7971", size = 579236, upload-time = "2026-02-26T04:19:12.689Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/5c/84b41a67c2dc904f92f424e981eb65641ce095936fb9e6d7b4a315072d1a/ops-3.5.2-py3-none-any.whl", hash = 
"sha256:c715128a51ddcdf0fff463428b0f56a93e5963187e599b66594b4fc74458781b", size = 211688, upload-time = "2026-02-11T01:49:43.935Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b6/d7daab4f841566d3cb0402d3463f7c1a00626724d6d7c02d7bf934ae6c86/ops-3.6.0-py3-none-any.whl", hash = "sha256:341c6688684446cc4b42860738898683feb271175bb9c4775ae68c81e4e0976a", size = 211856, upload-time = "2026-02-26T04:19:08.012Z" }, ] [package.optional-dependencies] @@ -1120,16 +1120,16 @@ testing = [ [[package]] name = "ops-scenario" -version = "8.5.2" +version = "8.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ops" }, { name = "pyyaml" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/27/a999aa877a34fc1b2c07b0f51cb1dc58a89e23bcaf4f626e28bec39825fd/ops_scenario-8.5.2.tar.gz", hash = "sha256:ebcdc4f8837f9a1cd42624f49d9d8b2502ebeeedad552516225b3420f989c369", size = 71693, upload-time = "2026-02-11T01:49:49.59Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/c8/15d9f91eafa46d1dfa7f580be3274c22399f941724b74e274334de9468bb/ops_scenario-8.6.0.tar.gz", hash = "sha256:5a40a91fd5e9b6c8249933944dfc6e807ad2ddbd36a68c800746b9bb8a0eabfb", size = 71728, upload-time = "2026-02-26T04:19:15.34Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/b1/57816b48087fa391d0b113e067ca80fdd36a2103c57cf9cf28fe5a82f52e/ops_scenario-8.5.2-py3-none-any.whl", hash = "sha256:79125d82ca753394d9d9e4a53c55931d3d0114421c1b745f5611cb5827d37012", size = 64241, upload-time = "2026-02-11T01:49:45.753Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d2/fb3176805339d3aa95b9d6e43478d0e34355c6c46f27723249f46bb8d19d/ops_scenario-8.6.0-py3-none-any.whl", hash = "sha256:469490a042dc45eca24eef7aa1b9214704d97d67503ad8465414ab68dc989d30", size = 64241, upload-time = "2026-02-26T04:19:09.579Z" }, ] [[package]] diff --git a/haproxy-operator/pyproject.toml b/haproxy-operator/pyproject.toml index 
b989cdecb..b1343c939 100644 --- a/haproxy-operator/pyproject.toml +++ b/haproxy-operator/pyproject.toml @@ -14,12 +14,12 @@ classifiers = [ "Programming Language :: Python :: 3.14", ] dependencies = [ - "cosl==1.4.0", + "cosl==1.6.1", "cryptography==46.0.5", "interface-hacluster @ git+https://github.com/charmed-kubernetes/charm-interface-hacluster@1.32+ck2", "jsonschema==4.26.0", - "opentelemetry-api==1.39.1", - "ops==3.5.2", + "opentelemetry-api==1.40.0", + "ops==3.6.0", "pydantic==2.12.5", "validators>=0.35.0", ] diff --git a/haproxy-operator/uv.lock b/haproxy-operator/uv.lock index 5b838d65b..342591401 100644 --- a/haproxy-operator/uv.lock +++ b/haproxy-operator/uv.lock @@ -339,7 +339,7 @@ wheels = [ [[package]] name = "cosl" -version = "1.4.0" +version = "1.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ops" }, @@ -348,9 +348,9 @@ dependencies = [ { name = "tenacity" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/de/c41b6308ce2a6a1523fe1d5cebb831ad779e55008f8d8c0c724fccc4b407/cosl-1.4.0.tar.gz", hash = "sha256:eb6ebf682f76eec24e3c9759fb6fe5185660fcf7bb3dd8adc42e5a74816c8615", size = 46191, upload-time = "2025-11-25T17:16:01.105Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/98/1a1f8aa7965ede9abfeb649b83375baf4e2f523778f90b841281cbe3603a/cosl-1.6.1.tar.gz", hash = "sha256:f96a6a978dfdee4a3b460cc48fa18514663bbc1c3a4f323315e3dbe3e6a2a596", size = 149512, upload-time = "2026-03-09T21:44:46.744Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/9b/716ceb6021530b9cdbbfd681b7f296b660cdc763c365283e82581a71c299/cosl-1.4.0-py3-none-any.whl", hash = "sha256:410042805b17876c19d405ff5bf5c2461a84a7bff389ce3ad928f44e8c09b882", size = 36649, upload-time = "2025-11-25T17:16:00.098Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8f/3ca0f470fbc7b26ed33b5fe5815e38b6a628b8bb4df961924cae38755c46/cosl-1.6.1-py3-none-any.whl", hash = 
"sha256:12db85a81317c5b056171642098be91c09e78e04875ed1262b99681dea43b533", size = 37800, upload-time = "2026-03-09T21:44:45.373Z" }, ] [[package]] @@ -710,12 +710,12 @@ unit = [ [package.metadata] requires-dist = [ - { name = "cosl", specifier = "==1.4.0" }, + { name = "cosl", specifier = "==1.6.1" }, { name = "cryptography", specifier = "==46.0.5" }, { name = "interface-hacluster", git = "https://github.com/charmed-kubernetes/charm-interface-hacluster?rev=1.32%2Bck2" }, { name = "jsonschema", specifier = "==4.26.0" }, - { name = "opentelemetry-api", specifier = "==1.39.1" }, - { name = "ops", specifier = "==3.5.2" }, + { name = "opentelemetry-api", specifier = "==1.40.0" }, + { name = "ops", specifier = "==3.6.0" }, { name = "pydantic", specifier = "==2.12.5" }, { name = "validators", specifier = ">=0.35.0" }, ] @@ -1217,29 +1217,29 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.39.1" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, ] [[package]] name = "ops" -version = "3.5.2" +version = "3.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "pyyaml" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/72/50bdb29831b8ed92034e9657fec89301d8df3aa8da3da1d37ecbdf1baab6/ops-3.5.2.tar.gz", hash = "sha256:849c9ed85eadf265b8a927d5e857cd112221dd71b35e4b13329ccb938c3afd18", size = 578181, upload-time = "2026-02-11T01:49:48.345Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/bb/79b7efdb1243cbad11b6568c51ba4fb7358cd2c4d13bfd971a77c0aa7440/ops-3.6.0.tar.gz", hash = "sha256:a1c3361049c66759840a436143b07c74c2a46dcc44cbfd1177a9051f849c7971", size = 579236, upload-time = "2026-02-26T04:19:12.689Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/5c/84b41a67c2dc904f92f424e981eb65641ce095936fb9e6d7b4a315072d1a/ops-3.5.2-py3-none-any.whl", hash = "sha256:c715128a51ddcdf0fff463428b0f56a93e5963187e599b66594b4fc74458781b", size = 211688, upload-time = "2026-02-11T01:49:43.935Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b6/d7daab4f841566d3cb0402d3463f7c1a00626724d6d7c02d7bf934ae6c86/ops-3.6.0-py3-none-any.whl", hash = "sha256:341c6688684446cc4b42860738898683feb271175bb9c4775ae68c81e4e0976a", size = 211856, upload-time = "2026-02-26T04:19:08.012Z" }, ] [package.optional-dependencies] @@ -1249,16 +1249,16 @@ testing = [ [[package]] name = "ops-scenario" -version = "8.5.2" +version = "8.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ops" }, { name = "pyyaml" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/cb/27/a999aa877a34fc1b2c07b0f51cb1dc58a89e23bcaf4f626e28bec39825fd/ops_scenario-8.5.2.tar.gz", hash = "sha256:ebcdc4f8837f9a1cd42624f49d9d8b2502ebeeedad552516225b3420f989c369", size = 71693, upload-time = "2026-02-11T01:49:49.59Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/c8/15d9f91eafa46d1dfa7f580be3274c22399f941724b74e274334de9468bb/ops_scenario-8.6.0.tar.gz", hash = "sha256:5a40a91fd5e9b6c8249933944dfc6e807ad2ddbd36a68c800746b9bb8a0eabfb", size = 71728, upload-time = "2026-02-26T04:19:15.34Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/b1/57816b48087fa391d0b113e067ca80fdd36a2103c57cf9cf28fe5a82f52e/ops_scenario-8.5.2-py3-none-any.whl", hash = "sha256:79125d82ca753394d9d9e4a53c55931d3d0114421c1b745f5611cb5827d37012", size = 64241, upload-time = "2026-02-11T01:49:45.753Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d2/fb3176805339d3aa95b9d6e43478d0e34355c6c46f27723249f46bb8d19d/ops_scenario-8.6.0-py3-none-any.whl", hash = "sha256:469490a042dc45eca24eef7aa1b9214704d97d67503ad8465414ab68dc989d30", size = 64241, upload-time = "2026-02-26T04:19:09.579Z" }, ] [[package]] diff --git a/haproxy-spoe-auth-operator/pyproject.toml b/haproxy-spoe-auth-operator/pyproject.toml index f1bac528c..cc2b563c8 100644 --- a/haproxy-spoe-auth-operator/pyproject.toml +++ b/haproxy-spoe-auth-operator/pyproject.toml @@ -17,7 +17,7 @@ dependencies = [ "charmlibs-snap==1.0.1", "jinja2==3.1.6", "jsonschema==4.26.0", - "ops==3.5.2", + "ops==3.6.0", "pydantic==2.12.5", ] diff --git a/haproxy-spoe-auth-operator/uv.lock b/haproxy-spoe-auth-operator/uv.lock index 468ac81e2..e6e23f718 100644 --- a/haproxy-spoe-auth-operator/uv.lock +++ b/haproxy-spoe-auth-operator/uv.lock @@ -498,7 +498,7 @@ requires-dist = [ { name = "charmlibs-snap", specifier = "==1.0.1" }, { name = "jinja2", specifier = "==3.1.6" }, { name = "jsonschema", specifier = "==4.26.0" }, - { name = "ops", specifier = "==3.5.2" }, + { name 
= "ops", specifier = "==3.6.0" }, { name = "pydantic", specifier = "==2.12.5" }, ] @@ -911,16 +911,16 @@ wheels = [ [[package]] name = "ops" -version = "3.5.2" +version = "3.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "pyyaml" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/72/50bdb29831b8ed92034e9657fec89301d8df3aa8da3da1d37ecbdf1baab6/ops-3.5.2.tar.gz", hash = "sha256:849c9ed85eadf265b8a927d5e857cd112221dd71b35e4b13329ccb938c3afd18", size = 578181, upload-time = "2026-02-11T01:49:48.345Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/bb/79b7efdb1243cbad11b6568c51ba4fb7358cd2c4d13bfd971a77c0aa7440/ops-3.6.0.tar.gz", hash = "sha256:a1c3361049c66759840a436143b07c74c2a46dcc44cbfd1177a9051f849c7971", size = 579236, upload-time = "2026-02-26T04:19:12.689Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/5c/84b41a67c2dc904f92f424e981eb65641ce095936fb9e6d7b4a315072d1a/ops-3.5.2-py3-none-any.whl", hash = "sha256:c715128a51ddcdf0fff463428b0f56a93e5963187e599b66594b4fc74458781b", size = 211688, upload-time = "2026-02-11T01:49:43.935Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b6/d7daab4f841566d3cb0402d3463f7c1a00626724d6d7c02d7bf934ae6c86/ops-3.6.0-py3-none-any.whl", hash = "sha256:341c6688684446cc4b42860738898683feb271175bb9c4775ae68c81e4e0976a", size = 211856, upload-time = "2026-02-26T04:19:08.012Z" }, ] [package.optional-dependencies] @@ -930,16 +930,16 @@ testing = [ [[package]] name = "ops-scenario" -version = "8.5.2" +version = "8.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ops" }, { name = "pyyaml" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/27/a999aa877a34fc1b2c07b0f51cb1dc58a89e23bcaf4f626e28bec39825fd/ops_scenario-8.5.2.tar.gz", hash = 
"sha256:ebcdc4f8837f9a1cd42624f49d9d8b2502ebeeedad552516225b3420f989c369", size = 71693, upload-time = "2026-02-11T01:49:49.59Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/c8/15d9f91eafa46d1dfa7f580be3274c22399f941724b74e274334de9468bb/ops_scenario-8.6.0.tar.gz", hash = "sha256:5a40a91fd5e9b6c8249933944dfc6e807ad2ddbd36a68c800746b9bb8a0eabfb", size = 71728, upload-time = "2026-02-26T04:19:15.34Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/b1/57816b48087fa391d0b113e067ca80fdd36a2103c57cf9cf28fe5a82f52e/ops_scenario-8.5.2-py3-none-any.whl", hash = "sha256:79125d82ca753394d9d9e4a53c55931d3d0114421c1b745f5611cb5827d37012", size = 64241, upload-time = "2026-02-11T01:49:45.753Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d2/fb3176805339d3aa95b9d6e43478d0e34355c6c46f27723249f46bb8d19d/ops_scenario-8.6.0-py3-none-any.whl", hash = "sha256:469490a042dc45eca24eef7aa1b9214704d97d67503ad8465414ab68dc989d30", size = 64241, upload-time = "2026-02-26T04:19:09.579Z" }, ] [[package]] From 3258f9d85343628a3ca66de36f198c1c53241b51 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:30 +0200 Subject: [PATCH 098/201] fix(deps): update dependency cryptography to v46.0.6 [security] (#418) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- haproxy-operator/pyproject.toml | 2 +- haproxy-operator/uv.lock | 92 ++++++++++++++++----------------- 2 files changed, 47 insertions(+), 47 deletions(-) diff --git a/haproxy-operator/pyproject.toml b/haproxy-operator/pyproject.toml index b1343c939..56a154388 100644 --- a/haproxy-operator/pyproject.toml +++ b/haproxy-operator/pyproject.toml @@ -15,7 +15,7 @@ classifiers = [ ] dependencies = [ "cosl==1.6.1", - "cryptography==46.0.5", + "cryptography==46.0.6", "interface-hacluster @ git+https://github.com/charmed-kubernetes/charm-interface-hacluster@1.32+ck2", "jsonschema==4.26.0", "opentelemetry-api==1.40.0", diff --git a/haproxy-operator/uv.lock 
b/haproxy-operator/uv.lock index 342591401..825b36acf 100644 --- a/haproxy-operator/uv.lock +++ b/haproxy-operator/uv.lock @@ -429,55 +429,55 @@ wheels = [ [[package]] name = "cryptography" -version = "46.0.5" +version = "46.0.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, - { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, - { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, - { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, - 
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, - { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, - { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, - { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, - { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, - { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, - { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, - { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, - { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, - { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, - { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, - { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, - { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, - { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, - { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, - { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, - { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, - { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, - { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, - { url = 
"https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, - { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, + { url = "https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" }, + { url = "https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" }, + { url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, ] [[package]] @@ -711,7 +711,7 @@ unit = [ [package.metadata] requires-dist = [ { name = "cosl", specifier = "==1.6.1" }, - { name = "cryptography", specifier = "==46.0.5" }, + { name = "cryptography", specifier = "==46.0.6" }, { name = "interface-hacluster", git = "https://github.com/charmed-kubernetes/charm-interface-hacluster?rev=1.32%2Bck2" }, { name = "jsonschema", specifier = "==4.26.0" }, { name = "opentelemetry-api", specifier = "==1.40.0" }, From 41551452e72b41ae18eb21b143b356c981676b56 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:52 +0200 Subject: [PATCH 099/201] Haproxy route policy rules matching (#401) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Implement request API * update model validation before save and add unit tests * use environment variables for secret key * ruff format * add secret key for testing * remove port attribute from test * add requirements.txt for testing * reintroduce port field * Add change artifact * run lint with uv * add unit testing * remove custom test * 
update migration * Wrap creation under `transaction.atomic` Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> * Potential fix for pull request finding Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> * remove unused code * minor fixes to settings * Potential fix for pull request finding Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> * use django serializer * update gitignore * update view to use django rest * remove python-version * update gitignore * add missing license headers * Add rules engine * update migration * update view * fix lint * remove extra tests * add validation and update tests * update view * remove to_dict * use serializer for get * use serializer * remove unused tests * use filter for delete query * update tests and move validation to serializer class * Apply suggestion from @github-actions[bot] Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> * Update haproxy-route-policy/policy/migrations/0001_initial.py Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> * Revert "Update haproxy-route-policy/policy/migrations/0001_initial.py" This reverts commit 10a2708834d797cfbe036bfdfc33f26fe0c7eed2. * ignore migration files for license header * remove license header from generated files * add change artifact * add envlist to tox commands * update envlist * convert pk to uuid for requests * Add guard against mal-formed uuid and parameter. 
Add logging configs, Add middleware to guard against db connection errors * add validators for port and paths * add tests for validators * add note for migration * ruff format * remove unused imports * add static tests * guard rules API against pk * update view, middle wares and tests * refactor tests by parametrizing * group tests by parameterizing * refactor Rule model to rename attribute from "value" to "parameters" * update test name * update naming * Add coverage-report as part of unit test suite * update env list * implement rule evaluation * add change artifact * update imports * update naming * update rules matching logic * update tests * save request using serializer with the correct instace * group tests * update formatting --------- Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Ali UĞUR <39213991+alithethird@users.noreply.github.com> --- ...ule_alter_backendrequest_paths_and_more.py | 55 ------------------- 1 file changed, 55 deletions(-) delete mode 100644 haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py diff --git a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py b/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py deleted file mode 100644 index 0a7b61e4f..000000000 --- a/haproxy-route-policy/policy/migrations/0002_rule_alter_backendrequest_paths_and_more.py +++ /dev/null @@ -1,55 +0,0 @@ -# Generated by Django 6.0.3 on 2026-03-23 21:53 - -import policy.db_models -import uuid -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("policy", "0001_initial"), - ] - - operations = [ - migrations.CreateModel( - name="Rule", - fields=[ - ( - "id", - models.UUIDField( - default=uuid.uuid4, - editable=False, - primary_key=True, - 
serialize=False, - ), - ), - ( - "kind", - models.TextField( - choices=[("hostname_and_path_match", "hostname_and_path_match")] - ), - ), - ("parameters", models.JSONField()), - ( - "action", - models.TextField(choices=[("allow", "allow"), ("deny", "deny")]), - ), - ("priority", models.IntegerField(blank=True, default=0)), - ("comment", models.TextField(blank=True, default="")), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ], - ), - migrations.AlterField( - model_name="backendrequest", - name="paths", - field=models.JSONField( - blank=True, default=list, validators=[policy.db_models.validate_paths] - ), - ), - migrations.AlterField( - model_name="backendrequest", - name="port", - field=models.IntegerField(validators=[policy.db_models.validate_port]), - ), - ] From 7de256cdc422d3d0abc86259dab937a8409c322c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:52 +0200 Subject: [PATCH 100/201] chore: update Copilot collections to v0.8.0 (#419) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .github/.copilot-collections.yaml | 2 +- .github/skills/documentation-build/SKILL.md | 96 +++ .../skills/documentation-diataxis/SKILL.md | 152 +++++ .github/skills/documentation-review/SKILL.md | 198 ++++++ .../references/doc-review-report-template.md | 68 ++ .../skills/documentation-structure/SKILL.md | 79 +++ .github/skills/documentation-style/SKILL.md | 74 ++ .../references/doc-style-guide.md | 643 ++++++++++++++++++ .github/skills/documentation-verify/SKILL.md | 80 +++ .../references/report_format.md | 144 ++++ .../references/verification_procedures.md | 178 +++++ 11 files changed, 1713 insertions(+), 1 deletion(-) create mode 100644 .github/skills/documentation-build/SKILL.md create mode 100644 .github/skills/documentation-diataxis/SKILL.md create mode 100644 .github/skills/documentation-review/SKILL.md create mode 100644 
.github/skills/documentation-review/references/doc-review-report-template.md create mode 100644 .github/skills/documentation-structure/SKILL.md create mode 100644 .github/skills/documentation-style/SKILL.md create mode 100644 .github/skills/documentation-style/references/doc-style-guide.md create mode 100644 .github/skills/documentation-verify/SKILL.md create mode 100644 .github/skills/documentation-verify/references/report_format.md create mode 100644 .github/skills/documentation-verify/references/verification_procedures.md diff --git a/.github/.copilot-collections.yaml b/.github/.copilot-collections.yaml index 9f1015dde..33f4a2270 100644 --- a/.github/.copilot-collections.yaml +++ b/.github/.copilot-collections.yaml @@ -1,5 +1,5 @@ copilot: - version: "v0.7.0" + version: "v0.8.0" collections: - charm-python - pfe-charms diff --git a/.github/skills/documentation-build/SKILL.md b/.github/skills/documentation-build/SKILL.md new file mode 100644 index 000000000..859bb1aab --- /dev/null +++ b/.github/skills/documentation-build/SKILL.md @@ -0,0 +1,96 @@ +--- +name: documentation-build +description: "Validates documentation builds successfully. Use when checking Sphinx/RTD build integrity or diagnosing build failures. Reports errors, warnings, and build configuration issues." +--- + +# Documentation Build Validation + +## Scope + +Build validation only: detect documentation build configuration, +run applicable build targets, collect all errors and warnings, +and categorise by severity. + +## Inputs + +- Repository root. + +## Actions + +1. **Identify documentation directory**: The `docs/` directory + is the default location for Sphinx documentation. + If not found, search common doc roots in this order: + `doc/`, `documentation/`, `site/`, `docs-src/`. + If still not found, perform a bounded search for `conf.py` + (max depth 4) and use its parent as the docs root. + +2. 
**Detect build configuration**: Check for the presence of: + + - `.readthedocs.yaml` + - `docs/conf.py` + - Makefile targets in the `docs/` directory + + If RTD artefacts are absent in the target repository, + report "not applicable" in findings and exit cleanly. + +3. **Run build targets** (when applicable): + + ```bash + cd docs + make clean + make html + ``` + + Run additional checks for targets that exist. + Check each target before running to avoid false failures: + + ```bash + # Check and run each target if available + for target in spelling linkcheck woke lint-md; do + if make -n $target 2>/dev/null; then + make $target + fi + done + ``` + +4. **Capture output and handle failures**: If any command fails: + + 1. Capture the full error output from stderr and stdout + 2. Run `make clean` to reset build state + 3. Retry the failed build command once + 4. If retry fails, STOP and report all captured errors + 5. Do not proceed to content analysis until build succeeds + + Warnings must be collected and reported, but are not blocking + unless the repository explicitly treats warnings as errors. + +5. **Categorise findings by severity**: + + - **Errors**: Build failures, broken links, missing files. + - **Warnings**: Deprecation notices, missing references, + formatting issues. + - **Info**: Suggestions, minor notices. + +6. **Verify completion**: Confirm the validation completed: + + - Build targets were executed (or determined not applicable) + - Output was captured (errors and warnings) + - Findings were categorised by severity + + State the completion status: + - `✓ Build validation complete: [N] errors, [M] warnings found` + - OR `✓ Build validation complete: No issues found` + - OR `✓ Build validation: Not applicable -- RTD artefacts not detected` + +## Constraints + +- Do not approve documentation that fails the Sphinx build. +- Build docs locally to catch build warnings. 
+- Do not invent Makefile targets; + only use targets confirmed to exist in the target repository. + +## Output + +A build validation report listing all errors and warnings, +categorised by severity. If RTD artefacts are not detected, +report "Not applicable -- RTD artefacts not detected in target repo". diff --git a/.github/skills/documentation-diataxis/SKILL.md b/.github/skills/documentation-diataxis/SKILL.md new file mode 100644 index 000000000..662f49c0b --- /dev/null +++ b/.github/skills/documentation-diataxis/SKILL.md @@ -0,0 +1,152 @@ +--- +name: documentation-diataxis +description: "Analyzes documentation against Diataxis framework (Tutorial, How-to, Reference, Explanation). Use when reviewing documentation structure or classifying content type. Identifies misalignments between declared category and actual content." +--- + +# Diataxis Classification Review + +## Scope + +Diataxis classification only: identify whether each page is a tutorial, +how-to guide, explanation, or reference; note structural mismatches +between content type and declared category, and suggest improvements +where real issues exist. +Ground classification in the Diataxis foundations: the two axes of craft +(action vs cognition, acquisition vs application) define four user needs +(learning, goals, information, understanding). + +This skill identifies flaws and provides actionable recommendations; +it does not enforce compliance or apply pass/fail criteria. + +## Inputs + +- Documentation file(s) under review. +- Diataxis framework principles (embedded in classification criteria below). + +## Actions + +1. **Identify intended category**: Determine the declared category + based on directory location + (`tutorial/`, `how-to/`, `explanation/`, `reference/`) + and file metadata (front matter keys such as `category`, `type`, + `diataxis`, or reST `.. meta::` entries). + +2. 
**Infer actual category**: Analyse the text's structure, tone, + and progression to determine which quadrant it actually resembles. + Use these classification criteria: + + - **Tutorial indicators**: + - Step-by-step progression building a complete project + - Imperative mood ("Create a file", "Run this command") + - Learning-focused language ("you will learn", "by the end") + - Safe, controlled environment (specific versions, no branching) + - Frequent reassurance and checkpoints + - Teaches by doing, not explaining + + - **How-to guide indicators**: + - Problem-solution format with clear goal + - Assumes existing knowledge and competence + - Clearly states prerequisites and applicability scope + - Action-oriented ("To achieve X, do Y") + - Flexible, allows for variation + - Focuses on results, not learning + - Omits explanations unless critical to success + + - **Reference indicators**: + - Descriptive, declarative statements + - Comprehensive coverage of subject + - Neutral, technical tone + - Structured for lookup (tables, lists, alphabetical) + - Parameters, options, API signatures + - Accuracy over narrative + + - **Explanation indicators**: + - Conceptual focus ("why" and "how it works") + - Discursive, exploratory tone + - Comparative analysis + - Context, background, relationships + - Illuminates understanding, not action + - May include history, design decisions, alternatives + - May contain subjective opinions and personal perspectives + +3. **Check user need alignment**: + + Map the page to the user need implied by the action/cognition and + acquisition/application axes (learning, goals, information, + understanding). + + - **Tutorials**: Is it a learning-oriented lesson? + Does it build confidence through doing? Is it linear and safe? + - **How-to guides**: Is it a task-oriented recipe? + Does it help a competent user solve a specific problem? + Is it goal-focused? + - **Reference**: Is it information-oriented? 
+ Does it describe things accurately and completely? + Is it structured for lookup? + - **Explanation**: Is it understanding-oriented? + Does it clarify concepts, context, and relationships? + Is it discursive? + +4. **Note hard-to-fit genres**: Some documentation types do not align + cleanly with a single quadrant (for example, release notes or + contributing guides). Flag these cases explicitly, reference the + Diataxis guidance on complex hierarchies + (https://diataxis.fr/complex-hierarchies/), and choose the closest + fit category for reporting. + +5. **Evaluate quality**: + + - **Functional quality**: Is the content accurate, complete, + consistent, useful, and precise? + - Missing prerequisites or dependencies + - Incomplete steps or procedures + - Inconsistent terminology or naming + - Outdated information (version mismatches, deprecated features) + + - **Deep quality**: Does the content have good flow? + Does it anticipate user questions? + Is the cognitive load appropriate? Is the experience clear? + - Paragraph length (>4 sentences may suggest need for breaking) + - Sentence complexity (nested clauses, dense jargon) + - Transition quality (abrupt topic changes, missing connectives) + - Progressive disclosure (introducing too much too soon) + - User journey mapping (gaps in expected flow) + +6. **Document misalignments**: Explicitly identify where the document + fails to meet the needs of its category, jumps between categories, + or where quality breaks down. For each issue, provide: + - Specific location (section, paragraph) + - Nature of the problem + - Impact on user experience + - Concrete suggestion for improvement + +7. 
**Verify completion**: Confirm the analysis completed: + + - Category classification completed (declared vs inferred) + - User need alignment analyzed + - Quality assessment performed (functional and deep quality) + - Misalignments documented with recommendations + + State the completion status: + - `✓ Diataxis analysis complete: [declared category] → [inferred category], [N] issues found` + - OR `✓ Diataxis analysis complete: Content aligns well with [category]` + +## Constraints + +- Do not ignore the Diataxis framework. +- Assign each page to exactly one quadrant. +- Do not introduce categories beyond the four quadrants. + +## Output + +A Diataxis Analysis Report detailing: + +- Declared category (from metadata/directory structure). +- Inferred category (from content analysis). +- User need alignment analysis (which quadrant best serves the user). +- Functional quality findings (with specific examples). +- Deep quality findings (with specific examples). +- Identified issues and actionable recommendations for improvement. + +If no significant issues are found, state that the documentation +aligns well with its intended category. diff --git a/.github/skills/documentation-review/SKILL.md b/.github/skills/documentation-review/SKILL.md new file mode 100644 index 000000000..15b9e6f9c --- /dev/null +++ b/.github/skills/documentation-review/SKILL.md @@ -0,0 +1,198 @@ +--- +name: documentation-review +description: "Performs comprehensive documentation review including build validation, Diataxis analysis, structure audit, accuracy verification, and style compliance. Use when reviewing documentation changes or auditing documentation quality." +--- + +# Documentation Review + +## Scope + +Orchestration only: defines the end-to-end review workflow, +specifies the order in which atomic skills are invoked, +and renders the final consolidated report using the report template +at `references/doc-review-report-template.md`. 
+ +## Persona + +You are a technical documentation reviewer and editor for the project. +Your job is to ensure the documentation is clear, accurate, +consistent with code, and follows the project's style guide. +You apply the Diataxis framework +(Tutorial, How-to, Explanation, Reference) rigorously. + +## Workflow + +Follow these stages sequentially. **Do not skip stages.** + +### Execution Requirements + +**CRITICAL:** After completing each stage, you MUST: +1. Confirm the skill was actually invoked (not just described) +2. Capture the output and record findings +3. State the completion status explicitly + +**Verification Pattern:** +After each stage, state: +- `✓ Stage [N] complete: [skill-name] generated [N] findings` +- If no findings: `✓ Stage [N] complete: [skill-name] found no issues` + +**Do NOT proceed to Stage [N+1] until Stage [N] is verified complete.** + +--- + +### Stage 1: Build Validation + +**Execute:** Use the `documentation-build` skill to validate the documentation build. + +**Capture:** Record all build errors and warnings. + +**Verify:** Confirm build status (pass/fail) before proceeding. + +**Decision Point:** If the build fails, report build issues immediately and STOP. Do not proceed to content analysis until the documentation builds without errors. + +**Checkpoint:** `✓ Stage 1 complete: documentation-build [passed/failed with N errors]` + +--- + +### Stage 2: Documentation Structure Discovery + +**Execute:** Map the documentation structure before analyzing content. + +**Actions:** +1. List all documentation files under `docs/` (or equivalent) +2. Identify the documentation build system (Sphinx, MkDocs, Jekyll, etc.) +3. Note the directory structure (flat vs. categorized) +4. Record any metadata patterns (frontmatter, sidebar configs) + +**Output:** Store this structural map internally for use in later stages. 
+ +**Checkpoint:** Confirm you have identified: +- [ ] Documentation root directory +- [ ] Build system type +- [ ] File organization pattern +- [ ] Metadata conventions (if any) + +Then state: `✓ Stage 2 complete: Structure mapped ([N] files in [system] with [pattern] organization)` + +--- + +### Stage 3: Diataxis Classification + +**Execute:** Use the `documentation-diataxis` skill to analyze each documentation page. + +**Capture:** Record the declared category (from metadata/directory) and inferred category (from content analysis) for each page. + +**Verify:** Confirm you have classification results for all documentation pages analyzed. + +**Checkpoint:** `✓ Stage 3 complete: documentation-diataxis analyzed [N] pages, found [N] misalignments` + +--- + +### Stage 4: Structure Audit + +**Execute:** Use the `documentation-structure` skill to validate documentation organization. + +**Input Required:** Use the Diataxis classification output from Stage 3 to validate directory placement. + +**Capture:** Record all structural violations (file naming, metadata, directory placement, navigation, cross-references). + +**Verify:** Confirm structural audit completed for all pages. + +**Checkpoint:** `✓ Stage 4 complete: documentation-structure found [N] violations` OR `✓ Stage 4 complete: documentation-structure found no violations` + +--- + +### Stage 5: Accuracy Verification + +**Execute:** Use the `documentation-verify` skill to cross-reference documentation claims against source code. + +**Capture:** Record all accuracy findings grouped by classification (unsupported, outdated, incorrect, imprecise, speculative, inconclusive). + +**Verify:** Confirm code verification completed for all claims in changed documentation. 
+ +**Checkpoint:** `✓ Stage 5 complete: documentation-verify found [N] accuracy issues` OR `✓ Stage 5 complete: documentation-verify found no accuracy issues` + +--- + +### Stage 6: Style Review + +**Execute:** Use the `documentation-style` skill to evaluate documentation against the project style guide. + +**Capture:** Record all style violations with quoted passages from the style guide. + +**Verify:** Confirm style review completed for all documentation. + +**Checkpoint:** `✓ Stage 6 complete: documentation-style found [N] style violations` OR `✓ Stage 6 complete: documentation-style found no style violations` + +--- + +### Stage 7: Consolidated Report + +**Execute:** Synthesize findings from all stages into a structured, actionable review using the report template at `references/doc-review-report-template.md`. + +**Assembly Instructions:** + +For each skill output, extract findings and populate the corresponding report section: + +| Skill | Report Section | Format | +|-------|----------------|--------| +| documentation-build | Build Findings | List of errors/warnings or "No issues found" | +| documentation-verify | Accuracy Findings | Grouped by classification (unsupported/outdated/incorrect/imprecise) | +| documentation-diataxis | Diataxis Findings | Table of declared vs inferred categories, list misalignments | +| documentation-structure | Structure Findings | List of violations or "No issues found" | +| documentation-style | Style Findings | List of violations with quoted style guide passages | + +**Handling Empty Results:** +- If a skill produces no findings: Write "No issues found" in that section +- If a skill is not applicable: Write "Not applicable - [reason]" (e.g., "Not applicable -- RTD artefacts not detected") + +**Priority Order:** Present findings in priority order (highest priority first): + +1. **Build Findings (BLOCKING)** - Must be resolved before content analysis +2. **Accuracy Findings (CRITICAL)** - Code-documentation mismatches +3. 
**Diataxis Findings (HIGH)** - Category misalignments affecting usability +4. **Structure Findings (MEDIUM)** - Organizational and navigation issues +5. **Style Findings (LOW)** - Style guide compliance + +**Checkpoint:** `✓ Stage 7 complete: Consolidated report generated with findings from [N] stages` + +--- + +## Error Handling + +If a stage fails to complete, handle as follows: + +### Build Validation Failure +- Report build errors immediately +- STOP the workflow - do not proceed to content analysis +- Include build errors in the final report + +### Skill Invocation Errors +- Report the error in the corresponding report section +- Continue with remaining stages +- Note the failure in the Summary section + +### Missing Dependencies +- Report what's missing (e.g., "Style guide not found at docs/style-guide.md") +- Mark that stage as "Incomplete" in the report +- Continue with other stages + +### Incomplete Stages +If any stage could not be completed, add an "Incomplete Stages" section to the report listing: +- Which stage failed +- Why it failed +- What's needed to complete it + +--- + +## Constraints + +- Provide criticism and suggestions rather than direct bulk rewrites. +- Do not modify source code to fix documentation + without explicit request. +- Before restructuring large documentation sections + (for example, moving files between tutorial and how-to), ask first. +- Before suggesting new coverage entities, categories, + or metadata patterns, ask first. +- If code examples seem correct + but do not match your understanding of the codebase, ask first. 
diff --git a/.github/skills/documentation-review/references/doc-review-report-template.md b/.github/skills/documentation-review/references/doc-review-report-template.md new file mode 100644 index 000000000..c872ecb65 --- /dev/null +++ b/.github/skills/documentation-review/references/doc-review-report-template.md @@ -0,0 +1,68 @@ +## Documentation Review Report + +**Reviewed:** + +**Date:** + +**Skills Applied:** + +--- + +### Build Findings (BLOCKING) + +*Source: documentation-build* + + + +--- + +### Accuracy Findings (CRITICAL) + +*Source: documentation-verify* + + + +--- + +### Diataxis Findings (HIGH) + +*Source: documentation-diataxis* + + + +--- + +### Structure Findings (MEDIUM) + +*Source: documentation-structure* + + + +--- + +### Style Findings (LOW) + +*Source: documentation-style* + + + +--- + +### Incomplete Stages + +*Only include this section if any stage failed to complete* + + + +--- + +### Summary + + + +**Recommended Action Priority:** +1. Resolve blocking build issues first +2. Fix critical accuracy/code-backing issues +3. Address high-priority Diataxis misalignments +4. Resolve structural issues +5. Apply style corrections diff --git a/.github/skills/documentation-structure/SKILL.md b/.github/skills/documentation-structure/SKILL.md new file mode 100644 index 000000000..c0b075801 --- /dev/null +++ b/.github/skills/documentation-structure/SKILL.md @@ -0,0 +1,79 @@ +--- +name: documentation-structure +description: "Validates documentation structural integrity including heading hierarchy, metadata, file naming, navigation, and cross-references. Use when checking documentation organization or validating toctree structure." +--- + +# Documentation Structure Audit + +## Scope + +Document structure only: heading hierarchy, section ordering, +presence of required sections (introduction, prerequisites, steps, reference), +file naming, metadata blocks, navigation, and cross-references. + +## Inputs + +- Documentation file(s) under review. 
+- Documentation directory structure. +- Diataxis classification output + (from the `documentation-diataxis` skill, when run as part of the orchestrated review). + +## Actions + +1. **File Naming**: + + Verify files use lowercase with dashes + and the correct extension for their syntax + (for example, `connect-vscode.rst` for reST, `connect-vscode.md` for MyST). + +2. **Metadata**: + + Ensure every page has required metadata near the top + when the repository's docs conventions require it: + `.. meta::` after the anchor label for reST, + or the MyST equivalent (front matter or `meta` directive) for Markdown sources. + +3. **Directory Placement**: + + Confirm the file is located in the directory matching its intended + Diataxis category + (for example, tutorials in `tutorial/`, how-to guides in `how-to/`). + +4. **Navigation**: + + Ensure new pages are added to the `toctree`. + +5. **Cross-References**: + + - Prefer stable reference roles + (`:ref:` for reST, `{ref}`/`{numref}` for MyST) over page-level links. + - Flag uses of `:doc:`/`{doc}` (or equivalents) + except for index-like pages that are unlikely to be moved or renamed. + - Suggest adding links to improve documentation discoverability. + - Verify cross-references resolve correctly. + +6. **Verification**: + + Confirm the structural audit completed: + + - File naming checked for all files + - Metadata presence verified + - Directory placement validated (using Diataxis classification if available) + - Navigation structure checked (toctree entries) + - Cross-references validated + + State the completion status: + - `✓ Structure audit complete: [N] violations found` + - OR `✓ Structure audit complete: No violations found` + +## Constraints + +- Do not modify files during the audit; this is a read-only review. +- Do not invent metadata requirements; check against repository's documented conventions. +- Focus on structural issues only; do not flag style preferences. 
+- When Diataxis classification is available (from orchestrated review), use it to validate directory placement. Otherwise, infer from directory name. + +## Output + +A list of structural or metadata violations covering file naming, +metadata, directory placement, navigation, and cross-reference issues. diff --git a/.github/skills/documentation-style/SKILL.md b/.github/skills/documentation-style/SKILL.md new file mode 100644 index 000000000..6e0364bc1 --- /dev/null +++ b/.github/skills/documentation-style/SKILL.md @@ -0,0 +1,74 @@ +--- +name: documentation-style +description: "Enforces project documentation style guide compliance for tone, voice, terminology, punctuation, and formatting. Use when checking documentation style or validating MyST/reST syntax. Cites specific style guide violations." +--- + +# Documentation Style Review + +## Scope + +Style conformance only: tone, voice, terminology, punctuation, +Oxford comma, active versus passive voice, prohibited phrases, +and formatting conventions. + +## Inputs + +- Documentation file(s) under review. +- Normative style asset: `references/doc-style-guide.md`. +- Syntax-specific style guides (fetched at runtime): + - MyST: `https://raw.githubusercontent.com/canonical/sphinx-docs-starter-pack/refs/heads/main/docs/reference/myst-syntax-reference.md` + - reST: `https://raw.githubusercontent.com/canonical/sphinx-docs-starter-pack/refs/heads/main/docs/reference/rst-syntax-reference.rst` + +## Actions + +1. **Load style guides**: Read `references/doc-style-guide.md`. + Fetch the syntax-specific guide matching the file type + (MyST for `.md`, reST for `.rst`). + +2. **Syntax compliance**: Check headings, lists, code blocks, + inline literals, and directives against the applicable syntax guide. + Treat every instruction in the guide as mandatory; + do not rely on a subset of rules. + +3. **Full style guide compliance**: Read and apply all rules defined + in `references/doc-style-guide.md`. 
+ Treat every instruction in the guide as mandatory; + do not rely on a subset of rules. + +4. **Style guide citation**: For every violation found, + locate and quote the specific passage + in `references/doc-style-guide.md` or the syntax-specific guide + that supports the finding. + +5. **Fallback behaviour**: If syntax guides cannot be fetched + (offline or network blocked), continue the review + using `references/doc-style-guide.md` + and the syntax patterns already present in the documentation set. + + If `references/doc-style-guide.md` is unavailable, + STOP the review and report that the style guide cannot be accessed. + +6. **Verification**: + + Confirm the style review completed: + + - Style guides loaded (doc-style-guide.md and syntax-specific guide if available) + - Syntax compliance checked against applicable guide + - Style guide compliance checked against all rules in doc-style-guide.md + - All violations cited with quoted passages from style guides + + State the completion status: + - `✓ Style review complete: [N] violations found` + - OR `✓ Style review complete: No violations found` + +## Constraints + +- Quote style guides when making style suggestions. +- Do not suggest style or markup changes without quoting the style guides. + +## Output + +A list of style violations, each accompanied by: + +- The specific passage quoted from the style guide or syntax reference. +- The observation or suggested change. diff --git a/.github/skills/documentation-style/references/doc-style-guide.md b/.github/skills/documentation-style/references/doc-style-guide.md new file mode 100644 index 000000000..03fe38520 --- /dev/null +++ b/.github/skills/documentation-style/references/doc-style-guide.md @@ -0,0 +1,643 @@ +```{eval-rst} +:orphan: + +.. meta:: + :description: Documentation style guide covering file naming, + structure, semantic line breaks, reStructuredText and Markdown + conventions, terminology, and project-specific patterns. 
+``` + + + +# Documentation style guide + +This style guide documents the established conventions used in the project documentation. It captures actual patterns observed across the documentation set and serves as a reference for maintaining consistency in new contributions. + +This guide is subordinate to your organization's documentation standards but records project-specific decisions and patterns that extend or clarify those standards. + +--- + +## File naming and organization + +**Directory structure** + +The documentation follows the [Diátaxis](https://diataxis.fr/) framework with four main sections: + +``` +docs/ +├── tutorial/ # Step-by-step learning paths +├── how-to/ # Task-oriented guides +├── explanation/ # Conceptual information +└── reference/ # Technical specifications +``` + +**File naming convention** + +All filenames use lowercase letters and dashes for word separation. + +Examples: + +- Good: `part-1-get-started.rst` +- Good: `connect-editor.rst` +- Good: `network-interface.rst` +- Good: `container-vs-dockerfile.rst` +- Avoid: `ConnectEditor.rst` (uppercase) +- Avoid: `network_interface.rst` (underscore) + +Optional: Tutorial files may use a sequential numbering pattern: + +``` +part-1-get-started.rst +part-2-work-with-features.rst +part-3-advanced-concepts.rst +part-4-production-deployment.rst +``` + +How-to files: Use verb-first naming pattern: + +``` +add-configuration.rst +connect-editor.rst +forward-ports.rst +debug-issues.rst +resolve-conflicts.rst +``` + +Explanation files use noun-based naming: + +``` +concepts.rst +interface-concepts.rst +best-practices.rst +runtime-behavior.rst +``` + +Reference files match command structure: + +``` +command-launch.rst +command-connect.rst +build-tool.md +``` + +Filenames and directory names in the documentation repo should be in lowercase, +with dashes instead of spaces; the directory tree must be built in a way that +provides for readable, meaningful URLs: `/docs/howto/change-tyres`. 
+ +--- + +## Page structure and metadata + +**Standard page structure** + +Every documentation page follows a consistent structure: +anchor label, metadata block, page title, opening paragraph, and section hierarchy. +Use the syntax references for exact markup and placement details. + +**Metadata block** + +Every page must have a metadata block immediately after the anchor label. +Use a brief, clear description (typically 1-2 lines), +wrapping at natural phrase boundaries. + +**Anchor labels** + +Use lowercase with underscores or dashes. + +Optional: Prefix labels with section type. +Use prefixes consistently across the docs. + +Prefixes: + +- `tut_` - Tutorial sections +- `how_` - How-to guides +- `exp_` - Explanation articles +- `ref_` - Reference documentation + +Examples: `tut_get_started`, `how_add_actions`, `exp_interface_concepts`, `ref_command_launch`. + +--- + +## Writing style and tone + +**Voice and audience** + +Target audience is developers and technical professionals seeking to: + +* Achieve specific goals without much overhead and roundabout musings +* Perform and conceive complex ad-hoc tasks and workflows that require precision and depth +* Attain understanding of the project's key capabilities beneficial for their scenarios + +Content follows the Diátaxis framework, providing: + +* Concise tutorials for common, starter-level actions and scenarios, eliminating the need to invent custom steps and allowing novice users to journey along the hot path effortlessly +* Elaborate explanations of the thinking behind the project's design, including design decisions, related concepts, and how it should be used +* Detailed how-to guides that address specific needs of advanced users and cover topics beyond basic entry-level operations +* Comprehensive reference of all options, settings, and details available to customize the project's operation in any desirable manner + +The tone is authoritative but relaxed, confident but approachable. 
Think water cooler conversation, not classroom session.
+
+Example:
+
+```text
+The project is a tool for defining and handling development environments.
+
+List your dependencies and components in YAML to define an environment. The key pieces of a definition are components, independent but connectable units of functionality. The project simplifies experiments with your environment layout.
+```
+
+**Active and passive voice**
+
+Use active voice for user actions and cause-effect relationships. Use passive voice when the agent is unknown or the effect is the focus.
+
+Active voice examples (preferred for actions):
+- "The server hosts all files"
+- "You install apps with the App Center"
+- "The validate library checks the form for errors"
+
+Passive voice examples (appropriate for effects):
+- "Before upload, the form is checked for errors"
+- "The files are deleted every time the script runs"
+
+Pattern recognition: Passive voice uses `is/are/was/were/been/being + past participle` (e.g., "is installed", "are checked").
+
+**Direct instructions**
+
+Use imperative mood for instructions. Avoid "you can" or "you may" for required actions.
+
+Preferred:
+
+```
+Install the application using the `--classic` option:
+```
+
+Avoid:
+
+```
+You can install the application with:
+```
+
+**Paragraph length**
+
+Keep paragraphs focused and relatively short (2-5 sentences typically). Complex topics should be broken into multiple paragraphs.
+
+Example:
+
+```restructuredtext
+Install the project,
+upgrading the prerequisites if needed,
+then ensure it runs.
+
+Authenticate to the package manager and install
+using the required options:
+```
+
+**Paragraph structure (optional)**
+
+Where suitable, structure paragraphs using the Topic-Development-Example-Summary (TDES) pattern:
+
+1. **Topic**: Open with a clear statement of what the paragraph addresses
+2. **Development**: Explain the concept or provide necessary context
+3. **Example**: Illustrate with a concrete example
+4. 
**Summary**: Close with the key takeaway or implication + +This pattern is particularly effective for explanatory content but should be applied flexibly; not every paragraph requires all four elements. + +Example applying TDES: + +```text +Interfaces enable communication between components and the host system. +Each interface defines a specific capability, such as network access or GPU usage. +For instance, the `network` interface allows a component to access external services, +while the `gpu` interface provides access to hardware acceleration. +Using interfaces, you can precisely control what resources each component can access. +``` + +In practice, simpler paragraphs may use just Topic-Example or Topic-Development, depending on the content's purpose and complexity. + +**Clarity over cleverness** + +- State prerequisites explicitly +- Define terms at first use +- Avoid assumptions about reader knowledge +- Use precise, unambiguous language + +**Words and phrases to avoid** + +Avoid clichés, violent metaphors, and jargon. Replace them with simpler alternatives: + +- **Clichés** + - `the ability to`, `is able to` → `can` + - `in order to` → `to` + - Avoid: `allow`, `going forward`, `not only...but also` + +- **Violent metaphors** + - `kill`, `terminate` → `stop` + - `execute` → `run` + - `eliminate` → `remove` + +- **Jargon** + - `leverage` → `use` + - `end user` → `user` + - `use case` → `example` or `scenario` + - Avoid: `ecosystem`, `form factor`, `harness`, `next level` + +This is not an exhaustive list; use your best judgment. 
+ +**Latin words and phrases** + +Replace Latin phrases with English equivalents: + +- `e.g.` → `for example`, `such as` +- `i.e.` → `that is`, `in other words` +- `etc.` → `and so on` +- `via` → `through`, `with`, `using` +- `ad hoc` → `unscheduled`, `temporary`, `bespoke` +- `per se` → `necessarily`, `intrinsically` +- `versus`, `vs.` → `compared to`, `opposed to` +- `vice versa` → `the reverse`, `the other way around` +- `circa` → `around`, `near` +- `cf.` → `refer to` + +This is not an exhaustive list; use your best judgment. + +**Demonstrative pronouns** + +Avoid orphan "this", "these", "those", "that" when ambiguous. Pair with the noun for clarity. + +Good: "The `yaml` object is sourced from the `yamllib` library. This object is only available if..." + +Avoid: "The `yaml` object is sourced from the `yamllib` library. This is only available if..." (unclear if referring to object or library) + +**Language and spelling** + +Convention: Use US English spelling, grammar, and formatting conventions throughout the documentation. 
+ +Common US/UK differences: +- Patterns: `-ize` (not `-ise`), `-or` (not `-our`), `-able` (not `-eable`) +- US: `license` (noun and verb), `defense`, `program`, `percent`, `skeptical`, `catalog`, `traveling`, `labeled` +- UK: `licence` (noun), `defence`, `programme` (non-IT), `per cent`, `sceptical`, `catalogue`, `travelling`, `labelled` + +Common technology terms: +- `email`, `online`, `website`, `internet` +- `setup` (noun), `set up` (verb) +- `backup` (noun), `back up` (verb) +- `login` (noun), `log in` (verb) +- `space-separated`, `comma-delimited` +- `open source` (noun), `open-source` (adjective) + +Examples: +- Good: `color`, `center`, `analyze`, `behavior` +- Avoid: `colour`, `centre`, `analyse`, `behaviour` +- Good: Use serial comma: "components, interfaces, and environments" +- Good: Double quotes for quotations: "The project is a tool" + +**Contractions** + +Acceptable: `aren't`, `can't`, `couldn't`, `didn't`, `doesn't`, `don't`, `hadn't`, `hasn't`, `haven't`, `isn't`, `it's`, `mustn't`, `wasn't`, `won't`, `wouldn't`, `you're`, `you've`, `you'll`, `we're`, `we've` + +Forbidden: `ain't` (colloquial), `gonna`, `gotta`, `something's` (confusion with possessive), `I'd`, `I'll` (avoid first person) + +**Dates and numbers** + +Date format: +- Single day: `1 January 2013` +- Range within month: `1-2 January 2013` +- Range across months: `1 January - 2 February 2013` + +Numbers: +- Spell out below 10: `seven servers` +- Use digits from 10 onwards: `15 containers` +- Exception: Always use digits for units of measurement: `5 GB`, `3 seconds` +- Use commas for thousands: `7,000` not `7000` + +--- + +## Semantic line breaks (optional) + +**Pattern** + +The documentation consistently uses semantic line breaks (one line per clause or significant phrase) in reStructuredText files. This improves version control diffs and editing precision. 
+ +Rationale: Semantic breaks make git diffs more readable and help reviewers identify exactly what changed in a sentence or paragraph. + +**Implementation** + +Break lines at natural semantic boundaries: +- After each complete clause +- Before coordinating conjunctions (and, but, or) +- Before relative clauses (which, that, who) +- After introductory phrases + +Example: + +```restructuredtext +This is the first section of the :ref:`four-part series `; +a practical introduction +that takes you on a tour +of the essential |project_markup| activities. +``` + +MyST equivalent: + +````markdown +This is the first section of the {ref}`four-part series `; +a practical introduction +that takes you on a tour +of the essential |project_markup| activities. +```` + +**When to break** + +Break after: +- Complete independent clauses +- Introductory prepositional phrases +- Transitional phrases +- Items in a complex series + +Keep together: +- Short phrases that form a single unit +- Inline markup and its target word +- Cross-reference markup + +Example: + +```restructuredtext +Interfaces are a mechanism for communication and resource sharing. +It is an integral part of environment isolation, +ensuring that each environment operates in its own isolated context, +while still allowing controlled interactions among the components and with the host. +``` + +--- + +## Headings and titles + +**Capitalization** + +Pattern: Sentence case for all headings (capitalize only first word and proper nouns). + +Exception: Product names and proper nouns maintain their capitalization. 
+
+**Heading constraints**
+
+- Headings must not end with a period
+- Avoid links in headings
+- Use `code` styling sparingly in headings (only when essential, such as command references)
+- Headings must not be followed directly by a subheading (provide introductory content)
+- Do not skip heading levels (e.g., h1 followed by h3)
+
+**How-to title pattern**
+
+How-to guides follow the pattern: "How to [action] [object]" and use imperatives, not gerunds:
+- Good: "How to create an instance" (imperative)
+- Avoid: "How to creating an instance" or "Creating an instance" (gerund)
+
+More examples of conforming titles:
+- How to forward ports with tunneling
+- How to fix connection conflicts
+- How to debug issues in environments
+
+Linking exception: In navigation and links, drop "How to" prefix and use infinitive:
+
+```restructuredtext
+How-to guides:
+
+* Debug issues in environments
+* Connect IDE to an environment
+```
+
+---
+
+## Markup and formatting policies
+
+Use the syntax references for markup details.
+Apply the policies below for project-specific conventions.
+
+**Admonition placement:** Place admonitions at the end of the subsection they relate to, rather than interrupting the flow of text in the middle of a section. This is especially relevant for multiple admonitions per section.
+
+**Inline markup**
+
+Semantic markup preference: Use semantic markup roles (for example, sample values, environment variables, file paths, commands, GUI labels, and programs) instead of generic emphasis. Choose the most specific role that suits the purpose and use it consistently.
+
+Use italics sparingly to introduce new terms and for emphasis. Leave bold for product names and commands.
+
+Commands in command roles should be presented in their complete form and should not be used as verbs or nouns in the text.
+Use non-breaking spaces to prevent longer compound commands from wrapping.
+
+End directory path names with a slash where possible and conventional to disambiguate directories from files. 
+ +Format placeholders in uppercase within angle brackets, without underscores. + +**Non-breaking spaces:** Use non-breaking spaces for important proper names and compound commands where line breaks would be awkward. + +**"See also" sections (optional)** + +"See also" sections can appear on pages under any pillar and link to related content not immediately essential but potentially useful. Break link lists down by pillar, listing pillars and individual subsections in alphabetical order. + +**Tab headings** + +Pattern: Keep tab headings noun-based and consistent across related content. Avoid unintended "sticky toggling" (where tab state persists inappropriately across different contexts). + +**Sphinx extensions and roles** + +Preference: Use Sphinx-specific [roles](https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html) and [directives](https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html) over `docutils` generic equivalents. Use all their options and capabilities, listing options in alphabetical order. + +**Spacing and formatting** + +Section gaps: Include a non-cumulative two-line gap (two blank lines) after code samples, lists, tables, and before headings for visual clarity. + +--- + +**Simplified markup for GitHub** + +Use simplified markup for files that have special meaning on GitHub and need to be rendered there (such as `README.md`, `CONTRIBUTING.rst`, `SECURITY.rst`). For example, don't use `$` prompts in command samples for these files because GitHub doesn't prevent their selection during copying, which can confuse users. + +--- + +**Command prompts and code blocks** + +**DO NOT** use `$` or `#` prompts in code samples except when using the `console` lexer, which makes them non-selectable. Prompts cause problems for users who copy-paste code. + +It is ONLY acceptable to use the `$` prompt when it's non-selectable. The `console` lexer in `.. 
code-block::` automatically handles this, making the prompt non-selectable during copy operations.
+
+**Avoid inline comments** in bash code blocks. Use prose before, after, or between code blocks instead:
+
+Avoid:
+
+```bash
+juju deploy wordpress
+juju deploy ntp-master --to 2 # colocates with wordpress
+```
+
+Preferred:
+
+```
+Deploy wordpress, then colocate ntp-master with it:
+```
+
+```bash
+juju deploy wordpress
+juju deploy ntp-master --to 2
+```
+
+**Code block length**: Limit code blocks to approximately 40 lines. Longer blocks are rarely read; consider breaking them up or offering as downloadable files.
+
+**Separate input and output**: Don't combine commands and their output in one block. Separate them with explanatory text:
+
+Avoid:
+
+```
+juju status
+environment: gce3
+machines:
+...
+```
+
+Preferred:
+
+```
+Check the current state:
+```
+
+```bash
+juju status
+```
+
+```
+This returns the current state of each unit:
+```
+
+```
+environment: gce3
+machines:
+...
+```
+
+**Placeholders**
+
+Use uppercase within angle brackets for placeholders: `<CHANNEL>`, `<FILE-NAME>`
+
+For longer code blocks, consider defining placeholders as environment variables:
+
+```bash
+CHANNEL=1.30/stable
+```
+
+Then use them in commands:
+
+```bash
+juju download easyrsa --channel=$CHANNEL
+juju download kubernetes-worker --channel=$CHANNEL
+```
+
+This approach:
+- Separates user-supplied data from commands
+- Enables blocks to be copied without modification
+- Reduces the chance of user errors
+
+**UI interaction guidance**
+
+Don't use UI elements as verbs or nouns in prose. 
Link them to actions:
+
+- Good: "Click **Save** to save your settings"
+- Avoid: "**Save** your settings" (using button text as verb)
+
+**Interaction verbs**:
+- Use `Click` for buttons (or `Tap` for primarily mobile products)
+- Use `Select` for dropdowns, multiple options, or menu navigation
+- Use `Press` for keyboard shortcuts and keys (NOT `Click`)
+
+Examples:
+- Click **Settings** to open user settings
+- Select the machines you want to register, then click **Save**
+- Press `Ctrl + C` to copy
+- Press the `Enter` key to continue
+- Select **Preferences > Languages > English** (using `>` for navigation)
+
+**Formatting**:
+- Bold UI elements: **Save**, **File**, **Settings**
+- Use `>` for menu navigation: **File > New > Document**
+- Italics for quoted UI text: Click the link in _"You can register new computers..."_
+
+**Checkboxes**: Use `Select`/`Clear` or `Check`/`Uncheck` (consistent pairs):
+- Select the **Enable firewall** checkbox
+- Clear the **Add bookmark** checkbox
+
+**Icons vs buttons**: Minimize use of "icon" and "button" terminology unless needed for clarity. When using images, provide alt text or write the name directly after.
+
+**Configuration examples**
+
+Always include a caption when known.
+
+Indentation: Use commonly recognized formatting:
+- YAML files: 2-space indentation
+- JSON files: 4-space indentation
+
+**Multi-line shell commands**
+
+Use backslash continuation or explicit line breaks.
+
+---
+
+## Cross-references and links
+
+**Internal cross-references**
+
+Prefer `:ref:`/`{ref}` with semantic anchor labels.
+Use `:doc:`/`{doc}` only when no target exists and no target can be added,
+such as pages intentionally designed without anchors (for example, index or release notes).
+
+**First mention pattern**
+
+Link important terms only at first mention on a page. Avoid excessive linking.
+
+**Reference label convention**
+
+Use the following pattern for anchor labels: `.. _{prefix}_{descriptive_name}:`. 
+Prefix indicates the section type (ref/how/exp/tut). + +--- + +**Command names** + +In example blocks, use exact subcommand syntax: + +``` +project-tool launch +project-tool connect +build-tool build +``` + +When inline, shorter references are acceptable: the `launch` command. + +**Command line terminology** + +Convention: Use [POSIX utility conventions](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html) when discussing command-line syntax, options, arguments, and other CLI elements. + +--- + +## Documentation quality principles + +Use the following in addition to general Diátaxis guidelines: + +**Clarity** + +- State assumptions explicitly +- Define prerequisites clearly +- Avoid jargon without explanation +- Use consistent terminology + +**Usability** + +- Focus on actionable information +- Use direct imperatives for instructions +- Break complex tasks into clear steps +- Provide working examples + +**Precision** + +- Avoid ambiguous language +- Use exact commands and syntax +- Specify versions when relevant +- Maintain consistent structure diff --git a/.github/skills/documentation-verify/SKILL.md b/.github/skills/documentation-verify/SKILL.md new file mode 100644 index 000000000..18ee42434 --- /dev/null +++ b/.github/skills/documentation-verify/SKILL.md @@ -0,0 +1,80 @@ +--- +name: documentation-verify +description: "Verifies documentation accuracy by cross-referencing claims, CLI commands, API signatures, and configuration against source code. Use when validating documentation correctness or checking code-docs consistency. Flags unsupported or outdated claims." +--- + +# Documentation Accuracy Verification + +## Scope + +Accuracy verification only: cross-reference documentation claims, commands, API names, and configuration keys against source code in the same repository. Flag anything that cannot be verified. + +## Inputs + +- Changed documentation files (from `git diff`). 
If `git diff` is unavailable or empty, use files explicitly provided for review; otherwise, treat all documentation files under `docs/` as changed. +- Full codebase. +- Test files, configuration schemas. +- Documentation structure. + +--- + +## Workflow + +Follow this three-stage process to verify documentation accuracy: + +### Stage 1: Discovery Scan + +**Objective**: Identify documentation claims and form initial hypotheses. + +1. Run `git diff` to list changed documentation files. +2. Categorize changes into claim types (behaviour, CLI, API, config, examples, error messages, etc.). +3. Form initial hypotheses: Supported, Unsupported, Speculative, Ambiguous, or Outdated. + +**For detailed categorization and hypothesis formation procedures**, see `references/verification_procedures.md` → Discovery Scan. + +--- + +### Stage 2: Verification Pass + +**Objective**: Verify every hypothesis with code evidence. Code is the source of truth. + +**CRITICAL**: Complete verification before reporting any claims. + +1. Use **at least two search strategies** per claim (direct search, entrypoint tracing, test evidence, schema/validation search). +2. Apply claim-type-specific verification checklists (behaviour, CLI, API, config, examples, errors, terminology). +3. Document evidence for each finding (file paths, line numbers, search commands). +4. Reclassify hypotheses based on verification results. +5. Apply false-positive prevention rules. +6. Cross-check documentation coverage. + +**For comprehensive verification checklists and classification rules**, see `references/verification_procedures.md` → Verification Pass. + +--- + +### Stage 3: Report Generation + +**Objective**: Present verified findings with evidence and conservative recommendations. + +1. Group findings by final classification (unsupported, outdated, incorrect, imprecise, speculative, inconclusive, no issues). +2. 
Format each finding using the standard template (doc claim, verification checklist, code evidence, assessment, recommended action). +3. Provide conservative change suggestions aligned with code reality. +4. Link to specific code artifacts (files, functions, structs, line numbers). +5. Integrate with other analysis findings (e.g., Diataxis compliance). + +**For report formatting template and change suggestion guidelines**, see `references/report_format.md`. + +--- + +## Constraints + +- Complete the verification pass (Stage 2) before reporting any claims. +- Provide code evidence for all documentation consistency claims. +- Code is the source of truth: flag documentation that contradicts code behaviour, not vice versa. +- Do not claim documentation is "unsupported by code" without verification evidence (at least two search strategies with explicit code search and no-match confirmation). +- Do not report false positives. +- Do not prefer "unsupported" when docs are vague or imprecise; use accurate classifications. +- Do not recommend changing code to match docs as the primary action; only documentation should be adjusted to match code reality. + +## Output + +Only verified findings with evidence make it to the final report. Do not include intermediate hypotheses or reasoning. diff --git a/.github/skills/documentation-verify/references/report_format.md b/.github/skills/documentation-verify/references/report_format.md new file mode 100644 index 000000000..de6a96a3b --- /dev/null +++ b/.github/skills/documentation-verify/references/report_format.md @@ -0,0 +1,144 @@ +# Documentation Verification Report Format + +This reference defines how to structure and format the final verification report. + +--- + +## Report Structure + +### 1. 
Group Findings by Classification + +Organize findings into these categories: + +- **Confirmed unsupported**: Docs describe behaviour or options not present in code +- **Docs outdated**: Code exists but with different values or behaviour than documented +- **Docs incorrect**: Code contradicts doc claim +- **Docs imprecise**: Code behaviour more nuanced than docs suggest +- **Docs speculative**: Describes intended future behaviour (not yet implemented) +- **Inconclusive**: Cannot verify (requires human review) +- **No issues found**: All doc claims backed by code (state this explicitly) + +--- + +## Finding Format Template + +For each verified finding, use this structure: + +````markdown +**Doc Claim**: [File path:line] "[Quoted claim from docs]" + +**Verification Checklist**: +- [ ] Search strategies used: [list at least two strategies] +- [ ] Code location(s) checked: [file paths] +- [ ] Test evidence: [test file/function or "Not found"] +- [ ] Schema/validation: [struct/parser location or "Not found"] + +**Code Evidence**: +- **Expected**: [What docs claim should exist] +- **Found**: [What code actually shows, with file:line references] +- **Assessment**: [Supported | Unsupported | Outdated | Incorrect | Imprecise | Inconclusive] + +**Issue**: [Classification from list above] +- [Brief description of mismatch] + +**Recommended Action**: +- [File path]: [Specific minimal edit to restore correctness] +- Rationale: [Why this edit aligns docs with code] +- Alternative: [If docs are ahead of code, suggest opening an issue or reverting speculative claim] +```` + +--- + +## Conservative Change Suggestions + +When recommending actions, follow these guidelines: + +### For "Docs Outdated" +Update specific values or behaviour descriptions to match current code. 
+ +**Example**: +``` +Recommended Action: +- docs/config.md:42: Change default value from `timeout: 30s` to `timeout: 60s` +- Rationale: Code shows default in config/defaults.go:15 is time.Duration(60 * time.Second) +``` + +### For "Docs Incorrect" +Correct the claim with precise wording from code. + +**Example**: +``` +Recommended Action: +- docs/api.md:78: Change "returns HTTP 200 on success" to "returns HTTP 201 on success" +- Rationale: Handler at handlers/create.go:45 returns http.StatusCreated (201) +``` + +### For "Docs Imprecise" +Add qualifiers, conditions, or edge-case notes. + +**Example**: +``` +Recommended Action: +- docs/cli.md:120: Add note "Only available when --experimental flag is enabled" +- Rationale: Feature gated behind ExperimentalMode check at cmd/feature.go:33 +``` + +### For "Confirmed Unsupported" +Revert or remove unsupported claim. If claim represents intended behaviour, change to future tense and add note. + +**Example**: +``` +Recommended Action: +- docs/features.md:89: Remove claim about auto-retry functionality +- Alternative: If feature is planned, change to "Auto-retry functionality is planned for future release" +``` + +### For "Docs Speculative" +Mark as future or intended, not current behaviour. + +**Example**: +``` +Recommended Action: +- docs/roadmap.md:15: Change "The system supports" to "The system will support (planned)" +- Rationale: No implementation found; appears to be planned feature +``` + +### For "Inconclusive" +Provide specific human review action. + +**Example**: +``` +Recommended Action: +- Request human review: Cannot locate implementation for documented retry logic +- Check: Is this implemented via external library? 
Search plugin system +``` + +--- + +## Linking to Code Artifacts + +Always reference: +- Specific files with line numbers (e.g., `auth/handler.go:145-167`) +- Function or struct names (e.g., `parseConfig()`, `ServerConfig struct`) +- Test files that exercise the behaviour (e.g., `auth_test.go:TestLoginSuccess`) +- Search commands used (e.g., `grep -r "timeout" config/`) + +--- + +## Report Integration + +- Prioritize code backing findings appropriately (blocking issues for incorrect or unsupported claims) +- Cross-reference with Diataxis compliance findings to identify if inaccuracies stem from category misalignment +- Prepare evidence-based recommendations with code references + +--- + +## Final Output Rule + +**Only verified findings with evidence make it to the final report.** + +Do NOT include: +- Intermediate hypotheses +- Reasoning process +- Unverified claims +- Speculation without code backing diff --git a/.github/skills/documentation-verify/references/verification_procedures.md b/.github/skills/documentation-verify/references/verification_procedures.md new file mode 100644 index 000000000..8f742844a --- /dev/null +++ b/.github/skills/documentation-verify/references/verification_procedures.md @@ -0,0 +1,178 @@ +# Documentation Verification Procedures + +This reference provides comprehensive procedures for verifying documentation accuracy against source code. + +--- + +## Table of Contents + +1. [Discovery Scan Procedures](#discovery-scan-procedures) +2. [Verification Pass Checklist](#verification-pass-checklist) +3. [Evidence Documentation](#evidence-documentation) +4. [Classification Rules](#classification-rules) +5. [False-Positive Prevention](#false-positive-prevention) + +--- + +## Discovery Scan Procedures + +### Step 1: Identify Changed Documentation Claims + +Run `git diff` to list changed documentation files. 
For each changed file, categorize changes into: + +**Claim Categories:** + +- **Behaviour claims**: Assertions about how the project, commands, or features behave. +- **Options/defaults/constraints**: Documented flags, configuration keys, default values, allowed values, validation rules. +- **Examples**: Code samples, command invocations, YAML/JSON configurations, expected outputs. +- **CLI surface**: Command names, subcommands, flags, help text, output formats. +- **API surface**: REST endpoints, request/response formats, client method signatures, schemas. +- **Error messages**: Documented error text, exit codes, diagnostic output. +- **Terminology/renames/deprecations**: Changed names, deprecated features, migration paths. +- **Interface/component behaviour**: Connection types, interaction mechanics, isolation rules. + +### Step 2: Form Initial Hypotheses + +For each claim, assign a preliminary classification: + +- **Supported**: Claim appears to match code structure (preliminary). +- **Unsupported**: Claim appears inconsistent with code (preliminary). +- **Speculative**: Claim describes future or intended behaviour without code backing. +- **Ambiguous**: Unclear whether claim matches code (needs deeper investigation). +- **Outdated**: Claim may describe previous code behaviour. + +--- + +## Verification Pass Checklist + +**CRITICAL**: Code is the source of truth. Complete verification before reporting any claims. + +### Search Strategies + +Use **at least two distinct search strategies** per claim: + +1. **Direct identifier search**: Search for exact names, keys, constants, struct fields using `grep -r`, `git grep`, ripgrep, or language-specific tools. +2. **Entrypoint tracing**: Follow from CLI, config, or API entrypoint to implementation. +3. **Test evidence search**: Locate tests that exercise the claimed behaviour in test files. +4. **Schema/validation search**: Find parsers, validators, schema generators, struct tags, validation functions. 
+ +### Verification Checklist by Claim Type + +#### Behaviour Claims + +- [ ] Locate implementation code path +- [ ] Verify behaviour matches documented description +- [ ] Check for conditional behaviour (flags, modes, edge cases) +- [ ] Confirm error handling matches docs + +#### Options/Defaults/Constraints + +- [ ] Find struct field or config key definition +- [ ] Extract actual default value from code +- [ ] Find allowed values (enums, validation statements, regex patterns) +- [ ] Verify constraint enforcement + +#### Examples + +- [ ] Confirm example syntax matches actual parser expectations +- [ ] If example shows command output, verify against golden test files or actual execution +- [ ] Confirm field names, indentation, and structure match code expectations +- [ ] Check that referenced flags and options exist in code + +#### CLI Surface + +- [ ] Locate command definition in the CLI framework location +- [ ] Verify command name, aliases, subcommands match +- [ ] Check flag definitions (name, shorthand, type, default, help text) +- [ ] Confirm help text matches command definition +- [ ] Verify output formatting (column headers, sorting) + +#### API Surface + +- [ ] Find route definition +- [ ] Verify HTTP method, path, versioning +- [ ] Check request/response struct definitions +- [ ] Confirm client method signature +- [ ] Verify backward compatibility + +#### Error Messages + +- [ ] Search codebase for exact error text or pattern +- [ ] Verify error is returned in documented scenario +- [ ] Check error message format follows style guide + +#### Terminology/Renames/Deprecations + +- [ ] Search for old name to confirm it is truly deprecated or removed +- [ ] Find deprecation markers, aliases, or migration helpers +- [ ] Check changelog, release notes, or version gating logic +- [ ] Verify new name exists and is used consistently + +--- + +## Evidence Documentation + +### For Claims Supported by Code + +Document: +- File path, function or struct, and line range 
+- Assessment: `Supported (verified at [file:line])` + +### For Claims Not Supported by Code + +Document: +- Searches performed (at least two strategies with specific search terms) +- What was expected versus what was found +- Assessment: `Unsupported (expected [X], found [Y] at [file:line])` + +### For Inconclusive Claims + +Document: +- Search attempts performed +- What evidence is missing or ambiguous +- Assessment: `Inconclusive (needs human review: [specific check])` + +--- + +## Classification Rules + +Based on verification evidence, reclassify hypotheses using this decision matrix: + +| Original Hypothesis | Verification Outcome | Final Classification | +|---------------------|----------------------|----------------------| +| Unsupported | Found matching code | Retract claim (docs are correct) | +| Unsupported | Found code with different default | Docs outdated (needs value update) | +| Unsupported | No code evidence after thorough search | Confirmed unsupported (docs ahead of code) | +| Supported | Code contradicts doc claim | Docs incorrect (needs correction) | +| Ambiguous | Tests confirm behaviour | Supported (test-backed) | +| Ambiguous | Cannot locate relevant code | Inconclusive (flag for human review) | + +--- + +## False-Positive Prevention + +Apply these rules to avoid false positives: + +1. **Do not claim "unsupported" without documented code search evidence** + - Must have at least two strategies with explicit search terms + +2. **Prefer conservative classifications**: + - Prefer "inconclusive" over "unsupported" when code is complex or evidence is indirect + - Prefer "outdated" over "unsupported" when code exists but with different behaviour or values + - Prefer "imprecise" over "incorrect" when docs are vague but not technically wrong + - Retract claim entirely if verification confirms docs are accurate + +3. 
**Cross-check documentation coverage**: + - Before claiming "unsupported", verify the entity is not documented elsewhere + - Search `docs/` for related terms, alternative phrasings, synonyms + - If claim is supported elsewhere, classify as "present but undiscoverable" instead + +--- + +## Content Completeness Check + +Before finalizing the report, check that all relevant topics are covered, especially for reference documentation: + +- **CLI**: Verify command-line interface changes are reflected in CLI reference documentation +- **Configuration**: Check that new configuration options are documented in the reference section +- **APIs and schemas**: Validate that API and schema modifications are properly documented From eb052a9825816ba8aa07607f12e7dd863376728a Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:51:52 +0200 Subject: [PATCH 101/201] use python3 to run manage script instead of uv --- haproxy-route-policy/snap/hooks/configure | 5 ----- haproxy-route-policy/snap/scripts/bin/manage | 2 +- haproxy-route-policy/snap/snapcraft.yaml | 3 +-- 3 files changed, 2 insertions(+), 8 deletions(-) diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure index ccc0f6441..75634aa49 100644 --- a/haproxy-route-policy/snap/hooks/configure +++ b/haproxy-route-policy/snap/hooks/configure @@ -48,10 +48,5 @@ export DJANGO_DATABASE_USER DJANGO_DATABASE_NAME="$(snapctl get database-name)" export DJANGO_DATABASE_NAME -if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJANGO_DATABASE_USER" ] || [ -z "$DJANGO_DATABASE_PASSWORD" ] || [ -z "$DJANGO_DATABASE_NAME" ]; then - >&2 echo "One or more database configuration values are missing. Please ensure database-host, database-port, database-user, database-password, and database-name are all set." 
- return 1 -fi - snapctl stop "$SNAP_INSTANCE_NAME" snapctl start "$SNAP_INSTANCE_NAME" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index f051b38c8..6f6c9fea8 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -32,4 +32,4 @@ if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJ return 1 fi -exec $SNAP/bin/uv run $SNAP/app/manage.py "$@" --settings=haproxy_route_policy.settings +exec $SNAP/app/venv/bin/python3 $SNAP/app/manage.py "$@" diff --git a/haproxy-route-policy/snap/snapcraft.yaml b/haproxy-route-policy/snap/snapcraft.yaml index 813eac1fb..7c63761f5 100644 --- a/haproxy-route-policy/snap/snapcraft.yaml +++ b/haproxy-route-policy/snap/snapcraft.yaml @@ -23,10 +23,9 @@ parts: source: . build-snaps: - astral-uv - stage-snaps: - - astral-uv override-build: | # Also copy the source code to the install directory for the manage.py script + UV_PROJECT_ENVIRONMENT=venv uv sync cp -r . 
$SNAPCRAFT_PART_INSTALL/app chown -R 584792:584792 $SNAPCRAFT_PART_INSTALL/app craftctl default From b976869f7d4e433591615fa9ff62d17876c4bd7c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 102/201] bump snap version --- haproxy-route-policy/snap/snapcraft.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/snap/snapcraft.yaml b/haproxy-route-policy/snap/snapcraft.yaml index 7c63761f5..c070370fd 100644 --- a/haproxy-route-policy/snap/snapcraft.yaml +++ b/haproxy-route-policy/snap/snapcraft.yaml @@ -3,7 +3,7 @@ name: haproxy-route-policy base: core24 -version: "0.1" +version: "0.2" license: Apache-2.0 summary: HAProxy Route Policy API description: | From 7a4b735142e3e937c92a39a935df7d6edd309d6c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 103/201] add spread test workflow (#420) --- .github/workflows/docs_spread.yaml | 18 +++++++++++++++ .gitignore | 2 ++ docs/tutorial/getting-started.md | 14 +++++++++++- spread.yaml | 35 ++++++++++++++++++++++++++++++ 4 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/docs_spread.yaml create mode 100644 spread.yaml diff --git a/.github/workflows/docs_spread.yaml b/.github/workflows/docs_spread.yaml new file mode 100644 index 000000000..323130614 --- /dev/null +++ b/.github/workflows/docs_spread.yaml @@ -0,0 +1,18 @@ +name: Automated spread testing + +on: + workflow_dispatch: + pull_request: + paths: + - 'docs/tutorial/getting-started.md' + schedule: + - cron: 0 9 * * 1 # At 09:00 UTC on Monday, aligned with the weekly haproxy stable release. 
+ +jobs: + docs-checks: + uses: canonical/operator-workflows/.github/workflows/docs_spread.yaml@main + secrets: inherit + with: + input-file: docs/tutorial/getting-started.md + output-dir: tests/spread/tutorial + spread-job: github-ci:ubuntu-24.04:tests/ diff --git a/.gitignore b/.gitignore index c08ae9c7e..6326091fe 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,5 @@ terraform/**/*.tfstate* haproxy-route-policy/db.sqlite3 haproxy-route-policy/.python-version +tests/spread/tutorial/ +**/.spread-reuse* diff --git a/docs/tutorial/getting-started.md b/docs/tutorial/getting-started.md index 7cdeb1f64..9bdd156fa 100644 --- a/docs/tutorial/getting-started.md +++ b/docs/tutorial/getting-started.md @@ -9,6 +9,8 @@ In this tutorial we'll deploy the HAProxy charm to provide ingress to a backend You will need a working station, e.g., a laptop, with AMD64 architecture. Your working station should have at least 4 CPU cores, 8 GB of RAM, and 50 GB of disk space. + + ````{tip} You can use Multipass to create an isolated environment by running: ``` @@ -16,6 +18,8 @@ multipass launch 24.04 --name charm-tutorial-vm --cpus 4 --memory 8G --disk 50G ``` ```` + + This tutorial requires the following software to be installed on your working station (either locally or in the Multipass VM): @@ -69,6 +73,10 @@ juju integrate haproxy:certificates self-signed-certificates Once all the application has settled into an "Idle" state, we can verify by sending a request to the HAProxy's IP address: + + ```sh HAPROXY_IP=$(juju status --format json | jq -r '.applications.haproxy.units."haproxy/0"."public-address"') curl $HAPROXY_IP @@ -100,6 +108,10 @@ juju integrate pollen haproxy:haproxy-route Let's check that the request has been properly proxied to the backend service using the `pollinate` script: + + ```sh HAPROXY_IP=$(juju status --format json | jq -r '.applications.haproxy.units."haproxy/0"."public-address"') echo "$HAPROXY_IP pollen.internal" | sudo tee /etc/hosts @@ -125,7 +137,7 @@ 
Well done! You've successfully completed the HAProxy tutorial. To remove the model environment you created, use the following command: ``` -juju destroy-model haproxy-tutorial +juju destroy-model haproxy-tutorial --no-prompt ``` ## Next steps diff --git a/spread.yaml b/spread.yaml new file mode 100644 index 000000000..b15cc34e8 --- /dev/null +++ b/spread.yaml @@ -0,0 +1,35 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +project: haproxy-operator-tests + +backends: + github-ci: + type: adhoc + + allocate: | + echo "Allocating ad-hoc $SPREAD_SYSTEM" + if [ -z "${GITHUB_RUN_ID:-}" ]; then + FATAL "this back-end only works inside GitHub CI" + exit 1 + fi + echo 'ubuntu ALL=(ALL) NOPASSWD:ALL' | sudo tee /etc/sudoers.d/99-spread-users + ADDRESS localhost:22 + discard: | + echo "Discarding ad-hoc $SPREAD_SYSTEM" + systems: + # username and password are required because docs-spread.yaml creates a new user (ubuntu:ubuntu) + # Before tests are ran. + - ubuntu-24.04: + username: ubuntu + password: ubuntu + workers: 1 + +suites: + tests/spread/: + summary: Automated spread testing + systems: + - ubuntu-24.04 + +path: /home/spread/proj +kill-timeout: 1h From 3e84b1ff9fe9cdd8f97b41d189bb271e22574ca2 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 104/201] fix: add proper type to hosts in haproxy libraries (#383) --- docs/changelog.md | 4 ++++ docs/release-notes/artifacts/pr0383.yaml | 13 +++++++++++++ .../lib/charms/haproxy/v1/haproxy_route_tcp.py | 10 +++++----- .../lib/charms/haproxy/v2/haproxy_route.py | 8 ++++---- 4 files changed, 26 insertions(+), 9 deletions(-) create mode 100644 docs/release-notes/artifacts/pr0383.yaml diff --git a/docs/changelog.md b/docs/changelog.md index 6d2d081ef..658e6192b 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -8,6 +8,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
Each revision is versioned by the date of the revision. +## 2026-04-01 + +- Fixed hosts validation in haproxy-route and haproxy-route-tcp relation libraries. + ## 2026-01-19 - Fixed issues with the DDoS protection configurator charm found in staging. diff --git a/docs/release-notes/artifacts/pr0383.yaml b/docs/release-notes/artifacts/pr0383.yaml new file mode 100644 index 000000000..a5a01d8d3 --- /dev/null +++ b/docs/release-notes/artifacts/pr0383.yaml @@ -0,0 +1,13 @@ +version_schema: 2 +changes: + - title: Fix hosts validation in haproxy-route-tcp, and haproxy-route relation libraries + author: skatsaounis + type: bugfix + description: > + Fixed hosts validation in the haproxy-route-tcp and haproxy-route relation libraries, ensuring + that only valid IP addresses are accepted. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/383 + visibility: public + highlight: false diff --git a/haproxy-operator/lib/charms/haproxy/v1/haproxy_route_tcp.py b/haproxy-operator/lib/charms/haproxy/v1/haproxy_route_tcp.py index 48c2f8a6b..2eff32178 100644 --- a/haproxy-operator/lib/charms/haproxy/v1/haproxy_route_tcp.py +++ b/haproxy-operator/lib/charms/haproxy/v1/haproxy_route_tcp.py @@ -186,7 +186,7 @@ def _on_haproxy_route_data_available(self, event: EventBase) -> None: # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 1 +LIBPATCH = 2 logger = logging.getLogger(__name__) HAPROXY_ROUTE_TCP_RELATION_NAME = "haproxy-route-tcp" @@ -976,7 +976,7 @@ def __init__( *, port: Optional[int] = None, backend_port: Optional[int] = None, - hosts: Optional[list[str]] = None, + hosts: Optional[list[IPvAnyAddress]] = None, sni: Optional[str] = None, check_interval: Optional[int] = None, check_rise: Optional[int] = None, @@ -1102,7 +1102,7 @@ def provide_haproxy_route_tcp_requirements( *, port: int, backend_port: Optional[int] = None, - hosts: Optional[list[str]] = None, + hosts: 
Optional[list[IPvAnyAddress]] = None, sni: Optional[str] = None, check_interval: Optional[int] = None, check_rise: Optional[int] = None, @@ -1200,7 +1200,7 @@ def _generate_application_data( *, port: Optional[int] = None, backend_port: Optional[int] = None, - hosts: Optional[list[str]] = None, + hosts: Optional[list[IPvAnyAddress]] = None, sni: Optional[str] = None, check_interval: Optional[int] = None, check_rise: Optional[int] = None, @@ -1558,7 +1558,7 @@ def configure_backend_port(self, backend_port: int) -> "Self": self._application_data["backend_port"] = backend_port return self - def configure_hosts(self, hosts: Optional[list[int]] = None) -> "Self": + def configure_hosts(self, hosts: Optional[list[IPvAnyAddress]] = None) -> "Self": """Set backend hosts. Args: diff --git a/haproxy-operator/lib/charms/haproxy/v2/haproxy_route.py b/haproxy-operator/lib/charms/haproxy/v2/haproxy_route.py index e3ea4779a..795c815ad 100644 --- a/haproxy-operator/lib/charms/haproxy/v2/haproxy_route.py +++ b/haproxy-operator/lib/charms/haproxy/v2/haproxy_route.py @@ -154,7 +154,7 @@ def _on_haproxy_route_data_available(self, event: EventBase) -> None: # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 0 +LIBPATCH = 1 logger = logging.getLogger(__name__) HAPROXY_ROUTE_RELATION_NAME = "haproxy-route" @@ -1005,7 +1005,7 @@ def __init__( service: Optional[str] = None, ports: Optional[list[int]] = None, protocol: Literal["http", "https"] = "http", - hosts: Optional[list[str]] = None, + hosts: Optional[list[IPvAnyAddress]] = None, paths: Optional[list[str]] = None, hostname: Optional[str] = None, additional_hostnames: Optional[list[str]] = None, @@ -1144,7 +1144,7 @@ def provide_haproxy_route_requirements( service: str, ports: list[int], protocol: Literal["http", "https"] = "http", - hosts: Optional[list[str]] = None, + hosts: Optional[list[IPvAnyAddress]] = None, paths: Optional[list[str]] = None, 
hostname: Optional[str] = None, additional_hostnames: Optional[list[str]] = None, @@ -1258,7 +1258,7 @@ def _generate_application_data( # noqa: C901 service: Optional[str] = None, ports: Optional[list[int]] = None, protocol: Literal["http", "https"] = "http", - hosts: Optional[list[str]] = None, + hosts: Optional[list[IPvAnyAddress]] = None, paths: Optional[list[str]] = None, hostname: Optional[str] = None, additional_hostnames: Optional[list[str]] = None, From 16071b850ef512a69e0fd5e029466646ce8fd1e3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 105/201] update docs for postgresql container --- haproxy-route-policy/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/README.md b/haproxy-route-policy/README.md index 539f4ab85..1fcb727a9 100644 --- a/haproxy-route-policy/README.md +++ b/haproxy-route-policy/README.md @@ -3,7 +3,7 @@ Start a PostgreSQL database: ``` -docker run -d --name postgres -p 127.0.0.1:5432:5432 -e POSTGRES_PASSWORD=postgres -e POSTGRES_USERNAME=postgres postgres:latest +docker run -d --name postgres -p 127.0.0.1:5432:5432 -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres postgres:latest ``` Basic snap configurations: From 36beabfcb9f170fdd178f19323495e2acb405bf0 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 106/201] Update haproxy-route-policy/snap/hooks/configure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Ali UĞUR <39213991+alithethird@users.noreply.github.com> --- haproxy-route-policy/snap/hooks/configure | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure index 75634aa49..34cea617d 100644 --- a/haproxy-route-policy/snap/hooks/configure +++ b/haproxy-route-policy/snap/hooks/configure @@ -30,7 +30,7 @@ case "$DJANGO_LOG_LEVEL" in "ERROR") 
;; "CRITICAL") ;; *) - >&2 echo "'$DJANGO_LOG_LEVEL is not a supported value for debug. Possible values are debug, info, warning, error, critical" + >&2 echo "'$DJANGO_LOG_LEVEL is not a supported value for log-level. Possible values are debug, info, warning, error, critical" return 1 ;; esac From c1f4ca37d9b5924df753a85363e7256f80c01333 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 107/201] address comments --- haproxy-route-policy/haproxy_route_policy/settings.py | 2 +- haproxy-route-policy/snap/hooks/install | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 05b70ee77..2343aa328 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -78,7 +78,7 @@ DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", - "PASSWORD": os.getenv("DJANGO_DATABASE_PASSWORD", ""), + "PASSWORD": os.getenv("DJANGO_DATABASE_PASSWORD", "postgres"), "HOST": os.getenv("DJANGO_DATABASE_HOST", "localhost"), "PORT": os.getenv("DJANGO_DATABASE_PORT", 5432), "USER": os.getenv("DJANGO_DATABASE_USER", "postgres"), diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install index 60cc60e5a..5b80a85c0 100755 --- a/haproxy-route-policy/snap/hooks/install +++ b/haproxy-route-policy/snap/hooks/install @@ -3,7 +3,7 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. 
-set -xe +set -e # set default configuration values snapctl set debug='false' From 3fb29552a1f58907e16c118b0de47414bd6d811e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 108/201] add tests for snap --- .github/workflows/test.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fcc1406d7..b549ff17c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -36,6 +36,15 @@ jobs: uses: snapcore/action-build@v1 with: path: haproxy-route-policy + - run: | + sudo snap install --dangerous ${{ steps.snapcraft.outputs.snap }} + snap_services_output="$(sudo snap services haproxy-route-policy.gunicorn)" + echo "$snap_services_output" + current_status="$(echo "$snap_services_output" | awk '$1=="haproxy-route-policy.gunicorn" {print $3}')" + if [ "$current_status" != "active" ]; then + echo "Expected haproxy-route-policy.gunicorn to be active, got: ${current_status:-}" + exit 1 + fi - name: Upload Snap Artifact uses: actions/upload-artifact@v5 with: From 3f99f4310446b8dbee08bd765c5158a27d3adc87 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 109/201] shell script lint fix --- haproxy-route-policy/snap/scripts/bin/manage | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 6f6c9fea8..6901bead8 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -32,4 +32,4 @@ if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJ return 1 fi -exec $SNAP/app/venv/bin/python3 $SNAP/app/manage.py "$@" +exec "$SNAP/app/venv/bin/python3" "$SNAP/app/manage.py" "$@" From 65251819f65d3d46eb298cbbf9ccf2375059af5f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:02 +0200 Subject: [PATCH 110/201] update manage 
script --- haproxy-route-policy/snap/scripts/bin/manage | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 6901bead8..32258cad8 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -32,4 +32,4 @@ if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJ return 1 fi -exec "$SNAP/app/venv/bin/python3" "$SNAP/app/manage.py" "$@" +exec "$SNAP/app/venv/bin/python3" $SNAP/app/manage.py "$@" From f9141dc216df21d7a7a8c4fbae75d1be1d57a5e1 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 20:52:04 +0200 Subject: [PATCH 111/201] revert script change --- haproxy-route-policy/snap/scripts/bin/manage | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 32258cad8..6f6c9fea8 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -32,4 +32,4 @@ if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJ return 1 fi -exec "$SNAP/app/venv/bin/python3" $SNAP/app/manage.py "$@" +exec $SNAP/app/venv/bin/python3 $SNAP/app/manage.py "$@" From bcfff538180024974d2491bca35d5c8219c96847 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 10:10:33 +0200 Subject: [PATCH 112/201] fix shellcheck errors --- .github/workflows/test.yaml | 1 + haproxy-route-policy/snap/scripts/bin/manage | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b549ff17c..112da69fa 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -38,6 +38,7 @@ jobs: path: haproxy-route-policy - run: | sudo snap install --dangerous ${{ steps.snapcraft.outputs.snap }} + sleep 10 snap_services_output="$(sudo snap services 
haproxy-route-policy.gunicorn)" echo "$snap_services_output" current_status="$(echo "$snap_services_output" | awk '$1=="haproxy-route-policy.gunicorn" {print $3}')" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 6f6c9fea8..6901bead8 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -32,4 +32,4 @@ if [ -z "$DJANGO_DATABASE_HOST" ] || [ -z "$DJANGO_DATABASE_PORT" ] || [ -z "$DJ return 1 fi -exec $SNAP/app/venv/bin/python3 $SNAP/app/manage.py "$@" +exec "$SNAP/app/venv/bin/python3" "$SNAP/app/manage.py" "$@" From 1619a24e74efba2f76522c47b8c18c73cc869e42 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 10:13:24 +0200 Subject: [PATCH 113/201] Fix drifts from main --- .../policy/tests/test_models.py | 212 +++++++++++++++++- .../policy/tests/test_views.py | 73 ++++++ haproxy-route-policy/policy/views.py | 8 + 3 files changed, 284 insertions(+), 9 deletions(-) diff --git a/haproxy-route-policy/policy/tests/test_models.py b/haproxy-route-policy/policy/tests/test_models.py index 5ceccbb28..16ec291f5 100644 --- a/haproxy-route-policy/policy/tests/test_models.py +++ b/haproxy-route-policy/policy/tests/test_models.py @@ -45,7 +45,7 @@ def test_create_with_all_fields(self): class TestRuleModel(TestCase): - """Tests for Rule model creation, serialisation, and validation.""" + """Tests for Rule model creation, serialization, and validation.""" def test_create_rule_set_default_priority_and_comment(self): """Test that default values are set correctly.""" @@ -127,11 +127,205 @@ def test_valid_rule_data_accepted(self): "parameters": {"hostnames": ["example.com"], "paths": []}, "action": db_models.RULE_ACTION_ALLOW, }, - "action": db_models.RULE_ACTION_DENY, - "priority": 3, - "comment": "Block specific routes", - } - ) - self.assertTrue(serializer.is_valid(), serializer.errors) - rule = serializer.save() - self.assertIsNotNone(rule.id) + ), + ( + 
"both valid hostnames and paths", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": { + "hostnames": ["example.com", "app.example.com"], + "paths": ["/api", "/v1/health"], + }, + "action": db_models.RULE_ACTION_DENY, + "priority": 3, + "comment": "Block specific routes", + }, + ), + ] + for label, data in valid_cases: + with self.subTest(label=label): + serializer = serializers.RuleSerializer(data=data) + self.assertTrue(serializer.is_valid(), serializer.errors) + + def test_invalid_rule_data_rejected(self): + """Invalid rule data should fail serializer validation.""" + invalid_cases = [ + ( + "invalid kind", + { + "kind": "invalid_kind", + "parameters": 1, + "action": db_models.RULE_ACTION_ALLOW, + }, + {"field": "kind"}, + ), + ( + "invalid action", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": {"hostnames": ["example.com"], "paths": []}, + "action": "invalid_action", + }, + {"field": "action"}, + ), + ( + "parameters must be dict — string given", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": "not-a-dict", + "action": db_models.RULE_ACTION_DENY, + }, + { + "field": "non_field_errors", + "message": "parameters field must be a JSON object", + }, + ), + ( + "parameters must be dict — list given", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": ["not", "a", "dict"], + "action": db_models.RULE_ACTION_DENY, + }, + { + "field": "non_field_errors", + "message": "parameters field must be a JSON object", + }, + ), + ( + "parameters must be dict — int given", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": 42, + "action": db_models.RULE_ACTION_DENY, + }, + { + "field": "non_field_errors", + "message": "parameters field must be a JSON object", + }, + ), + ( + "invalid hostname", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": { + "hostnames": ["not a valid hostname!!!"], + "paths": [], + }, + "action": 
db_models.RULE_ACTION_DENY, + }, + {"message": "Invalid hostname"}, + ), + ( + "multiple invalid hostnames", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": { + "hostnames": ["valid.com", "bad host", "also bad!"], + "paths": [], + }, + "action": db_models.RULE_ACTION_DENY, + }, + {"message_contains": ["bad host", "also bad!"]}, + ), + ( + "path without leading slash", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": {"hostnames": ["example.com"], "paths": ["api/v1"]}, + "action": db_models.RULE_ACTION_DENY, + }, + {"message": "Invalid path"}, + ), + ( + "non-string path", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": {"hostnames": ["example.com"], "paths": [123]}, + "action": db_models.RULE_ACTION_DENY, + }, + {}, + ), + ( + "multiple invalid paths", + { + "kind": db_models.RULE_KIND_HOSTNAME_AND_PATH_MATCH, + "parameters": {"hostnames": [], "paths": ["no-slash", "also-bad"]}, + "action": db_models.RULE_ACTION_DENY, + }, + {"message_contains": ["no-slash", "also-bad"]}, + ), + ] + for label, data, checks in invalid_cases: + with self.subTest(label=label): + serializer = serializers.RuleSerializer(data=data) + self.assertFalse(serializer.is_valid()) + errors_str = str(serializer.errors) + if "field" in checks: + self.assertIn(checks["field"], serializer.errors) + if "message" in checks: + self.assertIn(checks["message"], errors_str) + if "message_contains" in checks: + for fragment in checks["message_contains"]: + self.assertIn(fragment, errors_str) + + +class TestValidatePort(TestCase): + """Tests for the validate_port validator.""" + + def test_valid_ports(self): + """Valid TCP port numbers should not raise.""" + valid_ports = [1, 80, 443, 8080, 65535] + for port in valid_ports: + with self.subTest(port=port): + db_models.validate_port(port) + + def test_invalid_ports(self): + """Out-of-range and wrong-type values should raise ValidationError.""" + invalid_ports = [ + (0, "below 
minimum"), + (-1, "negative"), + (65536, "above maximum"), + (100000, "way above maximum"), + ("443", "string"), + (44.3, "float"), + (None, "None"), + ] + for value, label in invalid_ports: + with self.subTest(value=value, label=label): + with self.assertRaises(ValidationError): + db_models.validate_port(value) + + +class TestValidatePaths(TestCase): + """Tests for the validate_paths validator.""" + + def test_valid_paths(self): + """Valid path lists should not raise.""" + valid_cases = [ + ([], "empty list"), + (["/"], "root path"), + (["/api"], "single path"), + (["/api", "/health", "/status"], "multiple paths"), + (["/api/v1/requests"], "nested path"), + ] + for paths, label in valid_cases: + with self.subTest(paths=paths, label=label): + db_models.validate_paths(paths) + + def test_invalid_paths(self): + """Invalid path values should raise ValidationError.""" + invalid_cases = [ + ("not-a-list", "string instead of list"), + (None, "None"), + (123, "integer"), + (["no-leading-slash"], "missing leading slash"), + (["api/v1"], "relative path"), + ([123], "non-string element"), + ([None], "None element"), + (["/valid", "invalid"], "mixed valid and invalid"), + ] + for value, label in invalid_cases: + with self.subTest(value=value, label=label): + with self.assertRaises(ValidationError): + db_models.validate_paths(value) diff --git a/haproxy-route-policy/policy/tests/test_views.py b/haproxy-route-policy/policy/tests/test_views.py index 9b987a090..f71a499ca 100644 --- a/haproxy-route-policy/policy/tests/test_views.py +++ b/haproxy-route-policy/policy/tests/test_views.py @@ -406,3 +406,76 @@ def test_delete_nonexistent(self): fake_id = uuid.uuid4() response = self.client.delete(f"/api/v1/rules/{fake_id}") self.assertEqual(response.status_code, 204) + + +class TestStatusFilterSanitization(TestCase): + """Tests for status query parameter validation on GET /api/v1/requests.""" + + def setUp(self): + """Set up the API client.""" + self.client = APIClient() + + def 
test_valid_status_filters(self): + """Valid status values should return 200.""" + valid_statuses = ["pending", "accepted", "rejected"] + for status in valid_statuses: + with self.subTest(status=status): + response = self.client.get(f"/api/v1/requests?status={status}") + self.assertEqual(response.status_code, 200) + + def test_invalid_status_filters(self): + """Invalid status values should return 400.""" + invalid_statuses = [ + "invalid", + "PENDING", + "Accepted", + "unknown", + "' OR 1=1 --", + "", + "pending; DROP TABLE", + ] + for status in invalid_statuses: + with self.subTest(status=status): + response = self.client.get(f"/api/v1/requests?status={status}") + self.assertEqual(response.status_code, 400) + self.assertIn("error", response.json()) + + +class TestPkValidation(TestCase): + """Tests for pk (UUID) validation on GET/DELETE /api/v1/requests/.""" + + def setUp(self): + """Set up the API client.""" + self.client = APIClient() + + def test_invalid_pk_returns_404(self): + """GET and DELETE with an invalid UUID pk should return 404.""" + invalid_pks = [ + "not-a-uuid", + "12345", + "' OR 1=1 --", + " ", + ] + # GET requests with invalid PKs + for pk in invalid_pks: + with self.subTest(pk=pk): + response = self.client.get(f"/api/v1/requests/{pk}") + self.assertEqual(response.status_code, 404) + + # GET rules with invalid PKs + for pk in invalid_pks: + with self.subTest(pk=pk): + response = self.client.get(f"/api/v1/rules/{pk}") + self.assertEqual(response.status_code, 404) + + # DELETE requests with invalid PKs + for pk in invalid_pks: + with self.subTest(pk=pk): + response = self.client.delete(f"/api/v1/requests/{pk}") + self.assertEqual(response.status_code, 404) + + # DELETE rules with invalid PKs + for pk in invalid_pks: + with self.subTest(pk=pk): + response = self.client.delete(f"/api/v1/rules/{pk}") + self.assertEqual(response.status_code, 404) diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 
54ceec66c..b4483518c 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -4,6 +4,7 @@ """REST API views for backend requests and rules.""" from policy.db_models import BackendRequest, Rule +from typing import Type from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.status import ( @@ -138,3 +139,10 @@ def delete(self, request, pk): """Delete a rule by ID.""" Rule.objects.filter(pk=pk).delete() return Response(status=HTTP_204_NO_CONTENT) + + +def get_object(object_class: Type[Rule] | Type[BackendRequest], pk: str): + try: + return object_class.objects.get(pk=pk) + except object_class.DoesNotExist: + raise Http404 From 46b3ed5df2b328878e354ae51a621837c82295ac Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 10:13:37 +0200 Subject: [PATCH 114/201] remove 10s sleep --- .github/workflows/test.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 112da69fa..b549ff17c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -38,7 +38,6 @@ jobs: path: haproxy-route-policy - run: | sudo snap install --dangerous ${{ steps.snapcraft.outputs.snap }} - sleep 10 snap_services_output="$(sudo snap services haproxy-route-policy.gunicorn)" echo "$snap_services_output" current_status="$(echo "$snap_services_output" | awk '$1=="haproxy-route-policy.gunicorn" {print $3}')" From cdc10e8fbf2122226e0b17d7a589098892f86a53 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 10:18:55 +0200 Subject: [PATCH 115/201] wait for snap service to settle --- .github/workflows/test.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b549ff17c..112da69fa 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -38,6 +38,7 @@ jobs: path: haproxy-route-policy - run: | sudo snap install --dangerous ${{ 
steps.snapcraft.outputs.snap }} + sleep 10 snap_services_output="$(sudo snap services haproxy-route-policy.gunicorn)" echo "$snap_services_output" current_status="$(echo "$snap_services_output" | awk '$1=="haproxy-route-policy.gunicorn" {print $3}')" From 27b2339c1ec103ad1cdf0a7dc69d1b862bb647f3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 10:30:05 +0200 Subject: [PATCH 116/201] set config to start the snap --- .github/workflows/test.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 112da69fa..af49746f0 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -38,7 +38,12 @@ jobs: path: haproxy-route-policy - run: | sudo snap install --dangerous ${{ steps.snapcraft.outputs.snap }} - sleep 10 + sudo snap set haproxy-route-policy \ + database-host=localhost \ + database-port=5432 \ + database-user=postgres \ + database-password=postgres \ + database-name=postgres snap_services_output="$(sudo snap services haproxy-route-policy.gunicorn)" echo "$snap_services_output" current_status="$(echo "$snap_services_output" | awk '$1=="haproxy-route-policy.gunicorn" {print $3}')" From 4e7fd5ef88027c7235e43ff6bbf319776f09e1ae Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 02:02:37 +0200 Subject: [PATCH 117/201] add MVP for haproxy-roite-policy-operator --- .github/workflows/test.yaml | 2 + haproxy-route-policy-operator/README.md | 10 + haproxy-route-policy-operator/charmcraft.yaml | 64 + haproxy-route-policy-operator/pyproject.toml | 98 ++ haproxy-route-policy-operator/src/charm.py | 141 ++ haproxy-route-policy-operator/src/snap.py | 49 + .../tests/unit/test_charm.py | 78 + haproxy-route-policy-operator/tox.toml | 44 + haproxy-route-policy-operator/uv.lock | 1529 +++++++++++++++++ tox.toml | 1 + 10 files changed, 2016 insertions(+) create mode 100644 haproxy-route-policy-operator/README.md create mode 100644 
haproxy-route-policy-operator/charmcraft.yaml create mode 100644 haproxy-route-policy-operator/pyproject.toml create mode 100644 haproxy-route-policy-operator/src/charm.py create mode 100644 haproxy-route-policy-operator/src/snap.py create mode 100644 haproxy-route-policy-operator/tests/unit/test_charm.py create mode 100644 haproxy-route-policy-operator/tox.toml create mode 100644 haproxy-route-policy-operator/uv.lock diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index af49746f0..57b72e99c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -16,6 +16,8 @@ jobs: working-directory: ./haproxy-ddos-protection-configurator - name: haproxy-route-policy working-directory: ./haproxy-route-policy + - name: haproxy-route-policy-operator + working-directory: ./haproxy-route-policy-operator name: Unit tests for ${{ matrix.charm.name }} uses: canonical/operator-workflows/.github/workflows/test.yaml@main secrets: inherit diff --git a/haproxy-route-policy-operator/README.md b/haproxy-route-policy-operator/README.md new file mode 100644 index 000000000..21f27443e --- /dev/null +++ b/haproxy-route-policy-operator/README.md @@ -0,0 +1,10 @@ +# HAProxy route policy operator + +Machine charm for the HAProxy Route Policy service. + +This charm: + +- requires a `postgresql` relation using `postgresql_client` +- installs and configures the `haproxy-route-policy` snap +- runs first-time database migrations +- starts and keeps the snap gunicorn service running diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml new file mode 100644 index 000000000..7d6f06461 --- /dev/null +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -0,0 +1,64 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. +type: charm +base: ubuntu@24.04 +build-base: ubuntu@24.04 + +platforms: + amd64: + +parts: + charm: + source: . 
+ plugin: uv + build-snaps: + - astral-uv + +name: haproxy-route-policy +title: HAProxy route policy charm +summary: Policy service for HAProxy route requests. +description: | + A [Juju](https://juju.is/) [charm](https://juju.is/docs/olm/charmed-operators) + deploying and managing the HAProxy Route Policy API on machines. + + The charm installs the `haproxy-route-policy` snap, configures it with + PostgreSQL relation credentials, runs database migrations, and starts gunicorn. +links: + documentation: https://documentation.ubuntu.com/haproxy-charm/ + issues: https://github.com/canonical/haproxy-operator/issues + source: https://github.com/canonical/haproxy-operator + contact: + - https://launchpad.net/~canonical-is-devops + +assumes: + - juju >= 3.3 + +requires: + postgresql: + interface: postgresql_client + description: PostgreSQL database used by the route policy service. + limit: 1 + optional: false + +config: + options: + snap-channel: + type: string + default: latest/edge + description: Snap channel used to install haproxy-route-policy. + debug: + type: boolean + default: false + description: Enable Django debug mode for the route policy service. + log-level: + type: string + default: info + description: Log level for Django logs. + allowed-hosts: + type: string + default: '["*"]' + description: JSON array of allowed hosts for Django. + secret-key: + type: string + default: "" + description: Optional Django secret key. If empty, the charm generates one. diff --git a/haproxy-route-policy-operator/pyproject.toml b/haproxy-route-policy-operator/pyproject.toml new file mode 100644 index 000000000..3f0ddb443 --- /dev/null +++ b/haproxy-route-policy-operator/pyproject.toml @@ -0,0 +1,98 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +[project] +name = "haproxy-route-policy-operator" +version = "0.1.0" +description = "HAProxy route policy machine charm." 
+readme = "README.md" +requires-python = ">=3.12" +classifiers = [ + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", +] +dependencies = [ + "ops==3.5.2", + "requests==2.32.5", + "charmlibs-snap==1.0.1", +] + +[dependency-groups] +fmt = [ "ruff" ] +lint = [ "codespell", "mypy", "ops[testing]", "pytest", "ruff", "types-requests", "types-pyyaml" ] +unit = [ "coverage[toml]", "ops[testing]", "pytest" ] +coverage-report = [ "coverage[toml]", "pytest" ] +static = [ "bandit[toml]" ] +integration = [ "jubilant==1.7.0", "juju==3.6.1.3", "pytest", "pytest-operator" ] + +[tool.uv] +package = false + +[tool.ruff] +target-version = "py310" +line-length = 99 +lint.select = [ "A", "B", "C", "CPY", "D", "E", "F", "I", "N", "RUF", "S", "SIM", "TC", "UP", "W" ] +lint.ignore = [ + "B904", + "D107", + "D203", + "D204", + "D205", + "D213", + "D215", + "D400", + "D404", + "D406", + "D407", + "D408", + "D409", + "D413", + "E501", + "S105", + "S603", + "S607", + "TC002", + "TC006", + "UP006", + "UP007", + "UP035", + "UP045", +] +lint.per-file-ignores."tests/*" = [ "B011", "D100", "D101", "D102", "D103", "D104", "D212", "D415", "D417", "S" ] +lint.flake8-copyright.author = "Canonical Ltd." 
+lint.flake8-copyright.min-file-size = 1 +lint.flake8-copyright.notice-rgx = "Copyright\\s\\d{4}([-,]\\d{4})*\\s+" +lint.mccabe.max-complexity = 10 +lint.pydocstyle.convention = "google" + +[tool.codespell] +skip = "build,lib,venv,icon.svg,.tox,.git,.mypy_cache,.ruff_cache,.coverage,htmlcov,uv.lock" + +[tool.pytest.ini_options] +minversion = "6.0" +log_cli_level = "INFO" + +[tool.coverage.run] +branch = true + +[tool.coverage.report] +show_missing = true + +[tool.mypy] +check_untyped_defs = true +disallow_untyped_defs = true +explicit_package_bases = true +ignore_missing_imports = true +namespace_packages = true + +[[tool.mypy.overrides]] +disallow_untyped_defs = false +module = "tests.*" + +[tool.bandit] +exclude_dirs = [ "/venv/" ] + +[tool.bandit.assert_used] +skips = [ "*/*test.py", "*/test_*.py", "*tests/*.py" ] diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py new file mode 100644 index 000000000..92f7d1df8 --- /dev/null +++ b/haproxy-route-policy-operator/src/charm.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python3 + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""haproxy-route-policy-operator charm.""" + +from __future__ import annotations + +import json +import logging +import secrets +import subprocess +from typing import Any + +import ops +from charmlibs import snap as snap_lib + +import snap + +logger = logging.getLogger(__name__) + +POSTGRESQL_RELATION = "postgresql" +VALID_LOG_LEVELS = {"debug", "info", "warning", "error", "critical"} + + +class HaproxyRoutePolicyCharm(ops.CharmBase): + """Charm for HAProxy Route Policy service.""" + + _stored = ops.StoredState() + + def __init__(self, *args: Any): + super().__init__(*args) + self._stored.set_default(secret_key="") + + self.framework.observe(self.on.install, self._install) + self.framework.observe(self.on.upgrade_charm, self._install) + self.framework.observe(self.on.start, self._reconcile) + self.framework.observe(self.on.config_changed, self._reconcile) + self.framework.observe(self.on[POSTGRESQL_RELATION].relation_joined, self._reconcile) + self.framework.observe(self.on[POSTGRESQL_RELATION].relation_changed, self._reconcile) + self.framework.observe(self.on[POSTGRESQL_RELATION].relation_broken, self._reconcile) + + self.unit.open_port("tcp", 8080) + + def _install(self, _: ops.EventBase) -> None: + """Install the route-policy snap.""" + channel = str(self.model.config["snap-channel"]) + self.unit.status = ops.MaintenanceStatus("installing haproxy-route-policy snap") + try: + snap.install_snap(channel=channel) + except snap_lib.SnapError as exc: + logger.exception("Failed to install haproxy-route-policy snap") + self.unit.status = ops.BlockedStatus(f"snap installation failed: {exc}") + return + self._reconcile(_) + + def _reconcile(self, _: ops.EventBase) -> None: + """Reconcile snap configuration and service state.""" + credentials = self._get_postgresql_credentials() + if not credentials: + self.unit.status = ops.WaitingStatus("waiting for postgresql relation data") + return + + try: + snap_config = { + "secret-key": self._get_secret_key(), + "debug": 
bool(self.model.config["debug"]), + "allowed-hosts": self._validated_allowed_hosts(), + "log-level": self._validated_log_level(), + **credentials, + } + self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") + snap.configure_snap(snap_config) + self.unit.status = ops.MaintenanceStatus("running database migrations") + snap.run_migrations() + self.unit.status = ops.MaintenanceStatus("starting gunicorn service") + snap.start_gunicorn_service() + except (ValueError, snap_lib.SnapError, subprocess.CalledProcessError) as exc: + logger.exception("Failed to reconcile haproxy-route-policy service") + self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") + return + + self.unit.status = ops.ActiveStatus() + + def _get_secret_key(self) -> str: + """Get a stable secret key for Django.""" + config_secret_key = str(self.model.config["secret-key"]).strip() + if config_secret_key: + return config_secret_key + if self._stored.secret_key: + return self._stored.secret_key + self._stored.secret_key = secrets.token_urlsafe(48) + return self._stored.secret_key + + def _validated_allowed_hosts(self) -> str: + """Validate allowed-hosts config and return it in JSON string form.""" + raw_value = str(self.model.config["allowed-hosts"]) + parsed = json.loads(raw_value) + if not isinstance(parsed, list) or not all(isinstance(host, str) for host in parsed): + raise ValueError("allowed-hosts must be a JSON array of strings") + return raw_value + + def _validated_log_level(self) -> str: + """Validate log-level config.""" + log_level = str(self.model.config["log-level"]).lower() + if log_level not in VALID_LOG_LEVELS: + raise ValueError(f"log-level must be one of {', '.join(sorted(VALID_LOG_LEVELS))}") + return log_level + + def _get_postgresql_credentials(self) -> dict[str, str] | None: + """Read PostgreSQL credentials from relation databag.""" + relation = self.model.get_relation(POSTGRESQL_RELATION) + if relation is None or relation.app is None: + return 
None + + relation_data = relation.data[relation.app] + endpoints = relation_data.get("endpoints") + database = relation_data.get("database") + username = relation_data.get("username") + password = relation_data.get("password") + + if not all([endpoints, database, username, password]): + return None + + endpoint = str(endpoints).split(",")[0].strip() + host, _, port = endpoint.partition(":") + if not port: + port = "5432" + + return { + "database-host": host, + "database-port": port, + "database-user": str(username), + "database-password": str(password), + "database-name": str(database), + } + + +if __name__ == "__main__": # pragma: nocover + ops.main(HaproxyRoutePolicyCharm) diff --git a/haproxy-route-policy-operator/src/snap.py b/haproxy-route-policy-operator/src/snap.py new file mode 100644 index 000000000..e4b96426a --- /dev/null +++ b/haproxy-route-policy-operator/src/snap.py @@ -0,0 +1,49 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Helpers for managing the haproxy-route-policy snap.""" + +from __future__ import annotations + +import subprocess # nosec +from typing import Any + +from charmlibs import snap + +SNAP_NAME = "haproxy-route-policy" + + +def install_snap(channel: str) -> None: + """Install or refresh the route-policy snap.""" + cache = snap.SnapCache() + package = cache[SNAP_NAME] + package.ensure(snap.SnapState.Latest, channel=channel) + + +def configure_snap(config: dict[str, str | bool]) -> None: + """Apply snap configuration if any value changed.""" + package = snap.SnapCache()[SNAP_NAME] + existing = package.get(None, typed=True) + to_set: dict[str, Any] = {} + for key, value in config.items(): + if existing.get(key) != value: + to_set[key] = value + if to_set: + package.set(to_set, typed=True) + + +def run_migrations() -> None: + """Run first-time and subsequent database migrations.""" + subprocess.run( # nosec + [f"{SNAP_NAME}.manage", "migrate", "--noinput"], + check=True, + encoding="utf-8", + 
stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + + +def start_gunicorn_service() -> None: + """Ensure the snap gunicorn app is running.""" + package = snap.SnapCache()[SNAP_NAME] + package.start() diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py new file mode 100644 index 000000000..065de7ad8 --- /dev/null +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -0,0 +1,78 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Unit tests for haproxy-route-policy-operator charm.""" + +from unittest.mock import patch + +from ops import testing + +from charm import HaproxyRoutePolicyCharm + + +def _postgresql_relation() -> testing.Relation: + """Build a postgresql relation carrying complete credentials.""" + return testing.Relation( + "postgresql", + remote_app_data={ + "endpoints": "10.0.0.10:5432", + "database": "haproxy_route_policy", + "username": "policy", + "password": "secret", + }, + ) + + +def test_install_without_relation_sets_waiting_status(): + """ + arrange: create charm context without postgresql relation. + act: run install event. + assert: snap install is invoked and unit waits for postgresql relation data. + """ + ctx = testing.Context(HaproxyRoutePolicyCharm) + state = testing.State(config={"snap-channel": "latest/edge"}) + + with patch("charm.snap.install_snap") as install_snap_mock: + out = ctx.run(ctx.on.install(), state) + + install_snap_mock.assert_called_once_with(channel="latest/edge") + assert isinstance(out.unit_status, testing.WaitingStatus) + + +def test_config_changed_reconciles_snap_with_postgresql_credentials(): + """ + arrange: create charm context with valid postgresql relation credentials. + act: run config-changed event. + assert: snap is configured, migrations run, and service is started. 
+ """ + ctx = testing.Context(HaproxyRoutePolicyCharm) + state = testing.State(relations=[_postgresql_relation()]) + + with ( + patch("charm.snap.configure_snap") as configure_mock, + patch("charm.snap.run_migrations") as migrate_mock, + patch("charm.snap.start_gunicorn_service") as start_mock, + ): + out = ctx.run(ctx.on.config_changed(), state) + + assert out.unit_status == testing.ActiveStatus() + configure_mock.assert_called_once() + migrate_mock.assert_called_once() + start_mock.assert_called_once() + + +def test_config_changed_with_invalid_log_level_sets_blocked_status(): + """ + arrange: create charm context with relation and invalid log-level config. + act: run config-changed event. + assert: charm sets blocked status. + """ + ctx = testing.Context(HaproxyRoutePolicyCharm) + state = testing.State( + relations=[_postgresql_relation()], + config={"log-level": "invalid"}, + ) + + out = ctx.run(ctx.on.config_changed(), state) + + assert isinstance(out.unit_status, testing.BlockedStatus) diff --git a/haproxy-route-policy-operator/tox.toml b/haproxy-route-policy-operator/tox.toml new file mode 100644 index 000000000..8bce38a1c --- /dev/null +++ b/haproxy-route-policy-operator/tox.toml @@ -0,0 +1,44 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +skipsdist = true +skip_missing_interpreters = true +requires = [ "tox>=4.21" ] +no_package = true + +[env_run_base] +passenv = [ "PYTHONPATH", "CHARM_BUILD_DIR", "MODEL_SETTINGS" ] +runner = "uv-venv-lock-runner" + +[env_run_base.setenv] +PYTHONPATH = "{toxinidir}:{toxinidir}/lib:{[vars]src_path}" +PYTHONBREAKPOINT = "ipdb.set_trace" +PY_COLORS = "1" + +[env.fmt] +description = "Apply coding style standards to code" +commands = [ + [ "ruff", "check", "--fix", "--select", "I", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], + [ "ruff", "format", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], +] +dependency_groups = [ "fmt" ] + +[env.lint] +description = "Check code against coding style standards" +commands = [ + [ "codespell", "{toxinidir}" ], + [ "ruff", "format", "--check", "--diff", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], + [ "ruff", "check", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], + [ "mypy", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], +] +dependency_groups = [ "lint" ] + +[env.unit] +description = "Run unit tests" +commands = [ [ "pytest", "-v", "{[vars]tst_path}unit" ] ] +dependency_groups = [ "unit" ] + +[vars] +src_path = "{toxinidir}/src/" +tst_path = "{toxinidir}/tests/" +all_path = [ "{toxinidir}/src", "{toxinidir}/tests" ] diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock new file mode 100644 index 000000000..6ea5a1dad --- /dev/null +++ b/haproxy-route-policy-operator/uv.lock @@ -0,0 +1,1529 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "asttokens" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, 
upload-time = "2025-11-15T16:43:48.578Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" }, +] + +[[package]] +name = "backports-datetime-fromisoformat" +version = "2.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/81/eff3184acb1d9dc3ce95a98b6f3c81a49b4be296e664db8e1c2eeabef3d9/backports_datetime_fromisoformat-2.0.3.tar.gz", hash = "sha256:b58edc8f517b66b397abc250ecc737969486703a66eb97e01e6d51291b1a139d", size = 23588, upload-time = "2024-12-28T20:18:15.017Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/69bbdde2e1e57c09b5f01788804c50e68b29890aada999f2b1a40519def9/backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66ce47ee1ba91e146149cf40565c3d750ea1be94faf660ca733d8601e0848147", size = 27630, upload-time = "2024-12-28T20:17:19.442Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1d/1c84a50c673c87518b1adfeafcfd149991ed1f7aedc45d6e5eac2f7d19d7/backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8b7e069910a66b3bba61df35b5f879e5253ff0821a70375b9daf06444d046fa4", size = 34707, upload-time = "2024-12-28T20:17:21.79Z" }, + { url = "https://files.pythonhosted.org/packages/71/44/27eae384e7e045cda83f70b551d04b4a0b294f9822d32dea1cbf1592de59/backports_datetime_fromisoformat-2.0.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:a3b5d1d04a9e0f7b15aa1e647c750631a873b298cdd1255687bb68779fe8eb35", size = 27280, upload-time = "2024-12-28T20:17:24.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/7a/a4075187eb6bbb1ff6beb7229db5f66d1070e6968abeb61e056fa51afa5e/backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1b95986430e789c076610aea704db20874f0781b8624f648ca9fb6ef67c6e1", size = 55094, upload-time = "2024-12-28T20:17:25.546Z" }, + { url = "https://files.pythonhosted.org/packages/71/03/3fced4230c10af14aacadc195fe58e2ced91d011217b450c2e16a09a98c8/backports_datetime_fromisoformat-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffe5f793db59e2f1d45ec35a1cf51404fdd69df9f6952a0c87c3060af4c00e32", size = 55605, upload-time = "2024-12-28T20:17:29.208Z" }, + { url = "https://files.pythonhosted.org/packages/f6/0a/4b34a838c57bd16d3e5861ab963845e73a1041034651f7459e9935289cfd/backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:620e8e73bd2595dfff1b4d256a12b67fce90ece3de87b38e1dde46b910f46f4d", size = 55353, upload-time = "2024-12-28T20:17:32.433Z" }, + { url = "https://files.pythonhosted.org/packages/d9/68/07d13c6e98e1cad85606a876367ede2de46af859833a1da12c413c201d78/backports_datetime_fromisoformat-2.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4cf9c0a985d68476c1cabd6385c691201dda2337d7453fb4da9679ce9f23f4e7", size = 55298, upload-time = "2024-12-28T20:17:34.919Z" }, + { url = "https://files.pythonhosted.org/packages/60/33/45b4d5311f42360f9b900dea53ab2bb20a3d61d7f9b7c37ddfcb3962f86f/backports_datetime_fromisoformat-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:d144868a73002e6e2e6fef72333e7b0129cecdd121aa8f1edba7107fd067255d", size = 29375, upload-time = "2024-12-28T20:17:36.018Z" }, +] + +[[package]] +name = "bandit" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "stevedore" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/aa/c3/0cb80dfe0f3076e5da7e4c5ad8e57bac6ac357ff4a6406205501cade4965/bandit-1.9.4.tar.gz", hash = "sha256:b589e5de2afe70bd4d53fa0c1da6199f4085af666fde00e8a034f152a52cd628", size = 4242677, upload-time = "2026-02-25T06:44:15.503Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/a4/a26d5b25671d27e03afb5401a0be5899d94ff8fab6a698b1ac5be3ec29ef/bandit-1.9.4-py3-none-any.whl", hash = "sha256:f89ffa663767f5a0585ea075f01020207e966a9c0f2b9ef56a57c7963a3f6f8e", size = 134741, upload-time = "2026-02-25T06:44:13.694Z" }, +] + +[[package]] +name = "bcrypt" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" }, + { url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" }, + { url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" }, + { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" }, + { url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" }, + { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" }, + { url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" }, + { url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" }, + { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", 
hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" }, + { url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" }, + { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" }, + { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" }, + { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" }, + { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = 
"sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" }, + { url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" }, + { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" }, + { url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" }, + { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" }, + { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" }, + { url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 
212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { 
url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charmlibs-snap" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/c1/376fc9b55f632e0578a3b1720a3a5a6570fcdb57caac3e41b49bf435ebb9/charmlibs_snap-1.0.1.tar.gz", hash = "sha256:07a13935909ea4b82c74b8e890b311894bcab2dd1c447246ee392bfd02a74f9c", size = 29324, upload-time = "2025-11-04T13:02:10.525Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/eb/630987d299b92fc6a17455878e9365e1c08b89bd34a022e9a7b7e5a2d1ec/charmlibs_snap-1.0.1-py3-none-any.whl", hash = "sha256:42bae5f55f6dbee014c94f8ab95eef3e249d4903f02412bef05ac5b324aa2783", size = 15359, upload-time = "2025-11-04T13:02:09.235Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/60/e3bec1881450851b087e301bedc3daa9377a4d45f1c26aa90b0b235e38aa/charset_normalizer-3.4.6.tar.gz", hash = "sha256:1ae6b62897110aa7c79ea2f5dd38d1abca6db663687c0b1ad9aed6f6bae3d9d6", size = 143363, upload-time = "2026-03-15T18:53:25.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/62/c0815c992c9545347aeea7859b50dc9044d147e2e7278329c6e02ac9a616/charset_normalizer-3.4.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ef7fedc7a6ecbe99969cd09632516738a97eeb8bd7258bf8a0f23114c057dab", size = 295154, upload-time = "2026-03-15T18:50:50.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/37/bdca6613c2e3c58c7421891d80cc3efa1d32e882f7c4a7ee6039c3fc951a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4ea868bc28109052790eb2b52a9ab33f3aa7adc02f96673526ff47419490e21", size = 199191, upload-time = "2026-03-15T18:50:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/6c/92/9934d1bbd69f7f398b38c5dae1cbf9cc672e7c34a4adf7b17c0a9c17d15d/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:836ab36280f21fc1a03c99cd05c6b7af70d2697e374c7af0b61ed271401a72a2", size = 218674, upload-time = "2026-03-15T18:50:54.102Z" }, + { url = "https://files.pythonhosted.org/packages/af/90/25f6ab406659286be929fd89ab0e78e38aa183fc374e03aa3c12d730af8a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f1ce721c8a7dfec21fcbdfe04e8f68174183cf4e8188e0645e92aa23985c57ff", size = 215259, upload-time = "2026-03-15T18:50:55.616Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ef/79a463eb0fff7f96afa04c1d4c51f8fc85426f918db467854bfb6a569ce3/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e28d62a8fc7a1fa411c43bd65e346f3bce9716dc51b897fbe930c5987b402d5", size = 207276, upload-time = "2026-03-15T18:50:57.054Z" }, + { url = "https://files.pythonhosted.org/packages/f7/72/d0426afec4b71dc159fa6b4e68f868cd5a3ecd918fec5813a15d292a7d10/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:530d548084c4a9f7a16ed4a294d459b4f229db50df689bfe92027452452943a0", size = 195161, upload-time = "2026-03-15T18:50:58.686Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/c82b06a68bfcb6ce55e508225d210c7e6a4ea122bfc0748892f3dc4e8e11/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:30f445ae60aad5e1f8bdbb3108e39f6fbc09f4ea16c815c66578878325f8f15a", size = 203452, upload-time = "2026-03-15T18:51:00.196Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/0c25979b92f8adafdbb946160348d8d44aa60ce99afdc27df524379875cb/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ac2393c73378fea4e52aa56285a3d64be50f1a12395afef9cce47772f60334c2", size = 202272, upload-time = "2026-03-15T18:51:01.703Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3d/7fea3e8fe84136bebbac715dd1221cc25c173c57a699c030ab9b8900cbb7/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:90ca27cd8da8118b18a52d5f547859cc1f8354a00cd1e8e5120df3e30d6279e5", size = 195622, upload-time = "2026-03-15T18:51:03.526Z" }, + { url = "https://files.pythonhosted.org/packages/57/8a/d6f7fd5cb96c58ef2f681424fbca01264461336d2a7fc875e4446b1f1346/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e5a94886bedca0f9b78fecd6afb6629142fd2605aa70a125d49f4edc6037ee6", size = 220056, upload-time = "2026-03-15T18:51:05.269Z" }, + { url = "https://files.pythonhosted.org/packages/16/50/478cdda782c8c9c3fb5da3cc72dd7f331f031e7f1363a893cdd6ca0f8de0/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:695f5c2823691a25f17bc5d5ffe79fa90972cc34b002ac6c843bb8a1720e950d", size = 203751, upload-time = "2026-03-15T18:51:06.858Z" }, + { url = "https://files.pythonhosted.org/packages/75/fc/cc2fcac943939c8e4d8791abfa139f685e5150cae9f94b60f12520feaa9b/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:231d4da14bcd9301310faf492051bee27df11f2bc7549bc0bb41fef11b82daa2", size = 216563, upload-time = "2026-03-15T18:51:08.564Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b7/a4add1d9a5f68f3d037261aecca83abdb0ab15960a3591d340e829b37298/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:a056d1ad2633548ca18ffa2f85c202cfb48b68615129143915b8dc72a806a923", size = 209265, upload-time = "2026-03-15T18:51:10.312Z" }, + { url = "https://files.pythonhosted.org/packages/6c/18/c094561b5d64a24277707698e54b7f67bd17a4f857bbfbb1072bba07c8bf/charset_normalizer-3.4.6-cp312-cp312-win32.whl", hash = "sha256:c2274ca724536f173122f36c98ce188fd24ce3dad886ec2b7af859518ce008a4", size = 144229, upload-time = "2026-03-15T18:51:11.694Z" }, + { url = "https://files.pythonhosted.org/packages/ab/20/0567efb3a8fd481b8f34f739ebddc098ed062a59fed41a8d193a61939e8f/charset_normalizer-3.4.6-cp312-cp312-win_amd64.whl", hash = "sha256:c8ae56368f8cc97c7e40a7ee18e1cedaf8e780cd8bc5ed5ac8b81f238614facb", size = 154277, upload-time = "2026-03-15T18:51:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/15/57/28d79b44b51933119e21f65479d0864a8d5893e494cf5daab15df0247c17/charset_normalizer-3.4.6-cp312-cp312-win_arm64.whl", hash = "sha256:899d28f422116b08be5118ef350c292b36fc15ec2daeb9ea987c89281c7bb5c4", size = 142817, upload-time = "2026-03-15T18:51:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1d/4fdabeef4e231153b6ed7567602f3b68265ec4e5b76d6024cf647d43d981/charset_normalizer-3.4.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11afb56037cbc4b1555a34dd69151e8e069bee82e613a73bef6e714ce733585f", size = 294823, upload-time = "2026-03-15T18:51:15.755Z" }, + { url = "https://files.pythonhosted.org/packages/47/7b/20e809b89c69d37be748d98e84dce6820bf663cf19cf6b942c951a3e8f41/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423fb7e748a08f854a08a222b983f4df1912b1daedce51a72bd24fe8f26a1843", size = 198527, upload-time = "2026-03-15T18:51:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/4f8d27527d59c039dce6f7622593cdcd3d70a8504d87d09eb11e9fdc6062/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:d73beaac5e90173ac3deb9928a74763a6d230f494e4bfb422c217a0ad8e629bf", size = 218388, upload-time = "2026-03-15T18:51:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9b/4770ccb3e491a9bacf1c46cc8b812214fe367c86a96353ccc6daf87b01ec/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d60377dce4511655582e300dc1e5a5f24ba0cb229005a1d5c8d0cb72bb758ab8", size = 214563, upload-time = "2026-03-15T18:51:20.374Z" }, + { url = "https://files.pythonhosted.org/packages/2b/58/a199d245894b12db0b957d627516c78e055adc3a0d978bc7f65ddaf7c399/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:530e8cebeea0d76bdcf93357aa5e41336f48c3dc709ac52da2bb167c5b8271d9", size = 206587, upload-time = "2026-03-15T18:51:21.807Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/3def227f1ec56f5c69dfc8392b8bd63b11a18ca8178d9211d7cc5e5e4f27/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:a26611d9987b230566f24a0a125f17fe0de6a6aff9f25c9f564aaa2721a5fb88", size = 194724, upload-time = "2026-03-15T18:51:23.508Z" }, + { url = "https://files.pythonhosted.org/packages/58/ab/9318352e220c05efd31c2779a23b50969dc94b985a2efa643ed9077bfca5/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:34315ff4fc374b285ad7f4a0bf7dcbfe769e1b104230d40f49f700d4ab6bbd84", size = 202956, upload-time = "2026-03-15T18:51:25.239Z" }, + { url = "https://files.pythonhosted.org/packages/75/13/f3550a3ac25b70f87ac98c40d3199a8503676c2f1620efbf8d42095cfc40/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ddd609f9e1af8c7bd6e2aca279c931aefecd148a14402d4e368f3171769fd", size = 201923, upload-time = "2026-03-15T18:51:26.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/db/c5c643b912740b45e8eec21de1bbab8e7fc085944d37e1e709d3dcd9d72f/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:80d0a5615143c0b3225e5e3ef22c8d5d51f3f72ce0ea6fb84c943546c7b25b6c", size = 195366, upload-time = "2026-03-15T18:51:28.129Z" }, + { url = "https://files.pythonhosted.org/packages/5a/67/3b1c62744f9b2448443e0eb160d8b001c849ec3fef591e012eda6484787c/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:92734d4d8d187a354a556626c221cd1a892a4e0802ccb2af432a1d85ec012194", size = 219752, upload-time = "2026-03-15T18:51:29.556Z" }, + { url = "https://files.pythonhosted.org/packages/f6/98/32ffbaf7f0366ffb0445930b87d103f6b406bc2c271563644bde8a2b1093/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:613f19aa6e082cf96e17e3ffd89383343d0d589abda756b7764cf78361fd41dc", size = 203296, upload-time = "2026-03-15T18:51:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/41/12/5d308c1bbe60cabb0c5ef511574a647067e2a1f631bc8634fcafaccd8293/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2b1a63e8224e401cafe7739f77efd3f9e7f5f2026bda4aead8e59afab537784f", size = 215956, upload-time = "2026-03-15T18:51:32.399Z" }, + { url = "https://files.pythonhosted.org/packages/53/e9/5f85f6c5e20669dbe56b165c67b0260547dea97dba7e187938833d791687/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6cceb5473417d28edd20c6c984ab6fee6c6267d38d906823ebfe20b03d607dc2", size = 208652, upload-time = "2026-03-15T18:51:34.214Z" }, + { url = "https://files.pythonhosted.org/packages/f1/11/897052ea6af56df3eef3ca94edafee410ca699ca0c7b87960ad19932c55e/charset_normalizer-3.4.6-cp313-cp313-win32.whl", hash = "sha256:d7de2637729c67d67cf87614b566626057e95c303bc0a55ffe391f5205e7003d", size = 143940, upload-time = "2026-03-15T18:51:36.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/5c/724b6b363603e419829f561c854b87ed7c7e31231a7908708ac086cdf3e2/charset_normalizer-3.4.6-cp313-cp313-win_amd64.whl", hash = "sha256:572d7c822caf521f0525ba1bce1a622a0b85cf47ffbdae6c9c19e3b5ac3c4389", size = 154101, upload-time = "2026-03-15T18:51:37.876Z" }, + { url = "https://files.pythonhosted.org/packages/01/a5/7abf15b4c0968e47020f9ca0935fb3274deb87cb288cd187cad92e8cdffd/charset_normalizer-3.4.6-cp313-cp313-win_arm64.whl", hash = "sha256:a4474d924a47185a06411e0064b803c68be044be2d60e50e8bddcc2649957c1f", size = 143109, upload-time = "2026-03-15T18:51:39.565Z" }, + { url = "https://files.pythonhosted.org/packages/25/6f/ffe1e1259f384594063ea1869bfb6be5cdb8bc81020fc36c3636bc8302a1/charset_normalizer-3.4.6-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9cc6e6d9e571d2f863fa77700701dae73ed5f78881efc8b3f9a4398772ff53e8", size = 294458, upload-time = "2026-03-15T18:51:41.134Z" }, + { url = "https://files.pythonhosted.org/packages/56/60/09bb6c13a8c1016c2ed5c6a6488e4ffef506461aa5161662bd7636936fb1/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5960d965e67165d75b7c7ffc60a83ec5abfc5c11b764ec13ea54fbef8b4421", size = 199277, upload-time = "2026-03-15T18:51:42.953Z" }, + { url = "https://files.pythonhosted.org/packages/00/50/dcfbb72a5138bbefdc3332e8d81a23494bf67998b4b100703fd15fa52d81/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b3694e3f87f8ac7ce279d4355645b3c878d24d1424581b46282f24b92f5a4ae2", size = 218758, upload-time = "2026-03-15T18:51:44.339Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/d79a9a191bb75f5aa81f3aaaa387ef29ce7cb7a9e5074ba8ea095cc073c2/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d11595abf8dd942a77883a39d81433739b287b6aa71620f15164f8096221b30", size = 215299, 
upload-time = "2026-03-15T18:51:45.871Z" }, + { url = "https://files.pythonhosted.org/packages/76/7e/bc8911719f7084f72fd545f647601ea3532363927f807d296a8c88a62c0d/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7bda6eebafd42133efdca535b04ccb338ab29467b3f7bf79569883676fc628db", size = 206811, upload-time = "2026-03-15T18:51:47.308Z" }, + { url = "https://files.pythonhosted.org/packages/e2/40/c430b969d41dda0c465aa36cc7c2c068afb67177bef50905ac371b28ccc7/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:bbc8c8650c6e51041ad1be191742b8b421d05bbd3410f43fa2a00c8db87678e8", size = 193706, upload-time = "2026-03-15T18:51:48.849Z" }, + { url = "https://files.pythonhosted.org/packages/48/15/e35e0590af254f7df984de1323640ef375df5761f615b6225ba8deb9799a/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22c6f0c2fbc31e76c3b8a86fba1a56eda6166e238c29cdd3d14befdb4a4e4815", size = 202706, upload-time = "2026-03-15T18:51:50.257Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bd/f736f7b9cc5e93a18b794a50346bb16fbfd6b37f99e8f306f7951d27c17c/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7edbed096e4a4798710ed6bc75dcaa2a21b68b6c356553ac4823c3658d53743a", size = 202497, upload-time = "2026-03-15T18:51:52.012Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ba/2cc9e3e7dfdf7760a6ed8da7446d22536f3d0ce114ac63dee2a5a3599e62/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:7f9019c9cb613f084481bd6a100b12e1547cf2efe362d873c2e31e4035a6fa43", size = 193511, upload-time = "2026-03-15T18:51:53.723Z" }, + { url = "https://files.pythonhosted.org/packages/9e/cb/5be49b5f776e5613be07298c80e1b02a2d900f7a7de807230595c85a8b2e/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:58c948d0d086229efc484fe2f30c2d382c86720f55cd9bc33591774348ad44e0", size 
= 220133, upload-time = "2026-03-15T18:51:55.333Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/99f1b5dad345accb322c80c7821071554f791a95ee50c1c90041c157ae99/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:419a9d91bd238052642a51938af8ac05da5b3343becde08d5cdeab9046df9ee1", size = 203035, upload-time = "2026-03-15T18:51:56.736Z" }, + { url = "https://files.pythonhosted.org/packages/87/9a/62c2cb6a531483b55dddff1a68b3d891a8b498f3ca555fbcf2978e804d9d/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5273b9f0b5835ff0350c0828faea623c68bfa65b792720c453e22b25cc72930f", size = 216321, upload-time = "2026-03-15T18:51:58.17Z" }, + { url = "https://files.pythonhosted.org/packages/6e/79/94a010ff81e3aec7c293eb82c28f930918e517bc144c9906a060844462eb/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0e901eb1049fdb80f5bd11ed5ea1e498ec423102f7a9b9e4645d5b8204ff2815", size = 208973, upload-time = "2026-03-15T18:51:59.998Z" }, + { url = "https://files.pythonhosted.org/packages/2a/57/4ecff6d4ec8585342f0c71bc03efaa99cb7468f7c91a57b105bcd561cea8/charset_normalizer-3.4.6-cp314-cp314-win32.whl", hash = "sha256:b4ff1d35e8c5bd078be89349b6f3a845128e685e751b6ea1169cf2160b344c4d", size = 144610, upload-time = "2026-03-15T18:52:02.213Z" }, + { url = "https://files.pythonhosted.org/packages/80/94/8434a02d9d7f168c25767c64671fead8d599744a05d6a6c877144c754246/charset_normalizer-3.4.6-cp314-cp314-win_amd64.whl", hash = "sha256:74119174722c4349af9708993118581686f343adc1c8c9c007d59be90d077f3f", size = 154962, upload-time = "2026-03-15T18:52:03.658Z" }, + { url = "https://files.pythonhosted.org/packages/46/4c/48f2cdbfd923026503dfd67ccea45c94fd8fe988d9056b468579c66ed62b/charset_normalizer-3.4.6-cp314-cp314-win_arm64.whl", hash = "sha256:e5bcc1a1ae744e0bb59641171ae53743760130600da8db48cbb6e4918e186e4e", size = 143595, upload-time = "2026-03-15T18:52:05.123Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/93/8878be7569f87b14f1d52032946131bcb6ebbd8af3e20446bc04053dc3f1/charset_normalizer-3.4.6-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ad8faf8df23f0378c6d527d8b0b15ea4a2e23c89376877c598c4870d1b2c7866", size = 314828, upload-time = "2026-03-15T18:52:06.831Z" }, + { url = "https://files.pythonhosted.org/packages/06/b6/fae511ca98aac69ecc35cde828b0a3d146325dd03d99655ad38fc2cc3293/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f5ea69428fa1b49573eef0cc44a1d43bebd45ad0c611eb7d7eac760c7ae771bc", size = 208138, upload-time = "2026-03-15T18:52:08.239Z" }, + { url = "https://files.pythonhosted.org/packages/54/57/64caf6e1bf07274a1e0b7c160a55ee9e8c9ec32c46846ce59b9c333f7008/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:06a7e86163334edfc5d20fe104db92fcd666e5a5df0977cb5680a506fe26cc8e", size = 224679, upload-time = "2026-03-15T18:52:10.043Z" }, + { url = "https://files.pythonhosted.org/packages/aa/cb/9ff5a25b9273ef160861b41f6937f86fae18b0792fe0a8e75e06acb08f1d/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e1f6e2f00a6b8edb562826e4632e26d063ac10307e80f7461f7de3ad8ef3f077", size = 223475, upload-time = "2026-03-15T18:52:11.854Z" }, + { url = "https://files.pythonhosted.org/packages/fc/97/440635fc093b8d7347502a377031f9605a1039c958f3cd18dcacffb37743/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b52c68d64c1878818687a473a10547b3292e82b6f6fe483808fb1468e2f52f", size = 215230, upload-time = "2026-03-15T18:52:13.325Z" }, + { url = "https://files.pythonhosted.org/packages/cd/24/afff630feb571a13f07c8539fbb502d2ab494019492aaffc78ef41f1d1d0/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = 
"sha256:7504e9b7dc05f99a9bbb4525c67a2c155073b44d720470a148b34166a69c054e", size = 199045, upload-time = "2026-03-15T18:52:14.752Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/d1399ecdaf7e0498c327433e7eefdd862b41236a7e484355b8e0e5ebd64b/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:172985e4ff804a7ad08eebec0a1640ece87ba5041d565fff23c8f99c1f389484", size = 211658, upload-time = "2026-03-15T18:52:16.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/38/16baa0affb957b3d880e5ac2144caf3f9d7de7bc4a91842e447fbb5e8b67/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4be9f4830ba8741527693848403e2c457c16e499100963ec711b1c6f2049b7c7", size = 210769, upload-time = "2026-03-15T18:52:17.782Z" }, + { url = "https://files.pythonhosted.org/packages/05/34/c531bc6ac4c21da9ddfddb3107be2287188b3ea4b53b70fc58f2a77ac8d8/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:79090741d842f564b1b2827c0b82d846405b744d31e84f18d7a7b41c20e473ff", size = 201328, upload-time = "2026-03-15T18:52:19.553Z" }, + { url = "https://files.pythonhosted.org/packages/fa/73/a5a1e9ca5f234519c1953608a03fe109c306b97fdfb25f09182babad51a7/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:87725cfb1a4f1f8c2fc9890ae2f42094120f4b44db9360be5d99a4c6b0e03a9e", size = 225302, upload-time = "2026-03-15T18:52:21.043Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f6/cd782923d112d296294dea4bcc7af5a7ae0f86ab79f8fefbda5526b6cfc0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fcce033e4021347d80ed9c66dcf1e7b1546319834b74445f561d2e2221de5659", size = 211127, upload-time = "2026-03-15T18:52:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c5/0b6898950627af7d6103a449b22320372c24c6feda91aa24e201a478d161/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_s390x.whl", hash = 
"sha256:ca0276464d148c72defa8bb4390cce01b4a0e425f3b50d1435aa6d7a18107602", size = 222840, upload-time = "2026-03-15T18:52:24.113Z" }, + { url = "https://files.pythonhosted.org/packages/7d/25/c4bba773bef442cbdc06111d40daa3de5050a676fa26e85090fc54dd12f0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:197c1a244a274bb016dd8b79204850144ef77fe81c5b797dc389327adb552407", size = 216890, upload-time = "2026-03-15T18:52:25.541Z" }, + { url = "https://files.pythonhosted.org/packages/35/1a/05dacadb0978da72ee287b0143097db12f2e7e8d3ffc4647da07a383b0b7/charset_normalizer-3.4.6-cp314-cp314t-win32.whl", hash = "sha256:2a24157fa36980478dd1770b585c0f30d19e18f4fb0c47c13aa568f871718579", size = 155379, upload-time = "2026-03-15T18:52:27.05Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7a/d269d834cb3a76291651256f3b9a5945e81d0a49ab9f4a498964e83c0416/charset_normalizer-3.4.6-cp314-cp314t-win_amd64.whl", hash = "sha256:cd5e2801c89992ed8c0a3f0293ae83c159a60d9a5d685005383ef4caca77f2c4", size = 169043, upload-time = "2026-03-15T18:52:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/23/06/28b29fba521a37a8932c6a84192175c34d49f84a6d4773fa63d05f9aff22/charset_normalizer-3.4.6-cp314-cp314t-win_arm64.whl", hash = "sha256:47955475ac79cc504ef2704b192364e51d0d473ad452caedd0002605f780101c", size = 148523, upload-time = "2026-03-15T18:52:29.956Z" }, + { url = "https://files.pythonhosted.org/packages/2a/68/687187c7e26cb24ccbd88e5069f5ef00eba804d36dde11d99aad0838ab45/charset_normalizer-3.4.6-py3-none-any.whl", hash = "sha256:947cf925bc916d90adba35a64c82aace04fa39b46b52d4630ece166655905a69", size = 61455, upload-time = "2026-03-15T18:53:23.833Z" }, +] + +[[package]] +name = "codespell" +version = "2.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/9d/1d0903dff693160f893ca6abcabad545088e7a2ee0a6deae7c24e958be69/codespell-2.4.2.tar.gz", hash = 
"sha256:3c33be9ae34543807f088aeb4832dfad8cb2dae38da61cac0a7045dd376cfdf3", size = 352058, upload-time = "2026-03-05T18:10:42.936Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/a1/52fa05533e95fe45bcc09bcf8a503874b1c08f221a4e35608017e0938f55/codespell-2.4.2-py3-none-any.whl", hash = "sha256:97e0c1060cf46bd1d5db89a936c98db8c2b804e1fdd4b5c645e82a1ec6b1f886", size = 353715, upload-time = "2026-03-05T18:10:41.398Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" 
}, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url 
= "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = 
"2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url 
= "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = 
"2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = 
"2026-03-25T23:33:46.649Z" }, + { url = "https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" }, + { url = "https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" 
}, + { url = "https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" }, + { url = "https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" }, + { url = "https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" }, + { url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "executing" +version = "2.2.1" +source 
= { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, +] + +[[package]] +name = "google-auth" +version = "2.49.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyasn1-modules" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/80/6a696a07d3d3b0a92488933532f03dbefa4a24ab80fb231395b9a2a1be77/google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64", size = 333825, upload-time = "2026-03-12T19:30:58.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/eb/c6c2478d8a8d633460be40e2a8a6f8f429171997a35a96f81d3b680dec83/google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7", size = 240737, upload-time = "2026-03-12T19:30:53.159Z" }, +] + +[[package]] +name = "haproxy-route-policy-operator" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "charmlibs-snap" }, + { name = "ops" }, + { name = "requests" }, +] + +[package.dev-dependencies] +coverage-report = [ + { name = "coverage" }, + { name = "pytest" }, +] +fmt = [ + { name = "ruff" }, +] +integration = [ + { name = "jubilant" }, + { name = "juju" }, + { name = "pytest" }, + { name = "pytest-operator" }, +] +lint = [ + { name = "codespell" }, + { name = "mypy" }, + { name = "ops", extra = ["testing"] }, + { name = "pytest" }, + { name = "ruff" }, + { name = "types-pyyaml" }, + { name = "types-requests" }, +] +static = [ + { name = "bandit" }, +] +unit = [ + { name = "coverage" }, + { name = "ops", extra = ["testing"] }, + { name = "pytest" }, +] + +[package.metadata] +requires-dist = [ + { name = "charmlibs-snap", specifier = "==1.0.1" }, + { name = "ops", specifier = "==3.5.2" }, + { name = "requests", specifier = "==2.32.5" }, +] + +[package.metadata.requires-dev] +coverage-report = [ + { name = "coverage", extras = ["toml"] }, + { name = "pytest" }, +] +fmt = [{ name = "ruff" }] +integration = [ + { name = "jubilant", specifier = "==1.7.0" }, + { name = "juju", specifier = "==3.6.1.3" }, + { name = "pytest" }, + { name = "pytest-operator" }, +] +lint = [ + { name = "codespell" }, + { name = "mypy" }, + { name = "ops", extras = ["testing"] }, + { name = "pytest" }, + { name = "ruff" }, + { name = "types-pyyaml" }, + { name = "types-requests" }, +] +static = [{ name = "bandit", extras = ["toml"] }] +unit = [ + { name = "coverage", extras = ["toml"] }, + { name = "ops", extras = ["testing"] }, + { name = "pytest" }, +] + +[[package]] +name = "hvac" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e2/57/b46c397fb3842cfb02a44609aa834c887f38dd75f290c2fc5a34da4b2fee/hvac-2.4.0.tar.gz", hash = "sha256:e0056ad9064e7923e874e6769015b032580b639e29246f5ab1044f7959c1c7e0", size = 332543, upload-time = 
"2025-10-30T12:57:47.512Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/33/71e45a6bd6875f44a26f99da31c63b6840123e88bedf2c0b1ce429b8be12/hvac-2.4.0-py3-none-any.whl", hash = "sha256:008db5efd8c2f77bd37d2368ea5f713edceae1c65f11fd608393179478649e0f", size = 155921, upload-time = "2025-10-30T12:57:46.253Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "invoke" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/de/bd/b461d3424a24c80490313fd77feeb666ca4f6a28c7e72713e3d9095719b4/invoke-2.2.1.tar.gz", hash = "sha256:515bf49b4a48932b79b024590348da22f39c4942dff991ad1fb8b8baea1be707", size = 304762, upload-time = "2025-10-11T00:36:35.172Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/4b/b99e37f88336009971405cbb7630610322ed6fbfa31e1d7ab3fbf3049a2d/invoke-2.2.1-py3-none-any.whl", hash = "sha256:2413bc441b376e5cd3f55bb5d364f973ad8bdd7bf87e53c79de3c11bf3feecc8", size = 160287, upload-time = "2025-10-11T00:36:33.703Z" }, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "decorator" }, + { name = "ipython" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/1b/7e07e7b752017f7693a0f4d41c13e5ca29ce8cbcfdcc1fd6c4ad8c0a27a0/ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726", size = 17042, upload-time = "2023-03-09T15:40:57.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/4c/b075da0092003d9a55cf2ecc1cae9384a1ca4f650d51b00fc59875fe76f6/ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4", size = 12130, 
upload-time = "2023-03-09T15:40:55.021Z" }, +] + +[[package]] +name = "ipython" +version = "9.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "decorator" }, + { name = "ipython-pygments-lexers" }, + { name = "jedi" }, + { name = "matplotlib-inline" }, + { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit" }, + { name = "pygments" }, + { name = "stack-data" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/73/7114f80a8f9cabdb13c27732dce24af945b2923dcab80723602f7c8bc2d8/ipython-9.12.0.tar.gz", hash = "sha256:01daa83f504b693ba523b5a407246cabde4eb4513285a3c6acaff11a66735ee4", size = 4428879, upload-time = "2026-03-27T09:42:45.312Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/22/906c8108974c673ebef6356c506cebb6870d48cedea3c41e949e2dd556bb/ipython-9.12.0-py3-none-any.whl", hash = "sha256:0f2701e8ee86e117e37f50563205d36feaa259d2e08d4a6bc6b6d74b18ce128d", size = 625661, upload-time = "2026-03-27T09:42:42.831Z" }, +] + +[[package]] +name = "ipython-pygments-lexers" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, +] + +[[package]] +name = "jedi" +version = "0.19.2" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "parso" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jubilant" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/0b/275edac8b57b0aac34f84073997660ebf536f97d2fa0d85a2cc3321047b6/jubilant-1.7.0.tar.gz", hash = "sha256:46b7c29a4f3336ab16d77d88418dbf8c9d0746e3f80ef42ee4c2d103eff79650", size = 32455, upload-time = "2026-01-29T02:40:10.335Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/86/d5/5b95ae9ab5abf283e33c802d286045abda7d826396ba417d5d3a20201b24/jubilant-1.7.0-py3-none-any.whl", hash = "sha256:1dcd70eb10299a95ae9fab405a3ce5f01a15513776b7f8eb4cf7b02808c93cdf", size = 33396, upload-time = "2026-01-29T02:40:09.222Z" }, +] + +[[package]] +name = "juju" +version = "3.6.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-datetime-fromisoformat" }, + { name = "hvac" }, + { name = "kubernetes" }, + { name = "macaroonbakery" }, + { name = "packaging" }, + { name = "paramiko" }, + { name = "pyasn1" }, + { name = "pyyaml" }, + { name = "toposort" }, + { name = "typing-extensions" }, + { name = "typing-inspect" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/ac/42ed7565d1b031856fb7e8884089acb3eab5aa6a2dabfee3fcf09660f885/juju-3.6.1.3.tar.gz", hash = "sha256:2fcf510fa35b387abb382da3a8b2227f38852ae7e9dc1058afb228588e1aec51", size = 305052, upload-time = "2025-07-11T02:15:59.237Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/13/e31f9b9c24e723a161bc3b75729acb109d80bf3f75f937e4c3d408f19e5a/juju-3.6.1.3-py3-none-any.whl", hash = "sha256:87469500a0a4e6a3976ddf0595e316379868d5cea96e15af2d2d4b94188f76e5", size = 287075, upload-time = "2025-07-11T02:15:57.118Z" }, +] + +[[package]] +name = "kubernetes" +version = "30.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "google-auth" }, + { name = "oauthlib" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "requests-oauthlib" }, + { name = "six" }, + { name = "urllib3" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/3c/9f29f6cab7f35df8e54f019e5719465fa97b877be2454e99f989270b4f34/kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc", size = 887810, upload-time 
= "2024-06-06T15:58:30.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/2027ddede72d33be2effc087580aeba07e733a7360780ae87226f1f91bd8/kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d", size = 1706042, upload-time = "2024-06-06T15:58:27.13Z" }, +] + +[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" }, + { url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/dd/36/e725903416409a533d92398e88ce665476f275081d0d7d42f9c4951999e5/librt-0.8.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:039b9f2c506bd0ab0f8725aa5ba339c6f0cd19d3b514b50d134789809c24285d", 
size = 209991, upload-time = "2026-02-17T16:11:45.462Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/8d908a152e1875c9f8eac96c97a480df425e657cdb47854b9efaa4998889/librt-0.8.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bb54f1205a3a6ab41a6fd71dfcdcbd278670d3a90ca502a30d9da583105b6f7", size = 224476, upload-time = "2026-02-17T16:11:46.542Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b8/a22c34f2c485b8903a06f3fe3315341fe6876ef3599792344669db98fcff/librt-0.8.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:05bd41cdee35b0c59c259f870f6da532a2c5ca57db95b5f23689fcb5c9e42440", size = 217518, upload-time = "2026-02-17T16:11:47.746Z" }, + { url = "https://files.pythonhosted.org/packages/79/6f/5c6fea00357e4f82ba44f81dbfb027921f1ab10e320d4a64e1c408d035d9/librt-0.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adfab487facf03f0d0857b8710cf82d0704a309d8ffc33b03d9302b4c64e91a9", size = 225116, upload-time = "2026-02-17T16:11:49.298Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a0/95ced4e7b1267fe1e2720a111685bcddf0e781f7e9e0ce59d751c44dcfe5/librt-0.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:153188fe98a72f206042be10a2c6026139852805215ed9539186312d50a8e972", size = 217751, upload-time = "2026-02-17T16:11:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/93/c2/0517281cb4d4101c27ab59472924e67f55e375bc46bedae94ac6dc6e1902/librt-0.8.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dd3c41254ee98604b08bd5b3af5bf0a89740d4ee0711de95b65166bf44091921", size = 218378, upload-time = "2026-02-17T16:11:51.783Z" }, + { url = "https://files.pythonhosted.org/packages/43/e8/37b3ac108e8976888e559a7b227d0ceac03c384cfd3e7a1c2ee248dbae79/librt-0.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e0d138c7ae532908cbb342162b2611dbd4d90c941cd25ab82084aaf71d2c0bd0", size = 241199, upload-time = "2026-02-17T16:11:53.561Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/5b/35812d041c53967fedf551a39399271bbe4257e681236a2cf1a69c8e7fa1/librt-0.8.1-cp312-cp312-win32.whl", hash = "sha256:43353b943613c5d9c49a25aaffdba46f888ec354e71e3529a00cca3f04d66a7a", size = 54917, upload-time = "2026-02-17T16:11:54.758Z" }, + { url = "https://files.pythonhosted.org/packages/de/d1/fa5d5331b862b9775aaf2a100f5ef86854e5d4407f71bddf102f4421e034/librt-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff8baf1f8d3f4b6b7257fcb75a501f2a5499d0dda57645baa09d4d0d34b19444", size = 62017, upload-time = "2026-02-17T16:11:55.748Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7c/c614252f9acda59b01a66e2ddfd243ed1c7e1deab0293332dfbccf862808/librt-0.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f2ae3725904f7377e11cc37722d5d401e8b3d5851fb9273d7f4fe04f6b3d37d", size = 52441, upload-time = "2026-02-17T16:11:56.801Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3c/f614c8e4eaac7cbf2bbdf9528790b21d89e277ee20d57dc6e559c626105f/librt-0.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e6bad1cd94f6764e1e21950542f818a09316645337fd5ab9a7acc45d99a8f35", size = 66529, upload-time = "2026-02-17T16:11:57.809Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/5836544a45100ae411eda07d29e3d99448e5258b6e9c8059deb92945f5c2/librt-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cf450f498c30af55551ba4f66b9123b7185362ec8b625a773b3d39aa1a717583", size = 68669, upload-time = "2026-02-17T16:11:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/06/53/f0b992b57af6d5531bf4677d75c44f095f2366a1741fb695ee462ae04b05/librt-0.8.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eca45e982fa074090057132e30585a7e8674e9e885d402eae85633e9f449ce6c", size = 199279, upload-time = "2026-02-17T16:11:59.862Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/ad/4848cc16e268d14280d8168aee4f31cea92bbd2b79ce33d3e166f2b4e4fc/librt-0.8.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c3811485fccfda840861905b8c70bba5ec094e02825598bb9d4ca3936857a04", size = 210288, upload-time = "2026-02-17T16:12:00.954Z" }, + { url = "https://files.pythonhosted.org/packages/52/05/27fdc2e95de26273d83b96742d8d3b7345f2ea2bdbd2405cc504644f2096/librt-0.8.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363", size = 224809, upload-time = "2026-02-17T16:12:02.108Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d0/78200a45ba3240cb042bc597d6f2accba9193a2c57d0356268cbbe2d0925/librt-0.8.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5212a5bd7fae98dae95710032902edcd2ec4dc994e883294f75c857b83f9aba0", size = 218075, upload-time = "2026-02-17T16:12:03.631Z" }, + { url = "https://files.pythonhosted.org/packages/af/72/a210839fa74c90474897124c064ffca07f8d4b347b6574d309686aae7ca6/librt-0.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e692aa2d1d604e6ca12d35e51fdc36f4cda6345e28e36374579f7ef3611b3012", size = 225486, upload-time = "2026-02-17T16:12:04.725Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c1/a03cc63722339ddbf087485f253493e2b013039f5b707e8e6016141130fa/librt-0.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4be2a5c926b9770c9e08e717f05737a269b9d0ebc5d2f0060f0fe3fe9ce47acb", size = 218219, upload-time = "2026-02-17T16:12:05.828Z" }, + { url = "https://files.pythonhosted.org/packages/58/f5/fff6108af0acf941c6f274a946aea0e484bd10cd2dc37610287ce49388c5/librt-0.8.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fd1a720332ea335ceb544cf0a03f81df92abd4bb887679fd1e460976b0e6214b", size = 218750, upload-time = "2026-02-17T16:12:07.09Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/67/5a387bfef30ec1e4b4f30562c8586566faf87e47d696768c19feb49e3646/librt-0.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2af9e01e0ef80d95ae3c720be101227edae5f2fe7e3dc63d8857fadfc5a1d", size = 241624, upload-time = "2026-02-17T16:12:08.43Z" }, + { url = "https://files.pythonhosted.org/packages/d4/be/24f8502db11d405232ac1162eb98069ca49c3306c1d75c6ccc61d9af8789/librt-0.8.1-cp313-cp313-win32.whl", hash = "sha256:086a32dbb71336627e78cc1d6ee305a68d038ef7d4c39aaff41ae8c9aa46e91a", size = 54969, upload-time = "2026-02-17T16:12:09.633Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/c9fdf6cb2a529c1a092ce769a12d88c8cca991194dfe641b6af12fa964d2/librt-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:e11769a1dbda4da7b00a76cfffa67aa47cfa66921d2724539eee4b9ede780b79", size = 62000, upload-time = "2026-02-17T16:12:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/68f80ca3ac4924f250cdfa6e20142a803e5e50fca96ef5148c52ee8c10ea/librt-0.8.1-cp313-cp313-win_arm64.whl", hash = "sha256:924817ab3141aca17893386ee13261f1d100d1ef410d70afe4389f2359fea4f0", size = 52495, upload-time = "2026-02-17T16:12:11.633Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = "https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = "https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = "sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = "2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "macaroonbakery" +version = "1.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, + { name = "pymacaroons" }, + 
{ name = "pynacl" }, + { name = "pyrfc3339" }, + { name = "requests" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/ae/59f5ab870640bd43673b708e5f24aed592dc2673cc72caa49b0053b4af37/macaroonbakery-1.3.4.tar.gz", hash = "sha256:41ca993a23e4f8ef2fe7723b5cd4a30c759735f1d5021e990770c8a0e0f33970", size = 82143, upload-time = "2023-12-13T14:22:22.539Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/42/227f748dc222b7a1c5cb40c7c74ab4162c7fc146b88980776b490ab673a1/macaroonbakery-1.3.4-py2.py3-none-any.whl", hash = "sha256:1e952a189f5c1e96ef82b081b2852c770d7daa20987e2088e762dd5689fb253b", size = 103184, upload-time = "2023-12-13T14:22:20.159Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + 
{ url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "matplotlib-inline" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = 
"pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { 
url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "oauthlib" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" 
}, +] + +[[package]] +name = "opentelemetry-api" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, +] + +[[package]] +name = "ops" +version = "3.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "pyyaml" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/72/50bdb29831b8ed92034e9657fec89301d8df3aa8da3da1d37ecbdf1baab6/ops-3.5.2.tar.gz", hash = "sha256:849c9ed85eadf265b8a927d5e857cd112221dd71b35e4b13329ccb938c3afd18", size = 578181, upload-time = "2026-02-11T01:49:48.345Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/5c/84b41a67c2dc904f92f424e981eb65641ce095936fb9e6d7b4a315072d1a/ops-3.5.2-py3-none-any.whl", hash = "sha256:c715128a51ddcdf0fff463428b0f56a93e5963187e599b66594b4fc74458781b", size = 211688, upload-time = "2026-02-11T01:49:43.935Z" }, +] + +[package.optional-dependencies] +testing = [ + { name = "ops-scenario" }, +] + +[[package]] +name = "ops-scenario" +version = "8.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ops" }, + { name = "pyyaml" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/cb/27/a999aa877a34fc1b2c07b0f51cb1dc58a89e23bcaf4f626e28bec39825fd/ops_scenario-8.5.2.tar.gz", hash = "sha256:ebcdc4f8837f9a1cd42624f49d9d8b2502ebeeedad552516225b3420f989c369", size = 71693, upload-time = "2026-02-11T01:49:49.59Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/b1/57816b48087fa391d0b113e067ca80fdd36a2103c57cf9cf28fe5a82f52e/ops_scenario-8.5.2-py3-none-any.whl", hash = "sha256:79125d82ca753394d9d9e4a53c55931d3d0114421c1b745f5611cb5827d37012", size = 64241, upload-time = "2026-02-11T01:49:45.753Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "paramiko" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bcrypt" }, + { name = "cryptography" }, + { name = "invoke" }, + { name = "pynacl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/e7/81fdcbc7f190cdb058cffc9431587eb289833bdd633e2002455ca9bb13d4/paramiko-4.0.0.tar.gz", hash = "sha256:6a25f07b380cc9c9a88d2b920ad37167ac4667f8d9886ccebd8f90f654b5d69f", size = 1630743, upload-time = "2025-08-04T01:02:03.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/90/a744336f5af32c433bd09af7854599682a383b37cfd78f7de263de6ad6cb/paramiko-4.0.0-py3-none-any.whl", hash = 
"sha256:0e20e00ac666503bf0b4eda3b6d833465a2b7aff2e2b3d79a8bba5ef144ee3b9", size = 223932, upload-time = "2025-08-04T01:02:02.029Z" }, +] + +[[package]] +name = "parso" +version = "0.8.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/81/76/a1e769043c0c0c9fe391b702539d594731a4362334cdf4dc25d0c09761e7/parso-0.8.6.tar.gz", hash = "sha256:2b9a0332696df97d454fa67b81618fd69c35a7b90327cbe6ba5c92d2c68a7bfd", size = 401621, upload-time = "2026-02-09T15:45:24.425Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "protobuf" +version = "7.34.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6b/6b/a0e95cad1ad7cc3f2c6821fcab91671bd5b78bd42afb357bb4765f29bc41/protobuf-7.34.1.tar.gz", hash = "sha256:9ce42245e704cc5027be797c1db1eb93184d44d1cdd71811fb2d9b25ad541280", size = 454708, upload-time = "2026-03-20T17:34:47.036Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/11/3325d41e6ee15bf1125654301211247b042563bcc898784351252549a8ad/protobuf-7.34.1-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8b2cc79c4d8f62b293ad9b11ec3aebce9af481fa73e64556969f7345ebf9fc7", size = 429247, upload-time = "2026-03-20T17:34:37.024Z" }, + { url = "https://files.pythonhosted.org/packages/eb/9d/aa69df2724ff63efa6f72307b483ce0827f4347cc6d6df24b59e26659fef/protobuf-7.34.1-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:5185e0e948d07abe94bb76ec9b8416b604cfe5da6f871d67aad30cbf24c3110b", size = 325753, upload-time = "2026-03-20T17:34:38.751Z" }, + { url = "https://files.pythonhosted.org/packages/92/e8/d174c91fd48e50101943f042b09af9029064810b734e4160bbe282fa1caa/protobuf-7.34.1-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:403b093a6e28a960372b44e5eb081775c9b056e816a8029c61231743d63f881a", size = 340198, upload-time = "2026-03-20T17:34:39.871Z" }, + { url = "https://files.pythonhosted.org/packages/53/1b/3b431694a4dc6d37b9f653f0c64b0a0d9ec074ee810710c0c3da21d67ba7/protobuf-7.34.1-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ff40ce8cd688f7265326b38d5a1bed9bfdf5e6723d49961432f83e21d5713e4", size = 324267, upload-time = "2026-03-20T17:34:41.1Z" }, + { url = "https://files.pythonhosted.org/packages/85/29/64de04a0ac142fb685fd09999bc3d337943fb386f3a0ec57f92fd8203f97/protobuf-7.34.1-cp310-abi3-win32.whl", hash = "sha256:34b84ce27680df7cca9f231043ada0daa55d0c44a2ddfaa58ec1d0d89d8bf60a", size = 426628, upload-time = "2026-03-20T17:34:42.536Z" }, + { url = "https://files.pythonhosted.org/packages/4d/87/cb5e585192a22b8bd457df5a2c16a75ea0db9674c3a0a39fc9347d84e075/protobuf-7.34.1-cp310-abi3-win_amd64.whl", 
hash = "sha256:e97b55646e6ce5cbb0954a8c28cd39a5869b59090dfaa7df4598a7fba869468c", size = 437901, upload-time = "2026-03-20T17:34:44.112Z" }, + { url = "https://files.pythonhosted.org/packages/88/95/608f665226bca68b736b79e457fded9a2a38c4f4379a4a7614303d9db3bc/protobuf-7.34.1-py3-none-any.whl", hash = "sha256:bb3812cd53aefea2b028ef42bd780f5b96407247f20c6ef7c679807e9d188f11", size = 170715, upload-time = "2026-03-20T17:34:45.384Z" }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = 
"2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pymacaroons" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pynacl" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/37/b4/52ff00b59e91c4817ca60210c33caf11e85a7f68f7b361748ca2eb50923e/pymacaroons-0.13.0.tar.gz", hash = "sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8", size = 21083, upload-time = "2018-02-21T18:07:49.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/87/fd9b54258216e3f19671f6e9dd76da1ebc49e93ea0107c986b1071dd3068/pymacaroons-0.13.0-py2.py3-none-any.whl", hash = "sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907", size = 19463, upload-time = "2018-02-21T18:07:47.085Z" }, +] + +[[package]] +name = "pynacl" +version = "1.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/9a/4019b524b03a13438637b11538c82781a5eda427394380381af8f04f467a/pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c", size = 3511692, upload-time = 
"2026-01-01T17:48:10.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/79/0e3c34dc3c4671f67d251c07aa8eb100916f250ee470df230b0ab89551b4/pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594", size = 390064, upload-time = "2026-01-01T17:31:57.264Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/23a26e931736e13b16483795c8a6b2f641bf6a3d5238c22b070a5112722c/pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0", size = 809370, upload-time = "2026-01-01T17:31:59.198Z" }, + { url = "https://files.pythonhosted.org/packages/87/74/8d4b718f8a22aea9e8dcc8b95deb76d4aae380e2f5b570cc70b5fd0a852d/pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9", size = 1408304, upload-time = "2026-01-01T17:32:01.162Z" }, + { url = "https://files.pythonhosted.org/packages/fd/73/be4fdd3a6a87fe8a4553380c2b47fbd1f7f58292eb820902f5c8ac7de7b0/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574", size = 844871, upload-time = "2026-01-01T17:32:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/55/ad/6efc57ab75ee4422e96b5f2697d51bbcf6cdcc091e66310df91fbdc144a8/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634", size = 1446356, upload-time = "2026-01-01T17:32:04.452Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/928ee9c4779caa0a915844311ab9fb5f99585621c5d6e4574538a17dca07/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88", size = 826814, upload-time = 
"2026-01-01T17:32:06.078Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a9/1bdba746a2be20f8809fee75c10e3159d75864ef69c6b0dd168fc60e485d/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14", size = 1411742, upload-time = "2026-01-01T17:32:07.651Z" }, + { url = "https://files.pythonhosted.org/packages/f3/2f/5e7ea8d85f9f3ea5b6b87db1d8388daa3587eed181bdeb0306816fdbbe79/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444", size = 801714, upload-time = "2026-01-01T17:32:09.558Z" }, + { url = "https://files.pythonhosted.org/packages/06/ea/43fe2f7eab5f200e40fb10d305bf6f87ea31b3bbc83443eac37cd34a9e1e/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b", size = 1372257, upload-time = "2026-01-01T17:32:11.026Z" }, + { url = "https://files.pythonhosted.org/packages/4d/54/c9ea116412788629b1347e415f72195c25eb2f3809b2d3e7b25f5c79f13a/pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145", size = 231319, upload-time = "2026-01-01T17:32:12.46Z" }, + { url = "https://files.pythonhosted.org/packages/ce/04/64e9d76646abac2dccf904fccba352a86e7d172647557f35b9fe2a5ee4a1/pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590", size = 244044, upload-time = "2026-01-01T17:32:13.781Z" }, + { url = "https://files.pythonhosted.org/packages/33/33/7873dc161c6a06f43cda13dec67b6fe152cb2f982581151956fa5e5cdb47/pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2", size = 188740, upload-time = "2026-01-01T17:32:15.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465", size = 388458, upload-time = "2026-01-01T17:32:16.829Z" }, + { url = "https://files.pythonhosted.org/packages/1e/b4/e927e0653ba63b02a4ca5b4d852a8d1d678afbf69b3dbf9c4d0785ac905c/pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0", size = 800020, upload-time = "2026-01-01T17:32:18.34Z" }, + { url = "https://files.pythonhosted.org/packages/7f/81/d60984052df5c97b1d24365bc1e30024379b42c4edcd79d2436b1b9806f2/pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4", size = 1399174, upload-time = "2026-01-01T17:32:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/68/f7/322f2f9915c4ef27d140101dd0ed26b479f7e6f5f183590fd32dfc48c4d3/pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87", size = 835085, upload-time = "2026-01-01T17:32:22.24Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c", size = 1437614, upload-time = "2026-01-01T17:32:23.766Z" }, + { url = "https://files.pythonhosted.org/packages/c4/58/fc6e649762b029315325ace1a8c6be66125e42f67416d3dbd47b69563d61/pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130", size = 818251, upload-time = "2026-01-01T17:32:25.69Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/a8/b917096b1accc9acd878819a49d3d84875731a41eb665f6ebc826b1af99e/pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6", size = 1402859, upload-time = "2026-01-01T17:32:27.215Z" }, + { url = "https://files.pythonhosted.org/packages/85/42/fe60b5f4473e12c72f977548e4028156f4d340b884c635ec6b063fe7e9a5/pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e", size = 791926, upload-time = "2026-01-01T17:32:29.314Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f9/e40e318c604259301cc091a2a63f237d9e7b424c4851cafaea4ea7c4834e/pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577", size = 1363101, upload-time = "2026-01-01T17:32:31.263Z" }, + { url = "https://files.pythonhosted.org/packages/48/47/e761c254f410c023a469284a9bc210933e18588ca87706ae93002c05114c/pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa", size = 227421, upload-time = "2026-01-01T17:32:33.076Z" }, + { url = "https://files.pythonhosted.org/packages/41/ad/334600e8cacc7d86587fe5f565480fde569dfb487389c8e1be56ac21d8ac/pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0", size = 239754, upload-time = "2026-01-01T17:32:34.557Z" }, + { url = "https://files.pythonhosted.org/packages/29/7d/5945b5af29534641820d3bd7b00962abbbdfee84ec7e19f0d5b3175f9a31/pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c", size = 184801, upload-time = "2026-01-01T17:32:36.309Z" }, +] + +[[package]] +name = "pyrfc3339" +version = "1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytz" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/00/52/75ea0ae249ba885c9429e421b4f94bc154df68484847f1ac164287d978d7/pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a", size = 5290, upload-time = "2018-06-11T00:26:31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/7a/725f5c16756ec6211b1e7eeac09f469084595513917ea069bc023c40a5e2/pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4", size = 5669, upload-time = "2018-06-11T00:22:40.934Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/53/57663d99acaac2fcdafdc697e52a9b1b7d6fcf36616281ff9768a44e7ff3/pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45", size = 30656, upload-time = "2024-04-29T13:23:24.738Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9c/ce/1e4b53c213dce25d6e8b163697fbce2d43799d76fa08eea6ad270451c370/pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b", size = 13368, upload-time = "2024-04-29T13:23:23.126Z" }, +] + +[[package]] +name = "pytest-operator" +version = "0.43.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ipdb" }, + { name = "jinja2" }, + { name = "juju" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/48/880facbcaca080b852854f60fd46b37ca2985b27dc7f1713857d9de6d469/pytest_operator-0.43.2.tar.gz", hash = "sha256:3db34dcd9c114a2e41a9bc61da72daf1264e7644fd5b92e855f250cb337e01c3", size = 149628, upload-time = "2025-10-04T14:38:45.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/a0/22b018287f4ff7a36cdb79745c196f28897184d27e18b90177f5dfc2a0f4/pytest_operator-0.43.2-py3-none-any.whl", hash = "sha256:d7d01ffe35d14b75577fd80a07c34f0a9f4835cfc6d373b8e2f995bcb4146bda", size = 48322, upload-time = "2025-10-04T14:38:43.763Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pytz" 
+version = "2026.1.post1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", 
size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, 
upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = 
"sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = 
"2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/14/b0/73cf7550861e2b4824950b8b52eebdcc5adc792a00c514406556c5b80817/ruff-0.15.8.tar.gz", hash = "sha256:995f11f63597ee362130d1d5a327a87cb6f3f5eae3094c620bcc632329a4d26e", size = 4610921, upload-time = "2026-03-26T18:39:38.675Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/92/c445b0cd6da6e7ae51e954939cb69f97e008dbe750cfca89b8cedc081be7/ruff-0.15.8-py3-none-linux_armv6l.whl", hash = "sha256:cbe05adeba76d58162762d6b239c9056f1a15a55bd4b346cfd21e26cd6ad7bc7", size = 10527394, upload-time = "2026-03-26T18:39:41.566Z" }, + { url = "https://files.pythonhosted.org/packages/eb/92/f1c662784d149ad1414cae450b082cf736430c12ca78367f20f5ed569d65/ruff-0.15.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d3e3d0b6ba8dca1b7ef9ab80a28e840a20070c4b62e56d675c24f366ef330570", size = 10905693, upload-time = "2026-03-26T18:39:30.364Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f2/7a631a8af6d88bcef997eb1bf87cc3da158294c57044aafd3e17030613de/ruff-0.15.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ee3ae5c65a42f273f126686353f2e08ff29927b7b7e203b711514370d500de3", size = 10323044, upload-time = "2026-03-26T18:39:33.37Z" }, + { url = "https://files.pythonhosted.org/packages/67/18/1bf38e20914a05e72ef3b9569b1d5c70a7ef26cd188d69e9ca8ef588d5bf/ruff-0.15.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdce027ada77baa448077ccc6ebb2fa9c3c62fd110d8659d601cf2f475858d94", size = 10629135, upload-time = "2026-03-26T18:39:44.142Z" }, + { url = "https://files.pythonhosted.org/packages/d2/e9/138c150ff9af60556121623d41aba18b7b57d95ac032e177b6a53789d279/ruff-0.15.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12e617fc01a95e5821648a6df341d80456bd627bfab8a829f7cfc26a14a4b4a3", size = 10348041, upload-time = 
"2026-03-26T18:39:52.178Z" }, + { url = "https://files.pythonhosted.org/packages/02/f1/5bfb9298d9c323f842c5ddeb85f1f10ef51516ac7a34ba446c9347d898df/ruff-0.15.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:432701303b26416d22ba696c39f2c6f12499b89093b61360abc34bcc9bf07762", size = 11121987, upload-time = "2026-03-26T18:39:55.195Z" }, + { url = "https://files.pythonhosted.org/packages/10/11/6da2e538704e753c04e8d86b1fc55712fdbdcc266af1a1ece7a51fff0d10/ruff-0.15.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d910ae974b7a06a33a057cb87d2a10792a3b2b3b35e33d2699fdf63ec8f6b17a", size = 11951057, upload-time = "2026-03-26T18:39:19.18Z" }, + { url = "https://files.pythonhosted.org/packages/83/f0/c9208c5fd5101bf87002fed774ff25a96eea313d305f1e5d5744698dc314/ruff-0.15.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2033f963c43949d51e6fdccd3946633c6b37c484f5f98c3035f49c27395a8ab8", size = 11464613, upload-time = "2026-03-26T18:40:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/f8/22/d7f2fabdba4fae9f3b570e5605d5eb4500dcb7b770d3217dca4428484b17/ruff-0.15.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f29b989a55572fb885b77464cf24af05500806ab4edf9a0fd8977f9759d85b1", size = 11257557, upload-time = "2026-03-26T18:39:57.972Z" }, + { url = "https://files.pythonhosted.org/packages/71/8c/382a9620038cf6906446b23ce8632ab8c0811b8f9d3e764f58bedd0c9a6f/ruff-0.15.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:ac51d486bf457cdc985a412fb1801b2dfd1bd8838372fc55de64b1510eff4bec", size = 11169440, upload-time = "2026-03-26T18:39:22.205Z" }, + { url = "https://files.pythonhosted.org/packages/4d/0d/0994c802a7eaaf99380085e4e40c845f8e32a562e20a38ec06174b52ef24/ruff-0.15.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c9861eb959edab053c10ad62c278835ee69ca527b6dcd72b47d5c1e5648964f6", size = 10605963, upload-time = "2026-03-26T18:39:46.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/aa/d624b86f5b0aad7cef6bbf9cd47a6a02dfdc4f72c92a337d724e39c9d14b/ruff-0.15.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8d9a5b8ea13f26ae90838afc33f91b547e61b794865374f114f349e9036835fb", size = 10357484, upload-time = "2026-03-26T18:39:49.176Z" }, + { url = "https://files.pythonhosted.org/packages/35/c3/e0b7835d23001f7d999f3895c6b569927c4d39912286897f625736e1fd04/ruff-0.15.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c2a33a529fb3cbc23a7124b5c6ff121e4d6228029cba374777bd7649cc8598b8", size = 10830426, upload-time = "2026-03-26T18:40:03.702Z" }, + { url = "https://files.pythonhosted.org/packages/f0/51/ab20b322f637b369383adc341d761eaaa0f0203d6b9a7421cd6e783d81b9/ruff-0.15.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:75e5cd06b1cf3f47a3996cfc999226b19aa92e7cce682dcd62f80d7035f98f49", size = 11345125, upload-time = "2026-03-26T18:39:27.799Z" }, + { url = "https://files.pythonhosted.org/packages/37/e6/90b2b33419f59d0f2c4c8a48a4b74b460709a557e8e0064cf33ad894f983/ruff-0.15.8-py3-none-win32.whl", hash = "sha256:bc1f0a51254ba21767bfa9a8b5013ca8149dcf38092e6a9eb704d876de94dc34", size = 10571959, upload-time = "2026-03-26T18:39:36.117Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a2/ef467cb77099062317154c63f234b8a7baf7cb690b99af760c5b68b9ee7f/ruff-0.15.8-py3-none-win_amd64.whl", hash = "sha256:04f79eff02a72db209d47d665ba7ebcad609d8918a134f86cb13dd132159fc89", size = 11743893, upload-time = "2026-03-26T18:39:25.01Z" }, + { url = "https://files.pythonhosted.org/packages/15/e2/77be4fff062fa78d9b2a4dea85d14785dac5f1d0c1fb58ed52331f0ebe28/ruff-0.15.8-py3-none-win_arm64.whl", hash = "sha256:cf891fa8e3bb430c0e7fac93851a5978fc99c8fa2c053b57b118972866f8e5f2", size = 11048175, upload-time = "2026-03-26T18:40:01.06Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens" }, + { name = "executing" }, + { name = "pure-eval" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, +] + +[[package]] +name = "stevedore" +version = "5.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6d/90764092216fa560f6587f83bb70113a8ba510ba436c6476a2b47359057c/stevedore-5.7.0.tar.gz", hash = "sha256:31dd6fe6b3cbe921e21dcefabc9a5f1cf848cf538a1f27543721b8ca09948aa3", size = 516200, upload-time = "2026-02-20T13:27:06.765Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/06/36d260a695f383345ab5bbc3fd447249594ae2fa8dfd19c533d5ae23f46b/stevedore-5.7.0-py3-none-any.whl", hash = 
"sha256:fd25efbb32f1abb4c9e502f385f0018632baac11f9ee5d1b70f88cc5e22ad4ed", size = 54483, upload-time = "2026-02-20T13:27:05.561Z" }, +] + +[[package]] +name = "toposort" +version = "1.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/19/8e955d90985ecbd3b9adb2a759753a6840da2dff3c569d412b2c9217678b/toposort-1.10.tar.gz", hash = "sha256:bfbb479c53d0a696ea7402601f4e693c97b0367837c8898bc6471adfca37a6bd", size = 11132, upload-time = "2023-02-27T13:59:51.834Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/17/57b444fd314d5e1593350b9a31d000e7411ba8e17ce12dc7ad54ca76b810/toposort-1.10-py3-none-any.whl", hash = "sha256:cbdbc0d0bee4d2695ab2ceec97fe0679e9c10eab4b2a87a9372b929e70563a87", size = 8500, upload-time = "2023-02-25T20:07:06.538Z" }, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250915" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, +] + +[[package]] +name = "types-requests" +version = "2.33.0.20260327" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/5f/2e3dbae6e21be6ae026563bad96cbf76602d73aa85ea09f13419ddbdabb4/types_requests-2.33.0.20260327.tar.gz", hash = "sha256:f4f74f0b44f059e3db420ff17bd1966e3587cdd34062fe38a23cda97868f8dd8", size = 23804, upload-time = "2026-03-27T04:23:38.737Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/55/951e733616c92cb96b57554746d2f65f4464d080cc2cc093605f897aba89/types_requests-2.33.0.20260327-py3-none-any.whl", hash = "sha256:fde0712be6d7c9a4d490042d6323115baf872d9a71a22900809d0432de15776e", size = 20737, upload-time = "2026-03-27T04:23:37.813Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "wcwidth" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = 
"2026-02-06T19:19:39.646Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url 
= "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] diff --git a/tox.toml b/tox.toml index 9badf371d..00b5eea65 100644 --- a/tox.toml +++ b/tox.toml @@ -88,6 
+88,7 @@ commands = [ "--ignore={toxinidir}/haproxy-spoe-auth-operator", "--ignore={toxinidir}/haproxy-operator", "--ignore={toxinidir}/haproxy-route-policy", + "--ignore={toxinidir}/haproxy-route-policy-operator", "--log-cli-level=INFO", "-s", { replace = "posargs", extend = "true" }, From 09a8ee2fd34f2aabbb7a3242295aab76ea57d7b6 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 02:18:02 +0200 Subject: [PATCH 118/201] simplify charm mbp --- haproxy-route-policy-operator/charmcraft.yaml | 23 --------- haproxy-route-policy-operator/src/charm.py | 47 ++----------------- haproxy-route-policy-operator/src/snap.py | 2 +- .../tests/unit/test_charm.py | 21 +-------- 4 files changed, 6 insertions(+), 87 deletions(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 7d6f06461..2956a0c9e 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -39,26 +39,3 @@ requires: description: PostgreSQL database used by the route policy service. limit: 1 optional: false - -config: - options: - snap-channel: - type: string - default: latest/edge - description: Snap channel used to install haproxy-route-policy. - debug: - type: boolean - default: false - description: Enable Django debug mode for the route policy service. - log-level: - type: string - default: info - description: Log level for Django logs. - allowed-hosts: - type: string - default: '["*"]' - description: JSON array of allowed hosts for Django. - secret-key: - type: string - default: "" - description: Optional Django secret key. If empty, the charm generates one. 
diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 92f7d1df8..691b45649 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -5,11 +5,7 @@ """haproxy-route-policy-operator charm.""" -from __future__ import annotations - -import json import logging -import secrets import subprocess from typing import Any @@ -21,17 +17,13 @@ logger = logging.getLogger(__name__) POSTGRESQL_RELATION = "postgresql" -VALID_LOG_LEVELS = {"debug", "info", "warning", "error", "critical"} class HaproxyRoutePolicyCharm(ops.CharmBase): """Charm for HAProxy Route Policy service.""" - _stored = ops.StoredState() - def __init__(self, *args: Any): super().__init__(*args) - self._stored.set_default(secret_key="") self.framework.observe(self.on.install, self._install) self.framework.observe(self.on.upgrade_charm, self._install) @@ -45,10 +37,9 @@ def __init__(self, *args: Any): def _install(self, _: ops.EventBase) -> None: """Install the route-policy snap.""" - channel = str(self.model.config["snap-channel"]) self.unit.status = ops.MaintenanceStatus("installing haproxy-route-policy snap") try: - snap.install_snap(channel=channel) + snap.install_snap() except snap_lib.SnapError as exc: logger.exception("Failed to install haproxy-route-policy snap") self.unit.status = ops.BlockedStatus(f"snap installation failed: {exc}") @@ -63,51 +54,19 @@ def _reconcile(self, _: ops.EventBase) -> None: return try: - snap_config = { - "secret-key": self._get_secret_key(), - "debug": bool(self.model.config["debug"]), - "allowed-hosts": self._validated_allowed_hosts(), - "log-level": self._validated_log_level(), - **credentials, - } self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") - snap.configure_snap(snap_config) + snap.configure_snap(credentials) self.unit.status = ops.MaintenanceStatus("running database migrations") snap.run_migrations() self.unit.status = 
ops.MaintenanceStatus("starting gunicorn service") snap.start_gunicorn_service() - except (ValueError, snap_lib.SnapError, subprocess.CalledProcessError) as exc: + except (snap_lib.SnapError, subprocess.CalledProcessError) as exc: logger.exception("Failed to reconcile haproxy-route-policy service") self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") return self.unit.status = ops.ActiveStatus() - def _get_secret_key(self) -> str: - """Get a stable secret key for Django.""" - config_secret_key = str(self.model.config["secret-key"]).strip() - if config_secret_key: - return config_secret_key - if self._stored.secret_key: - return self._stored.secret_key - self._stored.secret_key = secrets.token_urlsafe(48) - return self._stored.secret_key - - def _validated_allowed_hosts(self) -> str: - """Validate allowed-hosts config and return it in JSON string form.""" - raw_value = str(self.model.config["allowed-hosts"]) - parsed = json.loads(raw_value) - if not isinstance(parsed, list) or not all(isinstance(host, str) for host in parsed): - raise ValueError("allowed-hosts must be a JSON array of strings") - return raw_value - - def _validated_log_level(self) -> str: - """Validate log-level config.""" - log_level = str(self.model.config["log-level"]).lower() - if log_level not in VALID_LOG_LEVELS: - raise ValueError(f"log-level must be one of {', '.join(sorted(VALID_LOG_LEVELS))}") - return log_level - def _get_postgresql_credentials(self) -> dict[str, str] | None: """Read PostgreSQL credentials from relation databag.""" relation = self.model.get_relation(POSTGRESQL_RELATION) diff --git a/haproxy-route-policy-operator/src/snap.py b/haproxy-route-policy-operator/src/snap.py index e4b96426a..dd8e16e6e 100644 --- a/haproxy-route-policy-operator/src/snap.py +++ b/haproxy-route-policy-operator/src/snap.py @@ -13,7 +13,7 @@ SNAP_NAME = "haproxy-route-policy" -def install_snap(channel: str) -> None: +def install_snap(channel: str = "latest/edge") -> None: """Install or 
refresh the route-policy snap.""" cache = snap.SnapCache() package = cache[SNAP_NAME] diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 065de7ad8..02fafa6bf 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -30,12 +30,12 @@ def test_install_without_relation_sets_waiting_status(): assert: snap install is invoked and unit waits for postgresql relation data. """ ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(config={"snap-channel": "latest/edge"}) + state = testing.State() with patch("charm.snap.install_snap") as install_snap_mock: out = ctx.run(ctx.on.install(), state) - install_snap_mock.assert_called_once_with(channel="latest/edge") + install_snap_mock.assert_called_once_with() assert isinstance(out.unit_status, testing.WaitingStatus) @@ -59,20 +59,3 @@ def test_config_changed_reconciles_snap_with_postgresql_credentials(): configure_mock.assert_called_once() migrate_mock.assert_called_once() start_mock.assert_called_once() - - -def test_config_changed_with_invalid_log_level_sets_blocked_status(): - """ - arrange: create charm context with relation and invalid log-level config. - act: run config-changed event. - assert: charm sets blocked status. 
- """ - ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State( - relations=[_postgresql_relation()], - config={"log-level": "invalid"}, - ) - - out = ctx.run(ctx.on.config_changed(), state) - - assert isinstance(out.unit_status, testing.BlockedStatus) From 3c94b13dadf906d01db4753980b4af8dbbdefee9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 17:36:10 +0200 Subject: [PATCH 119/201] properly handles postgresql charm state and fix lint errors --- haproxy-route-policy-operator/charmcraft.yaml | 8 +- .../data_platform_libs/v0/data_interfaces.py | 5782 +++++++++++++++++ haproxy-route-policy-operator/pyproject.toml | 1 + haproxy-route-policy-operator/src/charm.py | 94 +- .../src/{snap.py => policy.py} | 10 +- .../src/state/database.py | 87 + haproxy-route-policy-operator/uv.lock | 109 + 7 files changed, 6023 insertions(+), 68 deletions(-) create mode 100644 haproxy-route-policy-operator/lib/charms/data_platform_libs/v0/data_interfaces.py rename haproxy-route-policy-operator/src/{snap.py => policy.py} (78%) create mode 100644 haproxy-route-policy-operator/src/state/database.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 2956a0c9e..a44df3f51 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -34,8 +34,12 @@ assumes: - juju >= 3.3 requires: - postgresql: + database: interface: postgresql_client - description: PostgreSQL database used by the route policy service. + description: Database used by the haproxy-route-policy service. 
limit: 1 optional: false + +charm-libs: + - lib: data_platform_libs.data_interfaces + version: "0" diff --git a/haproxy-route-policy-operator/lib/charms/data_platform_libs/v0/data_interfaces.py b/haproxy-route-policy-operator/lib/charms/data_platform_libs/v0/data_interfaces.py new file mode 100644 index 000000000..5be1d9315 --- /dev/null +++ b/haproxy-route-policy-operator/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -0,0 +1,5782 @@ +# Copyright 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r"""Library to manage the relation for the data-platform products. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +### Database (MySQL, Postgresql, MongoDB, and Redis) + +#### Requires Charm +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +Following an example of using the DatabaseCreatedEvent, in the context of the +application charm code: + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + DatabaseCreatedEvent, + DatabaseRequires, + DatabaseEntityCreatedEvent, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. 
+ + def __init__(self, *args): + super().__init__(*args) + + # Charm events defined in the database requires charm library. + self.database = DatabaseRequires(self, relation_name="database", database_name="database") + self.framework.observe(self.database.on.database_created, self._on_database_created) + self.framework.observe(self.database.on.database_entity_created, self._on_database_entity_created) + + def _on_database_created(self, event: DatabaseCreatedEvent) -> None: + # Handle the created database + + # Create configuration file for app + config_file = self._render_app_config_file( + event.username, + event.password, + event.endpoints, + ) + + # Start application with rendered configuration + self._start_application(config_file) + + # Set active status + self.unit.status = ActiveStatus("received database credentials") + + def _on_database_entity_created(self, event: DatabaseEntityCreatedEvent) -> None: + # Handle the created entity + ... +``` + +As shown above, the library provides some custom events to handle specific situations, +which are listed below: + +- database_created: event emitted when the requested database is created. +- database_entity_created: event emitted when the requested entity is created. +- endpoints_changed: event emitted when the read/write endpoints of the database have changed. +- read_only_endpoints_changed: event emitted when the read-only endpoints of the database + have changed. Event is not triggered if read/write endpoints changed too. + +If it is needed to connect multiple database clusters to the same relation endpoint +the application charm can implement the same code as if it would connect to only +one database cluster (like the above code example). 
+ +To differentiate multiple clusters connected to the same relation endpoint +the application charm can use the name of the remote application: + +```python + +def _on_database_created(self, event: DatabaseCreatedEvent) -> None: + # Get the remote app name of the cluster that triggered this event + cluster = event.relation.app.name +``` + +It is also possible to provide an alias for each different database cluster/relation. + +So, it is possible to differentiate the clusters in two ways. +The first is to use the remote application name, i.e., `event.relation.app.name`, as above. + +The second way is to use different event handlers to handle each cluster events. +The implementation would be something like the following code: + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + DatabaseCreatedEvent, + DatabaseRequires, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. + + def __init__(self, *args): + super().__init__(*args) + + # Define the cluster aliases and one handler for each cluster database created event. + self.database = DatabaseRequires( + self, + relation_name="database", + database_name="database", + relations_aliases = ["cluster1", "cluster2"], + ) + self.framework.observe( + self.database.on.cluster1_database_created, self._on_cluster1_database_created + ) + self.framework.observe( + self.database.on.cluster2_database_created, self._on_cluster2_database_created + ) + + def _on_cluster1_database_created(self, event: DatabaseCreatedEvent) -> None: + # Handle the created database on the cluster named cluster1 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.username, + event.password, + event.endpoints, + ) + ... 
+ + def _on_cluster2_database_created(self, event: DatabaseCreatedEvent) -> None: + # Handle the created database on the cluster named cluster2 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.username, + event.password, + event.endpoints, + ) + ... +``` + +When it's needed to check whether a plugin (extension) is enabled on the PostgreSQL +charm, you can use the is_postgresql_plugin_enabled method. To use that, you need to +add the following dependency to your charmcraft.yaml file: + +```yaml + +parts: + charm: + charm-binary-python-packages: + - psycopg[binary] +``` + +### Provider Charm + +Following an example of using the DatabaseRequestedEvent, in the context of the +database charm code: + +```python +from charms.data_platform_libs.v0.data_interfaces import DatabaseProvides + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + # Charm events defined in the database provides charm library. + self.provided_database = DatabaseProvides(self, relation_name="database") + self.framework.observe(self.provided_database.on.database_requested, + self._on_database_requested) + # Database generic helper + self.database = DatabaseHelper() + + def _on_database_requested(self, event: DatabaseRequestedEvent) -> None: + # Handle the event triggered by a new database requested in the relation + # Retrieve the database name using the charm library. + db_name = event.database + # generate a new user credential + username = self.database.generate_user() + password = self.database.generate_password() + # set the credentials for the relation + self.provided_database.set_credentials(event.relation.id, username, password) + # set other variables for the relation event.set_tls("False") +``` + +As shown above, the library provides a custom event (database_requested) to handle +the situation when an application charm requests a new database to be created. 
+It's preferred to subscribe to this event instead of relation changed event to avoid +creating a new database when other information other than a database name is +exchanged in the relation databag. + +### Kafka + +This library is the interface to use and interact with the Kafka charm. This library contains +custom events that add convenience to manage Kafka, and provides methods to consume the +application related data. + +#### Requirer Charm + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + BootstrapServerChangedEvent, + KafkaRequires, + TopicCreatedEvent, + TopicEntityCreatedEvent, +) + +class ApplicationCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.kafka = KafkaRequires(self, "kafka_client", "test-topic") + self.framework.observe( + self.kafka.on.bootstrap_server_changed, self._on_kafka_bootstrap_server_changed + ) + self.framework.observe( + self.kafka.on.topic_created, self._on_kafka_topic_created + ) + self.framework.observe( + self.kafka.on.topic_entity_created, self._on_kafka_topic_entity_created + ) + + def _on_kafka_bootstrap_server_changed(self, event: BootstrapServerChangedEvent): + # Event triggered when a bootstrap server was changed for this application + + new_bootstrap_server = event.bootstrap_server + ... + + def _on_kafka_topic_created(self, event: TopicCreatedEvent): + # Event triggered when a topic was created for this application + username = event.username + password = event.password + tls = event.tls + tls_ca= event.tls_ca + bootstrap_server event.bootstrap_server + consumer_group_prefic = event.consumer_group_prefix + zookeeper_uris = event.zookeeper_uris + ... + + def _on_kafka_topic_entity_created(self, event: TopicEntityCreatedEvent): + # Event triggered when an entity was created for this application + ... 
+``` + +As shown above, the library provides some custom events to handle specific situations, +which are listed below: + +- topic_created: event emitted when the requested topic is created. +- bootstrap_server_changed: event emitted when the bootstrap server have changed. +- credential_changed: event emitted when the credentials of Kafka changed. + +### Provider Charm + +Following the previous example, this is an example of the provider charm. + +```python +class SampleCharm(CharmBase): + +from charms.data_platform_libs.v0.data_interfaces import ( + KafkaProvides, + TopicRequestedEvent, +) + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the Kafka Provides charm library. + self.kafka_provider = KafkaProvides(self, relation_name="kafka_client") + self.framework.observe(self.kafka_provider.on.topic_requested, self._on_topic_requested) + self.framework.observe(self.kafka_provider.on.topic_entity_requested, self._on_entity_requested) + # Kafka generic helper + self.kafka = KafkaHelper() + + def _on_topic_requested(self, event: TopicRequestedEvent): + # Handle the on_topic_requested event. + + topic = event.topic + relation_id = event.relation.id + # set connection info in the databag relation + self.kafka_provider.set_bootstrap_server(relation_id, self.kafka.get_bootstrap_server()) + self.kafka_provider.set_credentials(relation_id, username=username, password=password) + self.kafka_provider.set_consumer_group_prefix(relation_id, ...) + self.kafka_provider.set_tls(relation_id, "False") + self.kafka_provider.set_zookeeper_uris(relation_id, ...) + + def _on_entity_requested(self, event: EntityRequestedEvent): + # Handle the on_topic_entity_requested event. + ... +``` +As shown above, the library provides a custom event (topic_requested) to handle +the situation when an application charm requests a new topic to be created. 
+It is preferred to subscribe to this event instead of relation changed event to avoid +creating a new topic when other information other than a topic name is +exchanged in the relation databag. + +### Karapace + +This library is the interface to use and interact with the Karapace charm. This library contains +custom events that add convenience to manage Karapace, and provides methods to consume the +application related data. + +#### Requirer Charm + +```python + +from charms.data_platform_libs.v0.data_interfaces import ( + EndpointsChangedEvent, + KarapaceRequires, + SubjectAllowedEvent, +) + +class ApplicationCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + self.karapace = KarapaceRequires(self, relation_name="karapace_client", subject="test-subject") + self.framework.observe( + self.karapace.on.server_changed, self._on_karapace_server_changed + ) + self.framework.observe( + self.karapace.on.subject_allowed, self._on_karapace_subject_allowed + ) + self.framework.observe( + self.karapace.on.subject_entity_created, self._on_subject_entity_created + ) + + + def _on_karapace_server_changed(self, event: EndpointsChangedEvent): + # Event triggered when a server endpoint was changed for this application + new_server = event.endpoints + ... + + def _on_karapace_subject_allowed(self, event: SubjectAllowedEvent): + # Event triggered when a subject was allowed for this application + username = event.username + password = event.password + tls = event.tls + endpoints = event.endpoints + ... + + def _on_subject_entity_created(self, event: SubjectEntityCreatedEvent): + # Event triggered when a subject entity was created this application + entity_name = event.entity_name + entity_password = event.entity_password + ... +``` + +As shown above, the library provides some custom events to handle specific situations, +which are listed below: + +- subject_allowed: event emitted when the requested subject is allowed. 
+- server_changed: event emitted when the server endpoints have changed. + +#### Provider Charm + +Following the previous example, this is an example of the provider charm. + +```python +class SampleCharm(CharmBase): + +from charms.data_platform_libs.v0.data_interfaces import ( + KarapaceProvides, + SubjectRequestedEvent, +) + + def __init__(self, *args): + super().__init__(*args) + + # Default charm events. + self.framework.observe(self.on.start, self._on_start) + + # Charm events defined in the Karapace Provides charm library. + self.karapace_provider = KarapaceProvides(self, relation_name="karapace_client") + self.framework.observe(self.karapace_provider.on.subject_requested, self._on_subject_requested) + # Karapace generic helper + self.karapace = KarapaceHelper() + + def _on_subject_requested(self, event: SubjectRequestedEvent): + # Handle the on_subject_requested event. + + subject = event.subject + relation_id = event.relation.id + # set connection info in the databag relation + self.karapace_provider.set_endpoint(relation_id, self.karapace.get_endpoint()) + self.karapace_provider.set_credentials(relation_id, username=username, password=password) + self.karapace_provider.set_tls(relation_id, "False") +``` + +As shown above, the library provides a custom event (subject_requested) to handle +the situation when an application charm requests a new subject to be created. +It is preferred to subscribe to this event instead of relation changed event to avoid +creating a new subject when other information other than a subject name is +exchanged in the relation databag. 
+""" + +import copy +import json +import logging +from abc import ABC, abstractmethod +from collections import UserDict, namedtuple +from dataclasses import asdict, dataclass +from datetime import datetime +from enum import Enum +from os import PathLike +from pathlib import Path +from typing import ( + Callable, + Dict, + Final, + ItemsView, + KeysView, + List, + Optional, + Set, + Tuple, + TypedDict, + Union, + ValuesView, + overload, +) + +from ops import JujuVersion, Model, Secret, SecretInfo, SecretNotFoundError +from ops.charm import ( + CharmBase, + CharmEvents, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + SecretChangedEvent, +) +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit + +# The unique Charmhub library identifier, never change it +LIBID = "6c3e6b6680d64e9c89e611d1a15f65be" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 58 + +PYDEPS = ["ops>=2.0.0"] + +# Starting from what LIBPATCH number to apply legacy solutions +# v0.17 was the last version without secrets +LEGACY_SUPPORT_FROM = 17 + +logger = logging.getLogger(__name__) + +Diff = namedtuple("Diff", "added changed deleted") +Diff.__doc__ = """ +A tuple for storing the diff between two data mappings. 
+ +added - keys that were added +changed - keys that still exist but have new values +deleted - key that were deleted""" + +OptionalPathLike = Optional[Union[PathLike, str]] + +ENTITY_USER = "USER" +ENTITY_GROUP = "GROUP" + +PROV_SECRET_PREFIX = "secret-" +PROV_SECRET_FIELDS = "provided-secrets" +REQ_SECRET_FIELDS = "requested-secrets" +STATUS_FIELD = "status" +GROUP_MAPPING_FIELD = "secret_group_mapping" +GROUP_SEPARATOR = "@" + +MODEL_ERRORS = { + "not_leader": "this unit is not the leader", + "no_label_and_uri": "ERROR either URI or label should be used for getting an owned secret but not both", + "owner_no_refresh": "ERROR secret owner cannot use --refresh", +} + + +############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class SecretsIllegalUpdateError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +class PrematureDataAccessError(DataInterfacesError): + """To be raised when the Relation Data may be accessed (written) before protocol init complete.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + +############################################################################## +# Databag handling and comparison methods 
+############################################################################## + + +def get_encoded_dict( + relation: Relation, member: Union[Unit, Application], field: str +) -> Optional[Dict[str, str]]: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "{}")) + if isinstance(data, dict): + return data + logger.error("Unexpected datatype for %s instead of dict.", str(data)) + + +def get_encoded_list( + relation: Relation, member: Union[Unit, Application], field: str +) -> Optional[List[str]]: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, "[]")) + if isinstance(data, list): + return data + logger.error("Unexpected datatype for %s instead of list.", str(data)) + + +def set_encoded_field( + relation: Relation, + member: Union[Unit, Application], + field: str, + value: Union[str, list, Dict[str, str]], +) -> None: + """Set an encoded field from relation data.""" + relation.data[member].update({field: json.dumps(value)}) + + +def diff(event: RelationChangedEvent, bucket: Optional[Union[Unit, Application]]) -> Diff: + """Retrieves the diff of the data in the relation changed databag. + + Args: + event: relation changed event. + bucket: bucket of the databag (app or unit) + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + # Retrieve the old data from the data key in the application relation databag. + if not bucket: + return Diff([], [], []) + + old_data = get_encoded_dict(event.relation, bucket, "data") + + if not old_data: + old_data = {} + + # Retrieve the new data from the event relation databag. + new_data = ( + {key: value for key, value in event.relation.data[event.app].items() if key != "data"} + if event.app + else {} + ) + + # These are the keys that were added to the databag and triggered this event. 
+ added = new_data.keys() - old_data.keys() # pyright: ignore [reportAssignmentType] + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() # pyright: ignore [reportAssignmentType] + # These are the keys that already existed in the databag, + # but had their values changed. + changed = { + key + for key in old_data.keys() & new_data.keys() # pyright: ignore [reportAssignmentType] + if old_data[key] != new_data[key] # pyright: ignore [reportAssignmentType] + } + # Convert the new_data to a serializable format and save it for a next diff check. + set_encoded_field(event.relation, bucket, "data", new_data) + + # Return the diff with all possible changes. + return Diff(added, changed, deleted) + + +############################################################################## +# Module decorators +############################################################################## + + +def leader_only(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self.local_app and not self.local_unit.is_leader(): + logger.error( + "This operation (%s()) can only be performed by the leader unit", f.__name__ + ) + return + return f(self, *args, **kwargs) + + wrapper.leader_only = True + return wrapper + + +def juju_secrets_only(f): + """Decorator to ensure that certain operations would be only executed on Juju3.""" + + def wrapper(self, *args, **kwargs): + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + return f(self, *args, **kwargs) + + return wrapper + + +def dynamic_secrets_only(f): + """Decorator to ensure that certain operations would be only executed when NO static secrets are defined.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields: + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, 
aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + +def either_static_or_dynamic_secrets(f): + """Decorator to ensure that static and dynamic secrets won't be used in parallel.""" + + def wrapper(self, *args, **kwargs): + if self.static_secret_fields and set(self.current_secret_fields) - set( + self.static_secret_fields + ): + raise IllegalOperationError( + "Unsafe usage of statically and dynamically defined secrets, aborting." + ) + return f(self, *args, **kwargs) + + return wrapper + + +def legacy_apply_from_version(version: int) -> Callable: + """Decorator to decide whether to apply a legacy function or not. + + Based on LEGACY_SUPPORT_FROM module variable value, the importer charm may only want + to apply legacy solutions starting from a specific LIBPATCH. + + NOTE: All 'legacy' functions have to be defined and called in a way that they return `None`. + This results in cleaner and more secure execution flows in case the function may be disabled. + This requirement implicitly means that legacy functions change the internal state strictly, + don't return information. 
+ """ + + def decorator(f: Callable[..., None]): + """Signature is ensuring None return value.""" + f.legacy_version = version + + def wrapper(self, *args, **kwargs) -> None: + if version >= LEGACY_SUPPORT_FROM: + return f(self, *args, **kwargs) + + return wrapper + + return decorator + + +############################################################################## +# Helper classes +############################################################################## + + +class Scope(Enum): + """Peer relations scope.""" + + APP = "app" + UNIT = "unit" + + +class SecretGroup(str): + """Secret groups specific type.""" + + +@dataclass +class RelationStatus: + """Base data class for status propagation on charm relations.""" + + code: int + message: str + resolution: str + + @property + def is_informational(self) -> bool: + """Is this an informational status?""" + return self.code // 1000 == 1 + + @property + def is_transitory(self) -> bool: + """Is this a transitory status?""" + return self.code // 1000 == 4 + + @property + def is_fatal(self) -> bool: + """Is this a fatal status, requiring removing the relation?""" + return self.code // 1000 == 5 + + +class RelationStatusDict(TypedDict): + """Base type for dict representation of `RelationStatus` dataclass.""" + + code: int + message: str + resolution: str + + +class SecretGroupsAggregate(str): + """Secret groups with option to extend with additional constants.""" + + def __init__(self): + self.USER = SecretGroup("user") + self.TLS = SecretGroup("tls") + self.MTLS = SecretGroup("mtls") + self.ENTITY = SecretGroup("entity") + self.EXTRA = SecretGroup("extra") + + def __setattr__(self, name, value): + """Setting internal constants.""" + if name in self.__dict__: + raise RuntimeError("Can't set constant!") + else: + super().__setattr__(name, SecretGroup(value)) + + def groups(self) -> list: + """Return the list of stored SecretGroups.""" + return list(self.__dict__.values()) + + def get_group(self, group: str) -> 
Optional[SecretGroup]: + """If the input str translates to a group name, return that.""" + return SecretGroup(group) if group in self.groups() else None + + +SECRET_GROUPS = SecretGroupsAggregate() + + +class CachedSecret: + """Locally cache a secret. + + The data structure is precisely reusing/simulating as in the actual Secret Storage + """ + + KNOWN_MODEL_ERRORS = [MODEL_ERRORS["no_label_and_uri"], MODEL_ERRORS["owner_no_refresh"]] + + def __init__( + self, + model: Model, + component: Union[Application, Unit], + label: str, + secret_uri: Optional[str] = None, + legacy_labels: List[str] = [], + ): + self._secret_meta = None + self._secret_content = {} + self._secret_uri = secret_uri + self.label = label + self._model = model + self.component = component + self.legacy_labels = legacy_labels + self.current_label = None + + @property + def meta(self) -> Optional[Secret]: + """Getting cached secret meta-information.""" + if not self._secret_meta: + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + self._legacy_compat_find_secret_by_old_label() + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + return self._secret_meta + + ########################################################################## + # Backwards compatibility / Upgrades + ########################################################################## + # These functions are used to keep backwards compatibility on rolling upgrades + # Policy: + # All data is kept intact until the first write operation. (This allows a minimal + # grace period during which rollbacks are fully safe. For more info see the spec.) 
+ # All data involves: + # - databag contents + # - secrets content + # - secret labels (!!!) + # Legacy functions must return None, and leave an equally consistent state whether + # they are executed or skipped (as a high enough versioned execution environment may + # not require so) + + # Compatibility + + @legacy_apply_from_version(34) + def _legacy_compat_find_secret_by_old_label(self) -> None: + """Compatibility function, allowing to find a secret by a legacy label. + + This functionality is typically needed when secret labels changed over an upgrade. + Until the first write operation, we need to maintain data as it was, including keeping + the old secret label. In order to keep track of the old label currently used to access + the secret, and additional 'current_label' field is being defined. + """ + for label in self.legacy_labels: + try: + self._secret_meta = self._model.get_secret(label=label) + except SecretNotFoundError: + pass + except ModelError as e: + # Permission denied can be raised if the secret exists but is not yet granted to us. + if "permission denied" in str(e): + return + raise + else: + if label != self.label: + self.current_label = label + return + + # Migrations + + @legacy_apply_from_version(34) + def _legacy_migration_to_new_label_if_needed(self) -> None: + """Helper function to re-create the secret with a different label. + + Juju does not provide a way to change secret labels. + Thus whenever moving from secrets version that involves secret label changes, + we "re-create" the existing secret, and attach the new label to the new + secret, to be used from then on. + + Note: we replace the old secret with a new one "in place", as we can't + easily switch the containing SecretCache structure to point to a new secret. + Instead we are changing the 'self' (CachedSecret) object to point to the + new instance. 
+ """ + if not self.current_label or not (self.meta and self._secret_meta): + return + + # Create a new secret with the new label + content = self._secret_meta.get_content() + self._secret_uri = None + + # It will be nice to have the possibility to check if we are the owners of the secret... + try: + self._secret_meta = self.add_secret(content, label=self.label) + except ModelError as err: + if MODEL_ERRORS["not_leader"] not in str(err): + raise + if "permission denied" not in str(err): + raise + self.current_label = None + + ########################################################################## + # Public functions + ########################################################################## + + def add_secret( + self, + content: Dict[str, str], + relation: Optional[Relation] = None, + label: Optional[str] = None, + ) -> Secret: + """Create a new secret.""" + if self._secret_uri: + raise SecretAlreadyExistsError( + "Secret is already defined with uri %s", self._secret_uri + ) + + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + if relation and relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) + self._secret_uri = secret.id + self._secret_meta = secret + return self._secret_meta + + def get_content(self) -> Dict[str, str]: + """Getting cached secret content.""" + if not self._secret_content: + if self.meta: + try: + self._secret_content = self.meta.get_content(refresh=True) + except (ValueError, ModelError) as err: + # https://bugs.launchpad.net/juju/+bug/2042596 + # Only triggered when 'refresh' is set + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + ): + raise + # Due to: ValueError: Secret owner cannot use refresh=True + self._secret_content = self.meta.get_content() + return self._secret_content + + def set_content(self, content: Dict[str, str]) -> None: + """Setting cached secret 
content.""" + if not self.meta: + return + + # DPE-4182: do not create new revision if the content stay the same + if content == self.get_content(): + return + + if content: + self._legacy_migration_to_new_label_if_needed() + self.meta.set_content(content) + self._secret_content = content + else: + self.meta.remove_all_revisions() + + def get_info(self) -> Optional[SecretInfo]: + """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" + if self.meta: + return self.meta.get_info() + + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError("Non-existent secret was attempted to be removed.") + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + + +class SecretCache: + """A data structure storing CachedSecret objects.""" + + def __init__(self, model: Model, component: Union[Application, Unit]): + self._model = model + self.component = component + self._secrets: Dict[str, CachedSecret] = {} + + def get( + self, label: str, uri: Optional[str] = None, legacy_labels: List[str] = [] + ) -> Optional[CachedSecret]: + """Getting a secret from Juju Secret store or cache.""" + if not self._secrets.get(label): + secret = CachedSecret( + self._model, self.component, label, uri, legacy_labels=legacy_labels + ) + if secret.meta: + self._secrets[label] = secret + return self._secrets.get(label) + + def add(self, label: str, content: Dict[str, str], relation: Relation) -> CachedSecret: + """Adding a secret to Juju Secret.""" + if self._secrets.get(label): + raise SecretAlreadyExistsError(f"Secret {label} already exists") + + secret = CachedSecret(self._model, self.component, label) + secret.add_secret(content, relation) + self._secrets[label] = secret + return self._secrets[label] + + def remove(self, label: str) -> None: + """Remove a secret from the cache.""" + if secret := 
class DataDict(UserDict):
    """Python Standard Library 'dict' - like representation of Relation Data.

    Reads and writes are delegated to the wrapped :class:`Data` accessor, so
    this mapping is always a *live* view over the relation databag (and any
    Juju Secrets referenced from it), never a cached copy.
    """

    def __init__(self, relation_data: "Data", relation_id: int):
        # NOTE: UserDict.__init__ is deliberately NOT called -- it would try to
        # assign ``self.data``, which is a read-only property on this class.
        self.relation_data = relation_data
        self.relation_id = relation_id

    @property
    def data(self) -> Dict[str, str]:
        """Return the full content of the Abstract Relation Data dictionary.

        Local (own-side) fields take precedence over remote fields on key clash.
        """
        result = self.relation_data.fetch_my_relation_data([self.relation_id])
        try:
            result_remote = self.relation_data.fetch_relation_data([self.relation_id])
        except NotImplementedError:
            result_remote = {self.relation_id: {}}
        if result:
            result_remote[self.relation_id].update(result[self.relation_id])
        return result_remote.get(self.relation_id, {})

    def __setitem__(self, key: str, item: str) -> None:
        """Set an item of the Abstract Relation Data dictionary."""
        self.relation_data.update_relation_data(self.relation_id, {key: item})

    def __getitem__(self, key: str) -> str:
        """Get an item of the Abstract Relation Data dictionary.

        Raises:
            KeyError: if the field is present on neither side of the relation.
        """
        result = None

        # Avoiding "leader_only" error when cross-charm non-leader unit, not to report useless error
        if (
            not hasattr(self.relation_data.fetch_my_relation_field, "leader_only")
            or self.relation_data.component != self.relation_data.local_app
            or self.relation_data.local_unit.is_leader()
        ):
            result = self.relation_data.fetch_my_relation_field(self.relation_id, key)

        if not result:
            try:
                result = self.relation_data.fetch_relation_field(self.relation_id, key)
            except NotImplementedError:
                pass

        if not result:
            # Include the key in the exception for easier debugging.
            raise KeyError(key)
        return result

    def __eq__(self, d: dict) -> bool:
        """Equality."""
        return self.data == d

    def __repr__(self) -> str:
        """String representation Abstract Relation Data dictionary."""
        return repr(self.data)

    def __len__(self) -> int:
        """Length of the Abstract Relation Data dictionary."""
        return len(self.data)

    def __delitem__(self, key: str) -> None:
        """Delete an item of the Abstract Relation Data dictionary."""
        self.relation_data.delete_relation_data(self.relation_id, [key])

    def has_key(self, key: str) -> bool:
        """Does the key exist in the Abstract Relation Data dictionary?"""
        return key in self.data

    def update(self, items: Dict[str, str]):
        """Update the Abstract Relation Data dictionary."""
        self.relation_data.update_relation_data(self.relation_id, items)

    def keys(self) -> KeysView[str]:
        """Keys of the Abstract Relation Data dictionary."""
        return self.data.keys()

    def values(self) -> ValuesView[str]:
        """Values of the Abstract Relation Data dictionary."""
        return self.data.values()

    def items(self) -> ItemsView[str, str]:
        """Items of the Abstract Relation Data dictionary."""
        return self.data.items()

    def pop(self, item: str) -> str:
        """Pop an item of the Abstract Relation Data dictionary.

        NOTE: only own-side ('my') fields can be popped -- remote fields are
        read-only from this side of the relation.
        """
        result = self.relation_data.fetch_my_relation_field(self.relation_id, item)
        if not result:
            raise KeyError(f"Item {item} doesn't exist.")
        self.relation_data.delete_relation_data(self.relation_id, [item])
        return result

    def __contains__(self, item: str) -> bool:
        """Does the Abstract Relation Data dictionary contain item?

        BUG FIX: previously this tested membership against ``values()``, which
        contradicts the mapping contract and this class's own ``has_key``,
        ``keys()`` and ``__iter__`` (all key-based). ``in`` now checks keys.
        """
        return item in self.data

    def __iter__(self):
        """Iterate through the Abstract Relation Data dictionary."""
        return iter(self.data)

    def get(self, key: str, default: Optional[str] = None) -> Optional[str]:
        """Safely get an item of the Abstract Relation Data dictionary."""
        # __getitem__ raises KeyError for any missing/empty field, so a plain
        # EAFP form is equivalent to the original walrus-based variant.
        try:
            return self[key]
        except KeyError:
            return default
self.secrets_enabled: + return self._remote_secret_fields + + @property + def my_secret_groups(self) -> Optional[List[SecretGroup]]: + """Local access to secrets field, in case they are being used.""" + if self.secrets_enabled: + return [ + self.SECRET_LABEL_MAP[field] + for field in self._local_secret_fields + if field in self.SECRET_LABEL_MAP + ] + + # Mandatory overrides for internal/helper methods + + @juju_secrets_only + def _get_relation_secret( + self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None + ) -> Optional[CachedSecret]: + """Retrieve a Juju Secret that's been stored in the relation databag.""" + if not relation_name: + relation_name = self.relation_name + + label = self._generate_secret_label(relation_name, relation_id, group_mapping) + if secret := self.secrets.get(label): + return secret + + relation = self._model.get_relation(relation_name, relation_id) + if not relation: + return + + if secret_uri := self.get_secret_uri(relation, group_mapping): + return self.secrets.get(label, secret_uri) + + # Mandatory overrides for requirer and peer, implemented for Provider + # Requirer uses local component and switched keys + # _local_secret_fields -> PROV_SECRET_FIELDS + # _remote_secret_fields -> REQ_SECRET_FIELDS + # provider uses remote component and + # _local_secret_fields -> REQ_SECRET_FIELDS + # _remote_secret_fields -> PROV_SECRET_FIELDS + @abstractmethod + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + raise NotImplementedError + + def _fetch_specific_relation_data( + self, relation: Relation, fields: Optional[List[str]] + ) -> Dict[str, str]: + """Fetch data available (directily or indirectly -- i.e. 
secrets) from the relation (remote app data).""" + if not relation.app: + return {} + self._load_secrets_from_databag(relation) + return self._fetch_relation_data_with_secrets( + relation.app, self.remote_secret_fields, relation, fields + ) + + def _fetch_my_specific_relation_data( + self, relation: Relation, fields: Optional[List[str]] + ) -> dict: + """Fetch our own relation data.""" + # load secrets + self._load_secrets_from_databag(relation) + return self._fetch_relation_data_with_secrets( + self.local_app, + self.local_secret_fields, + relation, + fields, + ) + + def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: + """Set values for fields not caring whether it's a secret or not.""" + self._load_secrets_from_databag(relation) + + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + list(data), + self._add_or_update_relation_secrets, + data=data, + ) + + normal_content = {k: v for k, v in data.items() if k in normal_fields} + self._update_relation_data_without_secrets(self.local_app, relation, normal_content) + + def _add_or_update_relation_secrets( + self, + relation: Relation, + group: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + uri_to_databag=True, + ) -> bool: + """Update contents for Secret group. 
If the Secret doesn't exist, create it.""" + if self._get_relation_secret(relation.id, group): + return self._update_relation_secret(relation, group, secret_fields, data) + + return self._add_relation_secret(relation, group, secret_fields, data, uri_to_databag) + + @juju_secrets_only + def _add_relation_secret( + self, + relation: Relation, + group_mapping: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + uri_to_databag=True, + ) -> bool: + """Add a new Juju Secret that will be registered in the relation databag.""" + if uri_to_databag and self.get_secret_uri(relation, group_mapping): + logging.error("Secret for relation %s already exists, not adding again", relation.id) + return False + + content = self._content_for_secret_group(data, secret_fields, group_mapping) + + label = self._generate_secret_label(self.relation_name, relation.id, group_mapping) + secret = self.secrets.add(label, content, relation) + + if uri_to_databag: + # According to lint we may not have a Secret ID + if not secret.meta or not secret.meta.id: + logging.error("Secret is missing Secret ID") + raise SecretError("Secret added but is missing Secret ID") + + self.set_secret_uri(relation, group_mapping, secret.meta.id) + + # Return the content that was added + return True + + @juju_secrets_only + def _update_relation_secret( + self, + relation: Relation, + group_mapping: SecretGroup, + secret_fields: Set[str], + data: Dict[str, str], + ) -> bool: + """Update the contents of an existing Juju Secret, referred in the relation databag.""" + secret = self._get_relation_secret(relation.id, group_mapping) + + if not secret: + logging.error("Can't update secret for relation %s", relation.id) + return False + + content = self._content_for_secret_group(data, secret_fields, group_mapping) + + old_content = secret.get_content() + full_content = copy.deepcopy(old_content) + full_content.update(content) + secret.set_content(full_content) + + # Return True on success + return True + + 
@juju_secrets_only + def _delete_relation_secret( + self, relation: Relation, group: SecretGroup, secret_fields: List[str], fields: List[str] + ) -> bool: + """Update the contents of an existing Juju Secret, referred in the relation databag.""" + secret = self._get_relation_secret(relation.id, group) + + if not secret: + logging.error("Can't delete secret for relation %s", str(relation.id)) + return False + + old_content = secret.get_content() + new_content = copy.deepcopy(old_content) + for field in fields: + try: + new_content.pop(field) + except KeyError: + logging.debug( + "Non-existing secret was attempted to be removed %s, %s", + str(relation.id), + str(field), + ) + return False + + # Remove secret from the relation if it's fully gone + if not new_content: + field = self._generate_secret_field_name(group) + try: + relation.data[self.component].pop(field) + except KeyError: + pass + label = self._generate_secret_label(self.relation_name, relation.id, group) + self.secrets.remove(label) + else: + secret.set_content(new_content) + + # Return the content that was removed + return True + + def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: + """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + if relation.app: + self._load_secrets_from_databag(relation) + + _, normal_fields = self._process_secret_fields( + relation, self.local_secret_fields, fields, self._delete_relation_secret, fields=fields + ) + self._delete_relation_data_without_secrets(self.local_app, relation, list(normal_fields)) + + def _register_secret_to_relation( + self, relation_name: str, relation_id: int, secret_id: str, group: SecretGroup + ): + """Fetch secrets and apply local label on them. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. 
+ + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. + """ + label = self._generate_secret_label(relation_name, relation_id, group) + + # Fetching the Secret's meta information ensuring that it's locally getting registered with + CachedSecret(self._model, self.component, label, secret_id).meta + + def _register_secrets_to_relation(self, relation: Relation, params_name_list: List[str]): + """Make sure that secrets of the provided list are locally 'registered' from the databag. + + More on 'locally registered' magic is described in _register_secret_to_relation() method + """ + if not relation.app: + return + + for group in SECRET_GROUPS.groups(): + secret_field = self._generate_secret_field_name(group) + if secret_field in params_name_list and ( + secret_uri := self.get_secret_uri(relation, group) + ): + self._register_secret_to_relation(relation.name, relation.id, secret_uri, group) + + # Optional overrides + + def _legacy_apply_on_fetch(self) -> None: + """This function should provide a list of compatibility functions to be applied when fetching (legacy) data.""" + pass + + def _legacy_apply_on_update(self, fields: List[str]) -> None: + """This function should provide a list of compatibility functions to be applied when writing data. + + Since data may be at a legacy version, migration may be mandatory. 
+ """ + pass + + def _legacy_apply_on_delete(self, fields: List[str]) -> None: + """This function should provide a list of compatibility functions to be applied when deleting (legacy) data.""" + pass + + # Internal helper methods + + @staticmethod + def _is_secret_field(field: str) -> bool: + """Is the field in question a secret reference (URI) field or not?""" + return field.startswith(PROV_SECRET_PREFIX) + + @staticmethod + def _generate_secret_label( + relation_name: str, relation_id: int, group_mapping: SecretGroup + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + return f"{relation_name}.{relation_id}.{group_mapping}.secret" + + def _generate_secret_field_name(self, group_mapping: SecretGroup) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + return f"{PROV_SECRET_PREFIX}{group_mapping}" + + def _relation_from_secret_label(self, secret_label: str) -> Optional[Relation]: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split(".") + + if not (contents and len(contents) >= 3): + return + + contents.pop() # ".secret" at the end + contents.pop() # Group mapping + relation_id = contents.pop() + try: + relation_id = int(relation_id) + except ValueError: + return + + # In case '.' character appeared in relation name + relation_name = ".".join(contents) + + try: + return self.get_relation(relation_name, relation_id) + except ModelError: + return + + def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + """Helper function to arrange secret mappings under their group. + + NOTE: All unrecognized items end up in the 'extra' secret bucket. + Make sure only secret fields are passed! 
+ """ + secret_fieldnames_grouped = {} + for key in secret_fields: + if group := self.secret_label_map.get(key): + secret_fieldnames_grouped.setdefault(group, []).append(key) + else: + secret_fieldnames_grouped.setdefault(SECRET_GROUPS.EXTRA, []).append(key) + return secret_fieldnames_grouped + + def _get_group_secret_contents( + self, + relation: Relation, + group: SecretGroup, + secret_fields: Union[Set[str], List[str]] = [], + ) -> Dict[str, str]: + """Helper function to retrieve collective, requested contents of a secret.""" + if (secret := self._get_relation_secret(relation.id, group)) and ( + secret_data := secret.get_content() + ): + return { + k: v for k, v in secret_data.items() if not secret_fields or k in secret_fields + } + return {} + + def _content_for_secret_group( + self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + ) -> Dict[str, str]: + """Select : pairs from input, that belong to this particular Secret group.""" + if group_mapping == SECRET_GROUPS.EXTRA: + return { + k: v + for k, v in content.items() + if k in secret_fields and k not in self.secret_label_map.keys() + } + + return { + k: v + for k, v in content.items() + if k in secret_fields and self.secret_label_map.get(k) == group_mapping + } + + @juju_secrets_only + def _get_relation_secret_data( + self, relation_id: int, group_mapping: SecretGroup, relation_name: Optional[str] = None + ) -> Optional[Dict[str, str]]: + """Retrieve contents of a Juju Secret that's been stored in the relation databag.""" + secret = self._get_relation_secret(relation_id, group_mapping, relation_name) + if secret: + return secret.get_content() + + # Core operations on Relation Fields manipulations (regardless whether the field is in the databag or in a secret) + # Internal functions to be called directly from transparent public interface functions (+closely related helpers) + + def _process_secret_fields( + self, + relation: Relation, + req_secret_fields: Optional[List[str]], + 
impacted_rel_fields: List[str], + operation: Callable, + *args, + **kwargs, + ) -> Tuple[Dict[str, str], Set[str]]: + """Isolate target secret fields of manipulation, and execute requested operation by Secret Group.""" + result = {} + + # If the relation started on a databag, we just stay on the databag + # (Rolling upgrades may result in a relation starting on databag, getting secrets enabled on-the-fly) + # self.local_app is sufficient to check (ignored if Requires, never has secrets -- works if Provider) + fallback_to_databag = ( + req_secret_fields + and (self.local_unit == self._model.unit and self.local_unit.is_leader()) + and set(req_secret_fields) & set(relation.data[self.component]) + ) + normal_fields = set(impacted_rel_fields) + if req_secret_fields and self.secrets_enabled and not fallback_to_databag: + normal_fields = normal_fields - set(req_secret_fields) + secret_fields = set(impacted_rel_fields) - set(normal_fields) + + secret_fieldnames_grouped = self._group_secret_fields(list(secret_fields)) + + for group in secret_fieldnames_grouped: + # operation() should return nothing when all goes well + if group_result := operation(relation, group, secret_fields, *args, **kwargs): + # If "meaningful" data was returned, we take it. (Some 'operation'-s only return success/failure.) + if isinstance(group_result, dict): + result.update(group_result) + else: + # If it wasn't found as a secret, let's give it a 2nd chance as "normal" field + # Needed when Juju3 Requires meets Juju2 Provider + normal_fields |= set(secret_fieldnames_grouped[group]) + return (result, normal_fields) + + def _fetch_relation_data_without_secrets( + self, component: Union[Application, Unit], relation: Relation, fields: Optional[List[str]] + ) -> Dict[str, str]: + """Fetching databag contents when no secrets are involved. + + Since the Provider's databag is the only one holding secrest, we can apply + a simplified workflow to read the Require's side's databag. 
+ This is used typically when the Provider side wants to read the Requires side's data, + or when the Requires side may want to read its own data. + """ + if component not in relation.data or not relation.data[component]: + return {} + + if fields: + return { + k: relation.data[component][k] for k in fields if k in relation.data[component] + } + else: + return dict(relation.data[component]) + + def _fetch_relation_data_with_secrets( + self, + component: Union[Application, Unit], + req_secret_fields: Optional[List[str]], + relation: Relation, + fields: Optional[List[str]] = None, + ) -> Dict[str, str]: + """Fetching databag contents when secrets may be involved. + + This function has internal logic to resolve if a requested field may be "hidden" + within a Relation Secret, or directly available as a databag field. Typically + used to read the Provider side's databag (eigher by the Requires side, or by + Provider side itself). + """ + result = {} + normal_fields = [] + + if not fields: + if component not in relation.data: + return {} + + all_fields = list(relation.data[component].keys()) + normal_fields = [field for field in all_fields if not self._is_secret_field(field)] + fields = normal_fields + req_secret_fields if req_secret_fields else normal_fields + + if fields: + result, normal_fields = self._process_secret_fields( + relation, req_secret_fields, fields, self._get_group_secret_contents + ) + + # Processing "normal" fields. May include leftover from what we couldn't retrieve as a secret. 
+ # (Typically when Juju3 Requires meets Juju2 Provider) + if normal_fields: + result.update( + self._fetch_relation_data_without_secrets(component, relation, list(normal_fields)) + ) + return result + + def _update_relation_data_without_secrets( + self, component: Union[Application, Unit], relation: Relation, data: Dict[str, str] + ) -> None: + """Updating databag contents when no secrets are involved.""" + if component not in relation.data or relation.data[component] is None: + return + + if relation: + relation.data[component].update(data) + + def _delete_relation_data_without_secrets( + self, component: Union[Application, Unit], relation: Relation, fields: List[str] + ) -> None: + """Remove databag fields 'fields' from Relation.""" + if component not in relation.data or relation.data[component] is None: + return + + for field in fields: + try: + relation.data[component].pop(field) + except KeyError: + logger.debug( + "Non-existing field '%s' was attempted to be removed from the databag (relation ID: %s)", + str(field), + str(relation.id), + ) + pass + + # Public interface methods + # Handling Relation Fields seamlessly, regardless if in databag or a Juju Secret + + def as_dict(self, relation_id: int) -> UserDict: + """Dict behavior representation of the Abstract Data.""" + return DataDict(self, relation_id) + + def get_relation(self, relation_name, relation_id) -> Relation: + """Safe way of retrieving a relation.""" + relation = self._model.get_relation(relation_name, relation_id) + + if not relation: + raise DataInterfacesError( + "Relation %s %s couldn't be retrieved", relation_name, relation_id + ) + + return relation + + def get_secret_uri(self, relation: Relation, group: SecretGroup) -> Optional[str]: + """Get the secret URI for the corresponding group.""" + secret_field = self._generate_secret_field_name(group) + # if the secret is not managed by this component, + # we need to fetch it from the other side + + # Fix for the linter + if 
self.my_secret_groups is None: + raise DataInterfacesError("Secrets are not enabled for this component") + component = self.component if group in self.my_secret_groups else relation.app + return relation.data[component].get(secret_field) + + def set_secret_uri(self, relation: Relation, group: SecretGroup, secret_uri: str) -> None: + """Set the secret URI for the corresponding group.""" + secret_field = self._generate_secret_field_name(group) + relation.data[self.component][secret_field] = secret_uri + + def fetch_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Dict[int, Dict[str, str]]: + """Retrieves data from relation. + + This function can be used to retrieve data from a relation + in the charm code when outside an event callback. + Function cannot be used in `*-relation-broken` events and will raise an exception. + + Returns: + a dict of the values stored in the relation data bag + for all relation instances (indexed by the relation ID). 
+ """ + self._legacy_apply_on_fetch() + + if not relation_name: + relation_name = self.relation_name + + relations = [] + if relation_ids: + relations = [ + self.get_relation(relation_name, relation_id) for relation_id in relation_ids + ] + else: + relations = self.relations + + data = {} + for relation in relations: + if not relation_ids or (relation_ids and relation.id in relation_ids): + data[relation.id] = self._fetch_specific_relation_data(relation, fields) + return data + + def fetch_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """Get a single field from the relation data.""" + return ( + self.fetch_relation_data([relation_id], [field], relation_name) + .get(relation_id, {}) + .get(field) + ) + + def fetch_my_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Optional[Dict[int, Dict[str, str]]]: + """Fetch data of the 'owner' (or 'this app') side of the relation. + + NOTE: Since only the leader can read the relation's 'this_app'-side + Application databag, the functionality is limited to leaders + """ + self._legacy_apply_on_fetch() + + if not relation_name: + relation_name = self.relation_name + + relations = [] + if relation_ids: + relations = [ + self.get_relation(relation_name, relation_id) for relation_id in relation_ids + ] + else: + relations = self.relations + + data = {} + for relation in relations: + if not relation_ids or relation.id in relation_ids: + data[relation.id] = self._fetch_my_specific_relation_data(relation, fields) + return data + + def fetch_my_relation_field( + self, relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """Get a single field from the relation data -- owner side. 
class EventHandlers(Object):
    """Requires-side of the relation.

    Abstract base that wires a :class:`Data` accessor into the charm's event
    loop: it observes relation-created, relation-changed and secret-changed
    events for ``relation_data.relation_name`` and dispatches them to the
    handler methods below. Subclasses must implement the two abstract handlers.
    """

    def __init__(self, charm: CharmBase, relation_data: Data, unique_key: str = ""):
        """Manager of base client relations.

        Args:
            charm: the charm owning this event handler object.
            relation_data: the relation-data accessor the handlers operate on.
            unique_key: framework storage key for this Object; defaults to the
                relation name (pass a distinct value to observe the same
                relation from more than one handler instance).
        """
        if not unique_key:
            unique_key = relation_data.relation_name
        super().__init__(charm, unique_key)

        self.charm = charm
        self.relation_data = relation_data

        self.framework.observe(
            charm.on[self.relation_data.relation_name].relation_changed,
            self._on_relation_changed_event,
        )

        self.framework.observe(
            self.charm.on[relation_data.relation_name].relation_created,
            self._on_relation_created_event,
        )

        # secret-changed is observed on the charm itself (charm.on), not on a
        # particular relation; handlers must map the secret back to a relation
        # themselves (e.g. via its label).
        self.framework.observe(
            charm.on.secret_changed,
            self._on_secret_changed_event,
        )

    # Event handlers

    def _on_relation_created_event(self, event: RelationCreatedEvent) -> None:
        """Event emitted when the relation is created.

        No-op by default; subclasses may override.
        """
        pass

    @abstractmethod
    def _on_relation_changed_event(self, event: RelationChangedEvent) -> None:
        """Event emitted when the relation data has changed."""
        raise NotImplementedError

    @abstractmethod
    def _on_secret_changed_event(self, event: SecretChangedEvent) -> None:
        """Event emitted when a Juju Secret observed by this charm has changed."""
        raise NotImplementedError

    def _diff(self, event: RelationChangedEvent) -> Diff:
        """Retrieves the diff of the data in the relation changed databag.

        Args:
            event: relation changed event.

        Returns:
            a Diff instance containing the added, deleted and changed
            keys from the event relation databag.
        """
        return diff(event, self.relation_data.data_component)
class ProviderData(Data):
    """Base provides-side of the data products relation."""

    # Field that must exist in the relation before any non-connection data may
    # be written -- see _update_relation_data() below.
    RESOURCE_FIELD = "database"

    def __init__(
        self,
        model: Model,
        relation_name: str,
        status_schema_path: OptionalPathLike = None,
    ) -> None:
        super().__init__(model, relation_name)
        self.data_component = self.local_app
        # Provider view: local fields are the ones the requirer asked to be
        # secret (REQ), remote fields are the provider-declared ones (PROV).
        self._local_secret_fields = []
        self._remote_secret_fields = list(self.SECRET_FIELDS)
        self._status_schema = (
            {} if not status_schema_path else self._load_status_schema(Path(status_schema_path))
        )

    def _load_status_schema(self, schema_path: Path) -> Dict[int, RelationStatus]:
        """Load JSON schema defining status codes and their details.

        Args:
            schema_path: JSON schema file path.

        Raises:
            FileNotFoundError: If the provided path is invalid/inaccessible.

        Returns:
            dict[int, RelationStatus]: Mapping of status code to RelationStatus data objects.
        """
        if not schema_path.exists():
            raise FileNotFoundError(f"Can't locate status schema file: {schema_path}")

        # Use a context manager so the file handle is closed deterministically
        # (previously the handle from a bare open() was left to the GC).
        with schema_path.open("r") as schema_file:
            content = json.load(schema_file)

        return {s["code"]: RelationStatus(**s) for s in content.get("statuses", [])}

    def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None:
        """Set values for fields not caring whether it's a secret or not."""
        keys = set(data.keys())
        # Writing anything beyond endpoint-style fields is forbidden until the
        # requirer has initialized the connection (RESOURCE_FIELD present).
        if self.fetch_relation_field(relation.id, self.RESOURCE_FIELD) is None and (
            keys - {"endpoints", "read-only-endpoints", "replset"}
        ):
            raise PrematureDataAccessError(
                "Premature access to relation data, update is forbidden before the connection is initialized."
            )
        super()._update_relation_data(relation, data)

    # Public methods - "native"

    def set_credentials(self, relation_id: int, username: str, password: str) -> None:
        """Set credentials.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            username: user that was created.
            password: password of the created user.
        """
        self.update_relation_data(relation_id, {"username": username, "password": password})

    def set_entity_credentials(
        self, relation_id: int, entity_name: str, entity_password: Optional[str] = None
    ) -> None:
        """Set entity credentials.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            entity_name: name of the created entity.
            entity_password: password of the created entity (may be None for
                password-less entities, e.g. groups).
        """
        self.update_relation_data(
            relation_id,
            {"entity-name": entity_name, "entity-password": entity_password},
        )

    def set_tls(self, relation_id: int, tls: str) -> None:
        """Set whether TLS is enabled.

        Args:
            relation_id: the identifier for a particular relation.
            tls: whether tls is enabled (True or False).
        """
        self.update_relation_data(relation_id, {"tls": tls})

    def set_tls_ca(self, relation_id: int, tls_ca: str) -> None:
        """Set the TLS CA in the application relation databag.

        Args:
            relation_id: the identifier for a particular relation.
            tls_ca: TLS certification authority.
        """
        self.update_relation_data(relation_id, {"tls-ca": tls_ca})

    @leader_only
    def get_statuses(self, relation_id: int) -> Dict[int, RelationStatus]:
        """Return all currently active statuses on this relation. Can only be called on leader units.

        Args:
            relation_id (int): the identifier for a particular relation.

        Returns:
            Dict[int, RelationStatus]: A mapping of status code to RelationStatus instances.
        """
        # Missing field means "no statuses raised yet".
        raw = self.fetch_my_relation_field(relation_id, STATUS_FIELD) or "[]"

        return {item["code"]: RelationStatus(**item) for item in json.loads(raw)}

    @overload
    def raise_status(self, relation_id: int, status: int) -> None: ...

    @overload
    def raise_status(self, relation_id: int, status: RelationStatusDict) -> None: ...

    @overload
    def raise_status(self, relation_id: int, status: RelationStatus) -> None: ...

    def raise_status(
        self, relation_id: int, status: Union[RelationStatus, RelationStatusDict, int]
    ) -> None:
        """Raise a status on the relation. Can only be called on leader units.

        Args:
            relation_id (int): the identifier for a particular relation.
            status (RelationStatus | RelationStatusDict | int): A representation of the status being raised,
                which could be either a RelationStatus, an appropriate dict, or the numeric status code.

        Raises:
            ValueError: If the status provided is not correctly formatted.
            KeyError: If a numeric code is given that is not in the status schema.
        """
        if isinstance(status, int):
            # we expect the status schema to be defined in this case.
            if status not in self._status_schema:
                raise KeyError(f"Status code [{status}] not defined.")
            _status = self._status_schema[status]
        elif isinstance(status, dict):
            _status = RelationStatus(**status)
        elif isinstance(status, RelationStatus):
            _status = status
        else:
            raise ValueError(
                "The status should be either a RelationStatus, an appropriate dict, or the numeric status code."
            )

        statuses = self.get_statuses(relation_id)
        statuses.update({_status.code: _status})
        # Serialize sorted by code so the databag representation is stable.
        serialized = json.dumps([asdict(statuses[k]) for k in sorted(statuses)])
        self.update_relation_data(relation_id, {STATUS_FIELD: serialized})

    def resolve_status(self, relation_id: int, status_code: int) -> None:
        """Set a previously raised status as resolved.

        Args:
            relation_id (int): the identifier for a particular relation.
            status_code (int): the numeric code of the resolved status.
        """
        statuses = self.get_statuses(relation_id)
        if status_code not in statuses:
            # Best-effort: resolving an unknown status is logged, not fatal.
            logger.error(f"Status [{status_code}] has never been raised before.")
            return

        statuses.pop(status_code)
        serialized = json.dumps([asdict(statuses[k]) for k in sorted(statuses)])
        self.update_relation_data(relation_id, {STATUS_FIELD: serialized})

    def clear_statuses(self, relation_id: int) -> None:
        """Clear all previously raised statuses.

        Args:
            relation_id (int): the identifier for a particular relation.
        """
        self.delete_relation_data(relation_id, [STATUS_FIELD])

    # Public functions -- inherited

    fetch_my_relation_data = leader_only(Data.fetch_my_relation_data)
    fetch_my_relation_field = leader_only(Data.fetch_my_relation_field)

    def _load_secrets_from_databag(self, relation: Relation) -> None:
        """Load secrets from the databag.

        On the provider side both lists come from the remote (requirer) app's
        databag: requested secrets become our local secret fields, provided
        secrets the remote ones.
        """
        requested_secrets = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS)
        provided_secrets = get_encoded_list(relation, relation.app, PROV_SECRET_FIELDS)
        if requested_secrets is not None:
            self._local_secret_fields = requested_secrets

        if provided_secrets is not None:
            self._remote_secret_fields = provided_secrets
version") + + if self.requested_entity_secret and ( + self.requested_entity_name or self.requested_entity_password + ): + raise IllegalOperationError("Unable to use provided and automated entity name secret") + + if self.requested_entity_password and not self.requested_entity_name: + raise IllegalOperationError("Unable to set entity password without an entity name") + + self._validate_entity_type() + self._validate_entity_permissions() + + self._remote_secret_fields = list(self.SECRET_FIELDS) + self._local_secret_fields = [ + field + for field in self.SECRET_LABEL_MAP.keys() + if field not in self._remote_secret_fields + ] + if additional_secret_fields: + self._remote_secret_fields += additional_secret_fields + self.data_component = self.local_unit + + # Internal functions + + def _is_resource_created_for_relation(self, relation: Relation) -> bool: + if not relation.app: + return False + + data = self.fetch_relation_data( + [relation.id], + ["username", "password", "entity-name", "entity-password"], + ).get(relation.id, {}) + + return any( + [ + all(bool(data.get(field)) for field in ("username", "password")), + all(bool(data.get(field)) for field in ("entity-name",)), + ] + ) + + def _validate_entity_type(self) -> None: + """Validates the consistency of the provided entity-type and its extra roles.""" + if self.entity_type and self.entity_type not in {ENTITY_USER, ENTITY_GROUP}: + raise ValueError("Invalid entity-type. Possible values are USER and GROUP") + + if self.entity_type == ENTITY_USER and self.extra_group_roles: + raise ValueError("Inconsistent entity information. Use extra_user_roles instead") + + if self.entity_type == ENTITY_GROUP and self.extra_user_roles: + raise ValueError("Inconsistent entity information. 
Use extra_group_roles instead") + + def _validate_entity_permissions(self) -> None: + """Validates whether the provided entity permissions follow the right JSON format.""" + if not self.entity_permissions: + return + + accepted_keys = {"resource_name", "resource_type", "privileges"} + + try: + permissions = json.loads(self.entity_permissions) + for permission in permissions: + if permission.keys() != accepted_keys: + raise ValueError("Invalid entity permissions format. See accepted keys") + except json.decoder.JSONDecodeError: + raise ValueError("Invalid entity permissions format. It must be JSON format") + + # Public functions + + def is_resource_created(self, relation_id: Optional[int] = None) -> bool: + """Check if the resource has been created. + + This function can be used to check if the Provider answered with data in the charm code + when outside an event callback. + + Args: + relation_id (int, optional): When provided the check is done only for the relation id + provided, otherwise the check is done for all relations + + Returns: + True or False + + Raises: + IndexError: If relation_id is provided but that relation does not exist + """ + if relation_id is not None: + try: + relation = [relation for relation in self.relations if relation.id == relation_id][ + 0 + ] + return self._is_resource_created_for_relation(relation) + except IndexError: + raise IndexError(f"relation id {relation_id} cannot be accessed") + else: + return ( + all( + self._is_resource_created_for_relation(relation) for relation in self.relations + ) + if self.relations + else False + ) + + # Public functions -- inherited + + fetch_my_relation_data = leader_only(Data.fetch_my_relation_data) + fetch_my_relation_field = leader_only(Data.fetch_my_relation_field) + + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, self.local_unit, REQ_SECRET_FIELDS) + provided_secrets = 
get_encoded_list(relation, self.local_unit, PROV_SECRET_FIELDS) + if requested_secrets: + self._remote_secret_fields = requested_secrets + + if provided_secrets: + self._local_secret_fields = provided_secrets + + +class StatusEventBase(RelationEvent): + """Base class for relation status change events.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + status: RelationStatus, + app: Optional[Application] = None, + unit: Optional[Unit] = None, + ): + super().__init__(handle, relation, app=app, unit=unit) + self.status = status + + def snapshot(self) -> dict: + """Return a snapshot of the event.""" + return super().snapshot() | {"status": json.dumps(asdict(self.status))} + + def restore(self, snapshot: dict): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.status = RelationStatus(**json.loads(snapshot["status"])) + + @property + def active_statuses(self) -> List[RelationStatus]: + """Returns a list of all currently active statuses on this relation.""" + if not self.relation.app: + return [] + + raw = json.loads(self.relation.data[self.relation.app].get(STATUS_FIELD, "[]")) + + return [RelationStatus(**item) for item in raw] + + +class StatusRaisedEvent(StatusEventBase): + """Event emitted on the requirer when a new status is being raised by the provider on relation.""" + + +class StatusResolvedEvent(StatusEventBase): + """Event emitted on the requirer when a status is marked as resolved by the provider on relation.""" + + +class RequirerCharmEvents(CharmEvents): + """Base events for data requirer charms.""" + + status_raised = EventSource(StatusRaisedEvent) + status_resolved = EventSource(StatusResolvedEvent) + + +class RequirerEventHandlers(EventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + def _main_credentials_shared(self, 
diff: Diff) -> bool: + """Whether the relation data-bag contains username / password keys.""" + user_secret = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + return any( + [ + user_secret in diff.added, + "username" in diff.added and "password" in diff.added, + ] + ) + + def _entity_credentials_shared(self, diff: Diff) -> bool: + """Whether the relation data-bag contains rolename / password keys.""" + entity_secret = self.relation_data._generate_secret_field_name(SECRET_GROUPS.ENTITY) + return any( + [ + entity_secret in diff.added, + "entity-name" in diff.added, + ] + ) + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + if not self.relation_data.local_unit.is_leader(): + return + + if self.relation_data.remote_secret_fields: + if self.relation_data.SCOPE == Scope.APP: + set_encoded_field( + event.relation, + self.relation_data.local_app, + REQ_SECRET_FIELDS, + self.relation_data.remote_secret_fields, + ) + + set_encoded_field( + event.relation, + self.relation_data.local_unit, + REQ_SECRET_FIELDS, + self.relation_data.remote_secret_fields, + ) + + if self.relation_data.local_secret_fields: + if self.relation_data.SCOPE == Scope.APP: + set_encoded_field( + event.relation, + self.relation_data.local_app, + PROV_SECRET_FIELDS, + self.relation_data.local_secret_fields, + ) + set_encoded_field( + event.relation, + self.relation_data.local_unit, + PROV_SECRET_FIELDS, + self.relation_data.local_secret_fields, + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Retrieve old statuses from "data" + old_data = get_encoded_dict(event.relation, self.relation_data.local_unit, "data") or {} + old_statuses = json.loads(old_data.get(STATUS_FIELD, "[]")) + previous_codes = {status.get("code") for status in old_statuses} + + # Compute current statuses + current_statuses = 
json.loads( + self.relation_data.fetch_relation_field(event.relation.id, STATUS_FIELD) or "[]" + ) + current_codes = {status.get("code") for status in current_statuses} + + # Detect changes + raised = current_codes - previous_codes + resolved = previous_codes - current_codes + + for status_code in raised: + logger.debug(f"Status [{status_code}] raised") + _status = next(s for s in current_statuses if s["code"] == status_code) + _status_instance = RelationStatus(**_status) + getattr(self.on, "status_raised").emit( + event.relation, + status=_status_instance, + app=event.app, + unit=event.unit, + ) + + for status_code in resolved: + logger.debug(f"Status [{status_code}] resolved") + _status = next(s for s in old_statuses if s["code"] == status_code) + _status_instance = RelationStatus(**_status) + getattr(self.on, "status_resolved").emit( + event.relation, + status=_status_instance, + app=event.app, + unit=event.unit, + ) + + +class ProviderEventHandlers(EventHandlers): + """Provider-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: ProviderData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + @staticmethod + def _validate_entity_consistency(event: RelationEvent, diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in ["entity-type", "extra-user-roles", "extra-group-roles"]: + if key in diff.changed: + raise ValueError(f"Cannot change {key} after relation has already been created") + + # Event handlers + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + requested_secrets = get_encoded_list(event.relation, event.relation.app, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(event.relation, event.relation.app, PROV_SECRET_FIELDS) + if requested_secrets is not None: + self.relation_data._local_secret_fields = requested_secrets + + if provided_secrets is not None: + self.relation_data._remote_secret_fields = provided_secrets + + +################################################################################ +# Peer Relation Data +################################################################################ + + +class DataPeerData(RequirerData, ProviderData): + """Represents peer relations data.""" + + SECRET_FIELDS = [] + SECRET_FIELD_NAME = "internal_secret" + SECRET_LABEL_MAP = {} + + def __init__( + self, + model, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + ): + RequirerData.__init__( + self, + model=model, + relation_name=relation_name, + additional_secret_fields=additional_secret_fields, + ) + self.secret_field_name = secret_field_name if secret_field_name else self.SECRET_FIELD_NAME + self.deleted_label = deleted_label + self._secret_label_map = {} + + # Legacy information holders + self._legacy_labels = [] + self._legacy_secret_uri = None + + # Secrets that are being dynamically added within the scope of this event handler run + self._new_secrets = [] + self._additional_secret_group_mapping = additional_secret_group_mapping + + 
for group, fields in additional_secret_group_mapping.items(): + if group not in SECRET_GROUPS.groups(): + setattr(SECRET_GROUPS, group, group) + for field in fields: + secret_group = SECRET_GROUPS.get_group(group) + internal_field = self._field_to_internal_name(field, secret_group) + self._secret_label_map.setdefault(group, []).append(internal_field) + self._remote_secret_fields.append(internal_field) + + @property + def scope(self) -> Optional[Scope]: + """Turn component information into Scope.""" + if isinstance(self.component, Application): + return Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + + @property + def secret_label_map(self) -> Dict[str, str]: + """Property storing secret mappings.""" + return self._secret_label_map + + @property + def static_secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return self._remote_secret_fields + + @property + def local_secret_fields(self) -> List[str]: + """Re-definition of the property in a way that dynamically extended list is retrieved.""" + return ( + self.static_secret_fields if self.static_secret_fields else self.current_secret_fields + ) + + @property + def current_secret_fields(self) -> List[str]: + """Helper method to get all currently existing secret fields (added statically or dynamically).""" + if not self.secrets_enabled: + return [] + + if len(self._model.relations[self.relation_name]) > 1: + raise ValueError(f"More than one peer relation on {self.relation_name}") + + relation = self._model.relations[self.relation_name][0] + fields = [] + + ignores = [ + SECRET_GROUPS.get_group("user"), + SECRET_GROUPS.get_group("tls"), + SECRET_GROUPS.get_group("mtls"), + SECRET_GROUPS.get_group("entity"), + ] + for group in SECRET_GROUPS.groups(): + if group in ignores: + continue + if content := self._get_group_secret_contents(relation, group): + fields += list(content.keys()) + return list(set(fields) | 
set(self._new_secrets)) + + @dynamic_secrets_only + def set_secret( + self, + relation_id: int, + field: str, + value: str, + group_mapping: Optional[SecretGroup] = None, + ) -> None: + """Public interface method to add a Relation Data field specifically as a Juju Secret. + + Args: + relation_id: ID of the relation + field: The secret field that is to be added + value: The string value of the secret + group_mapping: The name of the "secret group", in case the field is to be added to an existing secret + """ + self._legacy_apply_on_update([field]) + + full_field = self._field_to_internal_name(field, group_mapping) + if self.secrets_enabled and full_field not in self.current_secret_fields: + self._new_secrets.append(full_field) + if self.valid_field_pattern(field, full_field): + self.update_relation_data(relation_id, {full_field: value}) + + # Unlike for set_secret(), there's no harm using this operation with static secrets + # The restricion is only added to keep the concept clear + @dynamic_secrets_only + def get_secret( + self, + relation_id: int, + field: str, + group_mapping: Optional[SecretGroup] = None, + ) -> Optional[str]: + """Public interface method to fetch secrets only.""" + self._legacy_apply_on_fetch() + + full_field = self._field_to_internal_name(field, group_mapping) + if ( + self.secrets_enabled + and full_field not in self.current_secret_fields + and field not in self.current_secret_fields + ): + return + if self.valid_field_pattern(field, full_field): + return self.fetch_my_relation_field(relation_id, full_field) + + @dynamic_secrets_only + def delete_secret( + self, + relation_id: int, + field: str, + group_mapping: Optional[SecretGroup] = None, + ) -> Optional[str]: + """Public interface method to delete secrets only.""" + self._legacy_apply_on_delete([field]) + + full_field = self._field_to_internal_name(field, group_mapping) + if self.secrets_enabled and full_field not in self.current_secret_fields: + logger.warning(f"Secret {field} from group 
{group_mapping} was not found") + return + + if self.valid_field_pattern(field, full_field): + self.delete_relation_data(relation_id, [full_field]) + + ########################################################################## + # Helpers + ########################################################################## + + @staticmethod + def _field_to_internal_name(field: str, group: Optional[SecretGroup]) -> str: + if not group or group == SECRET_GROUPS.EXTRA: + return field + return f"{field}{GROUP_SEPARATOR}{group}" + + @staticmethod + def _internal_name_to_field(name: str) -> Tuple[str, SecretGroup]: + parts = name.split(GROUP_SEPARATOR) + if not len(parts) > 1: + return (parts[0], SECRET_GROUPS.EXTRA) + secret_group = SECRET_GROUPS.get_group(parts[1]) + if not secret_group: + raise ValueError(f"Invalid secret field {name}") + return (parts[0], secret_group) + + def _group_secret_fields(self, secret_fields: List[str]) -> Dict[SecretGroup, List[str]]: + """Helper function to arrange secret mappings under their group. + + NOTE: All unrecognized items end up in the 'extra' secret bucket. + Make sure only secret fields are passed! + """ + secret_fieldnames_grouped = {} + for key in secret_fields: + field, group = self._internal_name_to_field(key) + secret_fieldnames_grouped.setdefault(group, []).append(field) + return secret_fieldnames_grouped + + def _content_for_secret_group( + self, content: Dict[str, str], secret_fields: Set[str], group_mapping: SecretGroup + ) -> Dict[str, str]: + """Select : pairs from input, that belong to this particular Secret group.""" + if group_mapping == SECRET_GROUPS.EXTRA: + return {k: v for k, v in content.items() if k in self.local_secret_fields} + return { + self._internal_name_to_field(k)[0]: v + for k, v in content.items() + if k in self.local_secret_fields + } + + def valid_field_pattern(self, field: str, full_field: str) -> bool: + """Check that no secret group is attempted to be used together without secrets being enabled. 
+ + Secrets groups are impossible to use with versions that are not yet supporting secrets. + """ + if not self.secrets_enabled and full_field != field: + logger.error( + f"Can't access {full_field}: no secrets available (i.e. no secret groups either)." + ) + return False + return True + + def _load_secrets_from_databag(self, relation: Relation) -> None: + """Load secrets from the databag.""" + requested_secrets = get_encoded_list(relation, self.component, REQ_SECRET_FIELDS) + provided_secrets = get_encoded_list(relation, self.component, PROV_SECRET_FIELDS) + if requested_secrets: + self._remote_secret_fields = requested_secrets + + if provided_secrets: + self._local_secret_fields = provided_secrets + + ########################################################################## + # Backwards compatibility / Upgrades + ########################################################################## + # These functions are used to keep backwards compatibility on upgrades + # Policy: + # All data is kept intact until the first write operation. (This allows a minimal + # grace period during which rollbacks are fully safe. For more info see spec.) + # All data involves: + # - databag + # - secrets content + # - secret labels (!!!) 
+ # Legacy functions must return None, and leave an equally consistent state whether + # they are executed or skipped (as a high enough versioned execution environment may + # not require so) + + # Full legacy stack for each operation + + def _legacy_apply_on_fetch(self) -> None: + """All legacy functions to be applied on fetch.""" + relation = self._model.relations[self.relation_name][0] + self._legacy_compat_generate_prev_labels() + self._legacy_compat_secret_uri_from_databag(relation) + + def _legacy_apply_on_update(self, fields) -> None: + """All legacy functions to be applied on update.""" + relation = self._model.relations[self.relation_name][0] + self._legacy_compat_generate_prev_labels() + self._legacy_compat_secret_uri_from_databag(relation) + self._legacy_migration_remove_secret_from_databag(relation, fields) + self._legacy_migration_remove_secret_field_name_from_databag(relation) + + def _legacy_apply_on_delete(self, fields) -> None: + """All legacy functions to be applied on delete.""" + relation = self._model.relations[self.relation_name][0] + self._legacy_compat_generate_prev_labels() + self._legacy_compat_secret_uri_from_databag(relation) + self._legacy_compat_check_deleted_label(relation, fields) + + # Compatibility + + @legacy_apply_from_version(18) + def _legacy_compat_check_deleted_label(self, relation, fields) -> None: + """Helper function for legacy behavior. + + As long as https://bugs.launchpad.net/juju/+bug/2028094 wasn't fixed, + we did not delete fields but rather kept them in the secret with a string value + expressing invalidity. This function is maintainnig that behavior when needed. + """ + if not self.deleted_label: + return + + current_data = self.fetch_my_relation_data([relation.id], fields) + if current_data is not None: + # Check if the secret we wanna delete actually exists + # Given the "deleted label", here we can't rely on the default mechanism (i.e. 
'key not found') + if non_existent := (set(fields) & set(self.local_secret_fields)) - set( + current_data.get(relation.id, []) + ): + logger.debug( + "Non-existing secret %s was attempted to be removed.", + ", ".join(non_existent), + ) + + @legacy_apply_from_version(18) + def _legacy_compat_secret_uri_from_databag(self, relation) -> None: + """Fetching the secret URI from the databag, in case stored there.""" + self._legacy_secret_uri = relation.data[self.component].get( + self._generate_secret_field_name(), None + ) + + @legacy_apply_from_version(34) + def _legacy_compat_generate_prev_labels(self) -> None: + """Generator for legacy secret label names, for backwards compatibility. + + Secret label is part of the data that MUST be maintained across rolling upgrades. + In case there may be a change on a secret label, the old label must be recognized + after upgrades, and left intact until the first write operation -- when we roll over + to the new label. + + This function keeps "memory" of previously used secret labels. + NOTE: Return value takes decorator into account -- all 'legacy' functions may return `None` + + v0.34 (rev69): Fixing issue https://github.com/canonical/data-platform-libs/issues/155 + meant moving from '.' (i.e. 'mysql.app', 'mysql.unit') + to labels '..' (like 'peer.mysql.app') + """ + if self._legacy_labels: + return + + result = [] + members = [self._model.app.name] + if self.scope: + members.append(self.scope.value) + result.append(f"{'.'.join(members)}") + self._legacy_labels = result + + # Migration + + @legacy_apply_from_version(18) + def _legacy_migration_remove_secret_from_databag(self, relation, fields: List[str]) -> None: + """For Rolling Upgrades -- when moving from databag to secrets usage. + + Practically what happens here is to remove stuff from the databag that is + to be stored in secrets. 
+ """ + if not self.local_secret_fields: + return + + secret_fields_passed = set(self.local_secret_fields) & set(fields) + for field in secret_fields_passed: + if self._fetch_relation_data_without_secrets(self.component, relation, [field]): + self._delete_relation_data_without_secrets(self.component, relation, [field]) + + @legacy_apply_from_version(18) + def _legacy_migration_remove_secret_field_name_from_databag(self, relation) -> None: + """Making sure that the old databag URI is gone. + + This action should not be executed more than once. + + There was a phase (before moving secrets usage to libs) when charms saved the peer + secret URI to the databag, and used this URI from then on to retrieve their secret. + When upgrading to charm versions using this library, we need to add a label to the + secret and access it via label from than on, and remove the old traces from the databag. + """ + # Nothing to do if 'internal-secret' is not in the databag + if not (relation.data[self.component].get(self._generate_secret_field_name())): + return + + # Making sure that the secret receives its label + # (This should have happened by the time we get here, rather an extra security measure.) 
+ secret = self._get_relation_secret(relation.id) + + # Either app scope secret with leader executing, or unit scope secret + leader_or_unit_scope = self.component != self.local_app or self.local_unit.is_leader() + if secret and leader_or_unit_scope: + # Databag reference to the secret URI can be removed, now that it's labelled + relation.data[self.component].pop(self._generate_secret_field_name(), None) + + ########################################################################## + # Event handlers + ########################################################################## + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + pass + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + ########################################################################## + # Overrides of Relation Data handling functions + ########################################################################## + + def _generate_secret_label( + self, relation_name: str, relation_id: int, group_mapping: SecretGroup + ) -> str: + members = [relation_name, self._model.app.name] + if self.scope: + members.append(self.scope.value) + if group_mapping != SECRET_GROUPS.EXTRA: + members.append(group_mapping) + return f"{'.'.join(members)}" + + def _generate_secret_field_name(self, group_mapping: SecretGroup = SECRET_GROUPS.EXTRA) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + return f"{self.secret_field_name}" + + @juju_secrets_only + def _get_relation_secret( + self, + relation_id: int, + group_mapping: SecretGroup = SECRET_GROUPS.EXTRA, + relation_name: Optional[str] = None, + ) -> Optional[CachedSecret]: + """Retrieve a Juju Secret specifically for peer relations. 
+ + In case this code may be executed within a rolling upgrade, and we may need to + migrate secrets from the databag to labels, we make sure to stick the correct + label on the secret, and clean up the local databag. + """ + if not relation_name: + relation_name = self.relation_name + + relation = self._model.get_relation(relation_name, relation_id) + if not relation: + return + + label = self._generate_secret_label(relation_name, relation_id, group_mapping) + + # URI or legacy label is only to applied when moving single legacy secret to a (new) label + if group_mapping == SECRET_GROUPS.EXTRA: + # Fetching the secret with fallback to URI (in case label is not yet known) + # Label would we "stuck" on the secret in case it is found + return self.secrets.get( + label, self._legacy_secret_uri, legacy_labels=self._legacy_labels + ) + return self.secrets.get(label) + + def _get_group_secret_contents( + self, + relation: Relation, + group: SecretGroup, + secret_fields: Union[Set[str], List[str]] = [], + ) -> Dict[str, str]: + """Helper function to retrieve collective, requested contents of a secret.""" + secret_fields = [self._internal_name_to_field(k)[0] for k in secret_fields] + result = super()._get_group_secret_contents(relation, group, secret_fields) + if self.deleted_label: + result = {key: result[key] for key in result if result[key] != self.deleted_label} + if self._additional_secret_group_mapping: + return {self._field_to_internal_name(key, group): result[key] for key in result} + return result + + @either_static_or_dynamic_secrets + def _fetch_my_specific_relation_data( + self, relation: Relation, fields: Optional[List[str]] + ) -> Dict[str, str]: + """Fetch data available (directily or indirectly -- i.e. 
secrets) from the relation for owner/this_app.""" + return self._fetch_relation_data_with_secrets( + self.component, self.local_secret_fields, relation, fields + ) + + @either_static_or_dynamic_secrets + def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: + """Update data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + self._load_secrets_from_databag(relation) + + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + list(data), + self._add_or_update_relation_secrets, + data=data, + uri_to_databag=False, + ) + + normal_content = {k: v for k, v in data.items() if k in normal_fields} + self._update_relation_data_without_secrets(self.component, relation, normal_content) + + @either_static_or_dynamic_secrets + def _delete_relation_data(self, relation: Relation, fields: List[str]) -> None: + """Delete data available (directily or indirectly -- i.e. secrets) from the relation for owner/this_app.""" + self._load_secrets_from_databag(relation) + if self.local_secret_fields and self.deleted_label: + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + fields, + self._update_relation_secret, + data=dict.fromkeys(fields, self.deleted_label), + ) + else: + _, normal_fields = self._process_secret_fields( + relation, + self.local_secret_fields, + fields, + self._delete_relation_secret, + fields=fields, + ) + self._delete_relation_data_without_secrets(self.component, relation, list(normal_fields)) + + def fetch_relation_data( + self, + relation_ids: Optional[List[int]] = None, + fields: Optional[List[str]] = None, + relation_name: Optional[str] = None, + ) -> Dict[int, Dict[str, str]]: + """This method makes no sense for a Peer Relation.""" + raise NotImplementedError( + "Peer Relation only supports 'self-side' fetch methods: " + "fetch_my_relation_data() and fetch_my_relation_field()" + ) + + def fetch_relation_field( + self, 
relation_id: int, field: str, relation_name: Optional[str] = None + ) -> Optional[str]: + """This method makes no sense for a Peer Relation.""" + raise NotImplementedError( + "Peer Relation only supports 'self-side' fetch methods: " + "fetch_my_relation_data() and fetch_my_relation_field()" + ) + + ########################################################################## + # Public functions -- inherited + ########################################################################## + + fetch_my_relation_data = Data.fetch_my_relation_data + fetch_my_relation_field = Data.fetch_my_relation_field + + +class DataPeerEventHandlers(RequirerEventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: RequirerData, unique_key: str = ""): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + pass + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + +class DataPeer(DataPeerData, DataPeerEventHandlers): + """Represents peer relations.""" + + def __init__( + self, + charm, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + additional_secret_fields, + additional_secret_group_mapping, + secret_field_name, + deleted_label, + ) + DataPeerEventHandlers.__init__(self, charm, self, unique_key) + + +class DataPeerUnitData(DataPeerData): + """Unit data abstraction representation.""" + + SCOPE = Scope.UNIT + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class DataPeerUnit(DataPeerUnitData, 
DataPeerEventHandlers): + """Unit databag representation.""" + + def __init__( + self, + charm, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] = None, + deleted_label: Optional[str] = None, + unique_key: str = "", + ): + DataPeerData.__init__( + self, + charm.model, + relation_name, + additional_secret_fields, + additional_secret_group_mapping, + secret_field_name, + deleted_label, + ) + DataPeerEventHandlers.__init__(self, charm, self, unique_key) + + +class DataPeerOtherUnitData(DataPeerUnitData): + """Unit data abstraction representation.""" + + def __init__(self, unit: Unit, *args, **kwargs): + super().__init__(*args, **kwargs) + self.local_unit = unit + self.component = unit + + def update_relation_data(self, relation_id: int, data: dict) -> None: + """This method makes no sense for a Other Peer Relation.""" + raise NotImplementedError("It's not possible to update data of another unit.") + + def delete_relation_data(self, relation_id: int, fields: List[str]) -> None: + """This method makes no sense for a Other Peer Relation.""" + raise NotImplementedError("It's not possible to delete data of another unit.") + + +class DataPeerOtherUnitEventHandlers(DataPeerEventHandlers): + """Requires-side of the relation.""" + + def __init__(self, charm: CharmBase, relation_data: DataPeerUnitData): + """Manager of base client relations.""" + unique_key = f"{relation_data.relation_name}-{relation_data.local_unit.name}" + super().__init__(charm, relation_data, unique_key=unique_key) + + +class DataPeerOtherUnit(DataPeerOtherUnitData, DataPeerOtherUnitEventHandlers): + """Unit databag representation for another unit than the executor.""" + + def __init__( + self, + unit: Unit, + charm: CharmBase, + relation_name: str, + additional_secret_fields: Optional[List[str]] = [], + additional_secret_group_mapping: Dict[str, str] = {}, + secret_field_name: Optional[str] 
class RelationEventWithSecret(RelationEvent):
    """Base class for Relation Events that need to handle secrets."""

    @property
    def _secrets(self) -> dict:
        """Caching secrets to avoid fetching them each time a field is referred.

        DON'T USE the encapsulated helper variable outside of this function
        """
        # Lazily create the cache on first access; events are short-lived so
        # the cache only spans a single event emission.
        if not hasattr(self, "_cached_secrets"):
            self._cached_secrets = {}
        return self._cached_secrets

    def _get_secret(self, group) -> Optional[Dict[str, str]]:
        """Retrieving secrets.

        Fetches (and caches) the content of the provider secret for the given
        secret group, resolving the secret URI stored in the app databag under
        the ``secret-<group>`` field.
        """
        if not self.app:
            return
        if not self._secrets.get(group):
            self._secrets[group] = None
            secret_field = f"{PROV_SECRET_PREFIX}{group}"
            if secret_uri := self.relation.data[self.app].get(secret_field):
                secret = self.framework.model.get_secret(id=secret_uri)
                self._secrets[group] = secret.get_content()
        return self._secrets[group]

    @property
    def secrets_enabled(self):
        """Is this Juju version allowing for Secrets usage?"""
        return JujuVersion.from_environ().has_secrets
class EntityRequiresEvent(RelationEventWithSecret):
    """Base class for authentication fields for events.

    The amount of logic added here is not ideal -- but this was the only way to preserve
    the interface when moving to Juju Secrets
    """

    @property
    def entity_name(self) -> Optional[str]:
        """Returns the name for the created entity."""
        if not self.relation.app:
            return None

        # Prefer the Juju secret when available; fall back to the plain
        # databag for pre-secrets Juju versions.
        if self.secrets_enabled:
            secret = self._get_secret("entity")
            if secret:
                return secret.get("entity-name")

        return self.relation.data[self.relation.app].get("entity-name")

    @property
    def entity_password(self) -> Optional[str]:
        """Returns the password for the created entity."""
        if not self.relation.app:
            return None

        if self.secrets_enabled:
            secret = self._get_secret("entity")
            if secret:
                return secret.get("entity-password")

        return self.relation.data[self.relation.app].get("entity-password")
class AuthenticationEvent(RelationEventWithSecret):
    """Base class for authentication fields for events.

    The amount of logic added here is not ideal -- but this was the only way to preserve
    the interface when moving to Juju Secrets
    """

    @property
    def username(self) -> Optional[str]:
        """Returns the created username."""
        if not self.relation.app:
            return None

        # Secret first, databag second -- keeps the old (pre-secrets) contract.
        if self.secrets_enabled:
            secret = self._get_secret("user")
            if secret:
                return secret.get("username")

        return self.relation.data[self.relation.app].get("username")

    @property
    def password(self) -> Optional[str]:
        """Returns the password for the created user."""
        if not self.relation.app:
            return None

        if self.secrets_enabled:
            secret = self._get_secret("user")
            if secret:
                return secret.get("password")

        return self.relation.data[self.relation.app].get("password")

    @property
    def tls(self) -> Optional[str]:
        """Returns whether TLS is configured."""
        if not self.relation.app:
            return None

        if self.secrets_enabled:
            secret = self._get_secret("tls")
            if secret:
                return secret.get("tls")

        return self.relation.data[self.relation.app].get("tls")

    @property
    def tls_ca(self) -> Optional[str]:
        """Returns TLS CA."""
        if not self.relation.app:
            return None

        if self.secrets_enabled:
            secret = self._get_secret("tls")
            if secret:
                return secret.get("tls-ca")

        return self.relation.data[self.relation.app].get("tls-ca")


# Database related events and fields


class DatabaseProvidesEvent(RelationEvent):
    """Base class for database events."""

    @property
    def database(self) -> Optional[str]:
        """Returns the database that was requested."""
        if not self.relation.app:
            return None

        return self.relation.data[self.relation.app].get("database")


class DatabaseRequestedEvent(DatabaseProvidesEvent):
    """Event emitted when a new database is requested for use on this relation."""

    @property
    def extra_user_roles(self) -> Optional[str]:
        """Returns the extra user roles that were requested."""
        if not self.relation.app:
            return None

        return self.relation.data[self.relation.app].get("extra-user-roles")

    @property
    def external_node_connectivity(self) -> bool:
        """Returns the requested external_node_connectivity field.

        The databag stores the flag as the string "true"/"false"; anything
        other than "true" (including absence) is treated as False.
        """
        if not self.relation.app:
            return False

        return (
            self.relation.data[self.relation.app].get("external-node-connectivity", "false")
            == "true"
        )

    @property
    def requested_entity_secret_content(self) -> Optional[Dict[str, Optional[str]]]:
        """Returns the content of the requested entity secret.

        Resolves the secret URI advertised in the "requested-entity-secret"
        field and returns a one-element mapping of entity name to (possibly
        None) password, or None when absent/invalid.
        """
        names = None
        if secret_uri := self.relation.data.get(self.relation.app, {}).get(
            "requested-entity-secret"
        ):
            secret = self.framework.model.get_secret(id=secret_uri)
            # refresh=True: always read the latest revision of the secret.
            if content := secret.get_content(refresh=True):
                if "entity-name" in content:
                    names = {content["entity-name"]: content.get("password")}
                else:
                    logger.warning("Invalid requested-entity-secret: no entity name")
        return names

    @property
    def prefix_matching(self) -> Optional[str]:
        """Returns the prefix matching strategy that were requested."""
        if not self.relation.app:
            return None

        return self.relation.data[self.relation.app].get("prefix-matching")


class DatabaseEntityRequestedEvent(DatabaseProvidesEvent, EntityProvidesEvent):
    """Event emitted when a new entity is requested for use on this relation."""


class DatabaseEntityPermissionsChangedEvent(DatabaseProvidesEvent, EntityProvidesEvent):
    """Event emitted when existing entity permissions are changed on this relation."""
+ """ + + database_requested = EventSource(DatabaseRequestedEvent) + database_entity_requested = EventSource(DatabaseEntityRequestedEvent) + database_entity_permissions_changed = EventSource(DatabaseEntityPermissionsChangedEvent) + + +class DatabaseRequiresEvent(RelationEventWithSecret): + """Base class for database events.""" + + @property + def database(self) -> Optional[str]: + """Returns the database name.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("database") + + @property + def endpoints(self) -> Optional[str]: + """Returns a comma separated list of read/write endpoints. + + In VM charms, this is the primary's address. + In kubernetes charms, this is the service to the primary pod. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoints") + + @property + def read_only_endpoints(self) -> Optional[str]: + """Returns a comma separated list of read only endpoints. + + In VM charms, this is the address of all the secondary instances. + In kubernetes charms, this is the service to all replica pod instances. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("read-only-endpoints") + + @property + def replset(self) -> Optional[str]: + """Returns the replicaset name. + + MongoDB only. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("replset") + + @property + def uris(self) -> Optional[str]: + """Returns the connection URIs. + + MongoDB, Redis, OpenSearch. 
+ """ + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("uris") + + return self.relation.data[self.relation.app].get("uris") + + @property + def read_only_uris(self) -> Optional[str]: + """Returns the readonly connection URIs.""" + if not self.relation.app: + return None + + if self.secrets_enabled: + secret = self._get_secret("user") + if secret: + return secret.get("read-only-uris") + + return self.relation.data[self.relation.app].get("read-only-uris") + + @property + def version(self) -> Optional[str]: + """Returns the version of the database. + + Version as informed by the database daemon. + """ + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("version") + + @property + def prefix_databases(self) -> Optional[List[str]]: + """Returns a list of databases matching a prefix.""" + if not self.relation.app: + return None + + if prefixed_databases := self.relation.data[self.relation.app].get("prefix-databases"): + return prefixed_databases.split(",") + return [] + + +class DatabaseCreatedEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when a new database is created for use on this relation.""" + + +class DatabaseEntityCreatedEvent(EntityRequiresEvent, DatabaseRequiresEvent): + """Event emitted when a new entity is created for use on this relation.""" + + +class DatabaseEndpointsChangedEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when the read/write endpoints are changed.""" + + +class DatabaseReadOnlyEndpointsChangedEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when the read only endpoints are changed.""" + + +class DatabasePrefixDatabasesChangedEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when the prefix databases are changed.""" + + +class DatabaseRequiresEvents(RequirerCharmEvents): + """Database events. 
class DatabaseProviderData(ProviderData):
    """Provider-side data of the database relations."""

    def __init__(
        self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None
    ) -> None:
        super().__init__(model, relation_name, status_schema_path=status_schema_path)

    def set_database(self, relation_id: int, database_name: str) -> None:
        """Set database name.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            database_name: database name.
        """
        self.update_relation_data(relation_id, {"database": database_name})

    def set_prefix_databases(self, relation_id: int, databases: List[str]) -> None:
        """Set a comma separated list of databases matching a prefix.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            databases: list of database names matching the requested prefix.
        """
        # Sorted so the databag value is deterministic and diffs are stable.
        self.update_relation_data(relation_id, {"prefix-databases": ",".join(sorted(databases))})

    def set_endpoints(self, relation_id: int, connection_strings: str) -> None:
        """Set database primary connections.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        In VM charms, only the primary's address should be passed as an endpoint.
        In kubernetes charms, the service endpoint to the primary pod should be
        passed as an endpoint.

        Args:
            relation_id: the identifier for a particular relation.
            connection_strings: database hosts and ports comma separated list.
        """
        self.update_relation_data(relation_id, {"endpoints": connection_strings})

    def set_read_only_endpoints(self, relation_id: int, connection_strings: str) -> None:
        """Set database replicas connection strings.

        This function writes in the application data bag, therefore,
        only the leader unit can call it.

        Args:
            relation_id: the identifier for a particular relation.
            connection_strings: database hosts and ports comma separated list.
        """
        self.update_relation_data(relation_id, {"read-only-endpoints": connection_strings})

    def set_replset(self, relation_id: int, replset: str) -> None:
        """Set replica set name in the application relation databag.

        MongoDB only.

        Args:
            relation_id: the identifier for a particular relation.
            replset: replica set name.
        """
        self.update_relation_data(relation_id, {"replset": replset})

    def set_uris(self, relation_id: int, uris: str) -> None:
        """Set the database connection URIs in the application relation databag.

        MongoDB, Redis, and OpenSearch only.

        Args:
            relation_id: the identifier for a particular relation.
            uris: connection URIs.
        """
        self.update_relation_data(relation_id, {"uris": uris})

    def set_read_only_uris(self, relation_id: int, uris: str) -> None:
        """Set the database readonly connection URIs in the application relation databag.

        Args:
            relation_id: the identifier for a particular relation.
            uris: connection URIs.
        """
        self.update_relation_data(relation_id, {"read-only-uris": uris})

    def set_version(self, relation_id: int, version: str) -> None:
        """Set the database version in the application relation databag.

        Args:
            relation_id: the identifier for a particular relation.
            version: database version.
        """
        self.update_relation_data(relation_id, {"version": version})

    def set_subordinated(self, relation_id: int) -> None:
        """Raises the subordinated flag in the application relation databag.

        Args:
            relation_id: the identifier for a particular relation.
        """
        self.update_relation_data(relation_id, {"subordinated": "true"})
+ if "database" in diff.added and "entity-type" in diff.added: + getattr(self.on, "database_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (database name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "database" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "database_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the secret has changed.""" + pass + + +class DatabaseProvides(DatabaseProviderData, DatabaseProviderEventHandlers): + """Provider-side of the database relations.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + DatabaseProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + DatabaseProviderEventHandlers.__init__(self, charm, self) + + +class DatabaseRequirerData(RequirerData): + """Requirer-side of the database relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + database_name: str, + extra_user_roles: Optional[str] = None, + relations_aliases: Optional[List[str]] = None, + additional_secret_fields: Optional[List[str]] = [], + external_node_connectivity: bool = False, + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + requested_entity_secret: Optional[str] = None, + requested_entity_name: Optional[str] = None, + requested_entity_password: Optional[str] = None, + prefix_matching: 
Optional[str] = None, + ): + """Manager of database client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + requested_entity_secret, + requested_entity_name, + requested_entity_password, + prefix_matching, + ) + self.database = database_name + self.relations_aliases = relations_aliases + self.external_node_connectivity = external_node_connectivity + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: optional relation index to check the database + (default: 0 - first relation). + + PostgreSQL only. + """ + # Psycopg 3 is imported locally to avoid the need of its package installation + # when relating to a database charm other than PostgreSQL. + import psycopg + + # Return False if no relation is established. + if len(self.relations) == 0: + return False + + relation_id = self.relations[relation_index].id + host = self.fetch_relation_field(relation_id, "endpoints") + + # Return False if there is no endpoint available. 
+ if host is None: + return False + + host = host.split(":")[0] + + content = self.fetch_relation_data([relation_id], ["username", "password"]).get( + relation_id, {} + ) + user = content.get("username") + password = content.get("password") + + connection_string = ( + f"host='{host}' dbname='{self.database}' user='{user}' password='{password}'" + ) + try: + with psycopg.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + "SELECT TRUE FROM pg_extension WHERE extname=%s::text;", (plugin,) + ) + return cursor.fetchone() is not None + except psycopg.Error as e: + logger.exception( + f"failed to check whether {plugin} plugin is enabled in the database: %s", str(e) + ) + return False + + +class DatabaseRequirerEventHandlers(RequirerEventHandlers): + """Requires-side of the relation.""" + + on = DatabaseRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__( + self, charm: CharmBase, relation_data: DatabaseRequirerData, unique_key: str = "" + ): + """Manager of base client relations.""" + super().__init__(charm, relation_data, unique_key) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + # Define custom event names for each alias. + if self.relation_data.relations_aliases: + # Ensure the number of aliases does not exceed the maximum + # of connections allowed in the specific relation. + relation_connection_limit = self.charm.meta.requires[ + self.relation_data.relation_name + ].limit + if len(self.relation_data.relations_aliases) != relation_connection_limit: + raise ValueError( + f"The number of aliases must match the maximum number of connections allowed in the relation. 
" + f"Expected {relation_connection_limit}, got {len(self.relation_data.relations_aliases)}" + ) + + if self.relation_data.relations_aliases: + for relation_alias in self.relation_data.relations_aliases: + self.on.define_event( + f"{relation_alias}_database_created", + DatabaseCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_database_entity_created", + DatabaseEntityCreatedEvent, + ) + self.on.define_event( + f"{relation_alias}_endpoints_changed", + DatabaseEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_read_only_endpoints_changed", + DatabaseReadOnlyEndpointsChangedEvent, + ) + self.on.define_event( + f"{relation_alias}_prefix_databases_changed", + DatabasePrefixDatabasesChangedEvent, + ) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_data.relations_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_data.relation_name, relation_id) + if relation and relation.data[self.relation_data.local_unit].get("alias"): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). 
+ available_aliases = self.relation_data.relations_aliases[:] + for relation in self.charm.model.relations[self.relation_data.relation_name]: + alias = relation.data[self.relation_data.local_unit].get("alias") + if alias: + logger.debug("Alias %s was already assigned to relation %d", alias, relation.id) + available_aliases.remove(alias) + + # Set the alias in the unit relation databag of the specific relation. + relation = self.charm.model.get_relation(self.relation_data.relation_name, relation_id) + if relation: + relation.data[self.relation_data.local_unit].update({"alias": available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if self.relation_data.local_unit.is_leader(): + self.relation_data.update_relation_data(relation_id, {"alias": available_aliases[0]}) + + def _emit_aliased_event(self, event: RelationChangedEvent, event_name: str) -> None: + """Emit an aliased event to a particular relation if it has an alias. + + Args: + event: the relation changed event that was received. + event_name: the name of the event to emit. + """ + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f"{alias}_{event_name}").emit( + event.relation, app=event.app, unit=event.unit + ) + + def _get_relation_alias(self, relation_id: int) -> Optional[str]: + """Returns the relation alias. + + Args: + relation_id: the identifier for a particular relation. + + Returns: + the relation alias or None if the relation was not found. 
+ """ + for relation in self.charm.model.relations[self.relation_data.relation_name]: + if relation.id == relation_id: + return relation.data[self.relation_data.local_unit].get("alias") + return None + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + # Sets both database and extra user roles in the relation + # if the roles are provided. Otherwise, sets only the database. + if not self.relation_data.local_unit.is_leader(): + return + + event_data = {"database": self.relation_data.database} + + if self.relation_data.extra_user_roles: + event_data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + event_data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + event_data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + event_data["entity-permissions"] = self.relation_data.entity_permissions + if self.relation_data.requested_entity_secret: + event_data["requested-entity-secret"] = self.relation_data.requested_entity_secret + if self.relation_data.prefix_matching: + event_data["prefix-matching"] = self.relation_data.prefix_matching + + # Create helper secret if needed + if ( + self.relation_data.requested_entity_name + and not self.relation_data.requested_entity_secret + ): + content = {"entity-name": self.relation_data.requested_entity_name} + if self.relation_data.requested_entity_password: + content["password"] = self.relation_data.requested_entity_password + secret = self.charm.app.add_secret( + content, label=f"{self.model.uuid}-{event.relation.id}-requested-entity" + ) + secret.grant(event.relation) + if not secret.id: + raise SecretError("Secret helper missing Id") + 
event_data["requested-entity-secret"] = secret.id + + # set external-node-connectivity field + if self.relation_data.external_node_connectivity: + event_data["external-node-connectivity"] = "true" + + self.relation_data.update_relation_data(event.relation.id, event_data) + + def _clear_helper_secret(self, event: RelationChangedEvent, app_databag: Dict) -> None: + """Remove helper secret if set.""" + if ( + self.relation_data.local_unit.is_leader() + and self.relation_data.requested_entity_name + and (secret_uri := app_databag.get("requested-entity-secret")) + ): + try: + secret = self.framework.model.get_secret(id=secret_uri) + secret.remove_all_revisions() + except ModelError: + logger.debug("Unable to remove helper secret") + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + super()._on_relation_changed_event(event) + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get("subordinated") == "true" + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get("state") != "ready": + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + # Check if the database is created + # (the database charm shared the credentials). 
+ if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). + logger.info("database created at %s", datetime.now()) + getattr(self.on, "database_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # Emit the aliased event (if any). + self._emit_aliased_event(event, "database_created") + self._clear_helper_secret(event, app_databag) + + # To avoid unnecessary application restarts do not trigger other events. + return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at %s", datetime.now()) + getattr(self.on, "database_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # Emit the aliased event (if any). + self._emit_aliased_event(event, "database_entity_created") + self._clear_helper_secret(event, app_databag) + + # To avoid unnecessary application restarts do not trigger other events. + return + + for key, event_name in [ + ("endpoints", "endpoints_changed"), + ("read-only-endpoints", "read_only_endpoints_changed"), + ("prefix-databases", "prefix_databases_changed"), + ]: + # Emit a change event if the key changed. + if key in diff.added or key in diff.changed: + # Emit the default event (the one without an alias). + logger.info("%s changed on %s", key, datetime.now()) + getattr(self.on, event_name).emit(event.relation, app=event.app, unit=event.unit) + + # Emit the aliased event (if any). + self._emit_aliased_event(event, event_name) + + # To avoid unnecessary application restarts do not trigger other events. 
class DatabaseRequires(DatabaseRequirerData, DatabaseRequirerEventHandlers):
    """Requirer-side of the database relations.

    Fix: the docstring previously said "Provider-side" — copy-paste error;
    this class combines the requirer data accessor with the requirer event
    handlers into a single object a charm can instantiate directly.
    """

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        database_name: str,
        extra_user_roles: Optional[str] = None,
        relations_aliases: Optional[List[str]] = None,
        additional_secret_fields: Optional[List[str]] = None,
        external_node_connectivity: bool = False,
        extra_group_roles: Optional[str] = None,
        entity_type: Optional[str] = None,
        entity_permissions: Optional[str] = None,
        requested_entity_secret: Optional[str] = None,
        requested_entity_name: Optional[str] = None,
        requested_entity_password: Optional[str] = None,
        prefix_matching: Optional[str] = None,
    ):
        # Fix: the previous default was a mutable `[]` shared across calls;
        # normalize here so behavior is identical regardless of the parent.
        if additional_secret_fields is None:
            additional_secret_fields = []
        DatabaseRequirerData.__init__(
            self,
            charm.model,
            relation_name,
            database_name,
            extra_user_roles,
            relations_aliases,
            additional_secret_fields,
            external_node_connectivity,
            extra_group_roles,
            entity_type,
            entity_permissions,
            requested_entity_secret,
            requested_entity_name,
            requested_entity_password,
            prefix_matching,
        )
        DatabaseRequirerEventHandlers.__init__(self, charm, self)
the client.""" + if not self.relation.app: + return None + + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + secret_field = f"{PROV_SECRET_PREFIX}{SECRET_GROUPS.MTLS}" + if secret_uri := self.relation.data[self.app].get(secret_field): + secret = self.framework.model.get_secret(id=secret_uri) + content = secret.get_content(refresh=True) + if content: + return content.get("mtls-cert") + + +class KafkaClientMtlsCertUpdatedEvent(KafkaProvidesEvent): + """Event emitted when the mtls relation is updated.""" + + def __init__(self, handle, relation, old_mtls_cert: Optional[str] = None, app=None, unit=None): + super().__init__(handle, relation, app, unit) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class TopicRequestedEvent(KafkaProvidesEvent): + """Event emitted when a new topic is requested for use on this relation.""" + + @property + def extra_user_roles(self) -> Optional[str]: + """Returns the extra user roles that were requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("extra-user-roles") + + +class TopicEntityRequestedEvent(KafkaProvidesEvent, EntityProvidesEvent): + """Event emitted when a new entity is requested for use on this relation.""" + + +class TopicEntityPermissionsChangedEvent(KafkaProvidesEvent, EntityProvidesEvent): + """Event emitted when existing entity permissions are changed on this relation.""" + + +class KafkaProvidesEvents(CharmEvents): + """Kafka events. + + This class defines the events that the Kafka can emit. 
+ """ + + topic_requested = EventSource(TopicRequestedEvent) + topic_entity_requested = EventSource(TopicEntityRequestedEvent) + topic_entity_permissions_changed = EventSource(TopicEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(KafkaClientMtlsCertUpdatedEvent) + + +class KafkaRequiresEvent(RelationEvent): + """Base class for Kafka events.""" + + @property + def topic(self) -> Optional[str]: + """Returns the topic.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("topic") + + @property + def bootstrap_server(self) -> Optional[str]: + """Returns a comma-separated list of broker uris.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoints") + + @property + def consumer_group_prefix(self) -> Optional[str]: + """Returns the consumer-group-prefix.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("consumer-group-prefix") + + @property + def zookeeper_uris(self) -> Optional[str]: + """Returns a comma separated list of Zookeeper uris.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("zookeeper-uris") + + +class TopicCreatedEvent(AuthenticationEvent, KafkaRequiresEvent): + """Event emitted when a new topic is created for use on this relation.""" + + +class TopicEntityCreatedEvent(EntityRequiresEvent, KafkaRequiresEvent): + """Event emitted when a new entity is created for use on this relation.""" + + +class BootstrapServerChangedEvent(AuthenticationEvent, KafkaRequiresEvent): + """Event emitted when the bootstrap server is changed.""" + + +class KafkaRequiresEvents(RequirerCharmEvents): + """Kafka events. + + This class defines the events that the Kafka can emit. 
+ """ + + topic_created = EventSource(TopicCreatedEvent) + topic_entity_created = EventSource(TopicEntityCreatedEvent) + bootstrap_server_changed = EventSource(BootstrapServerChangedEvent) + + +# Kafka Provides and Requires + + +class KafkaProviderData(ProviderData): + """Provider-side of the Kafka relation.""" + + RESOURCE_FIELD = "topic" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_topic(self, relation_id: int, topic: str) -> None: + """Set topic name in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + topic: the topic name. + """ + self.update_relation_data(relation_id, {"topic": topic}) + + def set_bootstrap_server(self, relation_id: int, bootstrap_server: str) -> None: + """Set the bootstrap server in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + bootstrap_server: the bootstrap server address. + """ + self.update_relation_data(relation_id, {"endpoints": bootstrap_server}) + + def set_consumer_group_prefix(self, relation_id: int, consumer_group_prefix: str) -> None: + """Set the consumer group prefix in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + consumer_group_prefix: the consumer group prefix string. + """ + self.update_relation_data(relation_id, {"consumer-group-prefix": consumer_group_prefix}) + + def set_zookeeper_uris(self, relation_id: int, zookeeper_uris: str) -> None: + """Set the zookeeper uris in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + zookeeper_uris: comma-separated list of ZooKeeper server uris. 
+ """ + self.update_relation_data(relation_id, {"zookeeper-uris": zookeeper_uris}) + + +class KafkaProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Kafka relation.""" + + on = KafkaProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + + new_data_keys = list(event.relation.data[event.app].keys()) + if any(newval for newval in new_data_keys if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, new_data_keys) + + getattr(self.on, "mtls_cert_updated").emit(event.relation, app=event.app, unit=event.unit) + + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit a topic requested event if the setup key (topic name) + # was added to the relation databag, but the entity-type key was not. + if "topic" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "topic_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (topic name) + # was added to the relation databag, in addition to the entity-type key. 
+ if "topic" in diff.added and "entity-type" in diff.added: + getattr(self.on, "topic_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (topic name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "topic" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "topic_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + # mtls-cert is the only secret that can be updated + logger.info("mtls-cert updated") + getattr(self.on, "mtls_cert_updated").emit( + relation, app=relation.app, unit=remote_unit, old_mtls_cert=old_mtls_cert + ) + + +class KafkaProvides(KafkaProviderData, KafkaProviderEventHandlers): + """Provider-side of the Kafka relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, 
status_schema_path: OptionalPathLike = None + ) -> None: + KafkaProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + KafkaProviderEventHandlers.__init__(self, charm, self) + + +class KafkaRequirerData(RequirerData): + """Requirer-side of the Kafka relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + topic: str, + extra_user_roles: Optional[str] = None, + consumer_group_prefix: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + mtls_cert: Optional[str] = None, + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of Kafka client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.topic = topic + self.consumer_group_prefix = consumer_group_prefix or "" + self.mtls_cert = mtls_cert + + @staticmethod + def is_topic_value_acceptable(topic_value: str) -> bool: + """Check whether the given Kafka topic value is acceptable.""" + return "*" not in topic_value[:3] + + @property + def topic(self): + """Topic to use in Kafka.""" + return self._topic + + @topic.setter + def topic(self, value): + if not self.is_topic_value_acceptable(value): + raise ValueError(f"Error on topic '{value}', unacceptable value.") + self._topic = value + + def set_mtls_cert(self, relation_id: int, mtls_cert: str) -> None: + """Set the mtls cert in the application relation databag / secret. + + Args: + relation_id: the identifier for a particular relation. + mtls_cert: mtls cert. 
+ """ + self.update_relation_data(relation_id, {"mtls-cert": mtls_cert}) + + +class KafkaRequirerEventHandlers(RequirerEventHandlers): + """Requires-side of the Kafka relation.""" + + on = KafkaRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaRequirerData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Kafka relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + # Sets topic, extra user roles, and "consumer-group-prefix" in the relation + relation_data = {"topic": self.relation_data.topic} + + if self.relation_data.mtls_cert: + relation_data["mtls-cert"] = self.relation_data.mtls_cert + + if self.relation_data.consumer_group_prefix: + relation_data["consumer-group-prefix"] = self.relation_data.consumer_group_prefix + + if self.relation_data.extra_user_roles: + relation_data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + relation_data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + relation_data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + relation_data["entity-permissions"] = self.relation_data.entity_permissions + + self.relation_data.update_relation_data(event.relation.id, relation_data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Kafka relation has changed.""" + super()._on_relation_changed_event(event) + + # Check which data has 
changed to emit customs events. + diff = self._diff(event) + + # Check if the topic is created + # (the Kafka charm shared the credentials). + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). + logger.info("topic created at %s", datetime.now()) + getattr(self.on, "topic_created").emit(event.relation, app=event.app, unit=event.unit) + + # To avoid unnecessary application restarts do not trigger other events. + return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at %s", datetime.now()) + getattr(self.on, "topic_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an endpoints (bootstrap-server) changed event if the Kafka endpoints + # added or changed this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "bootstrap_server_changed").emit( + event.relation, app=event.app, unit=event.unit + ) # here check if this is the right design + + # To avoid unnecessary application restarts do not trigger other events. 
+        return
+
+
+class KafkaRequires(KafkaRequirerData, KafkaRequirerEventHandlers):
+    """Requirer-side of the Kafka relation."""
+
+    def __init__(
+        self,
+        charm: CharmBase,
+        relation_name: str,
+        topic: str,
+        extra_user_roles: Optional[str] = None,
+        consumer_group_prefix: Optional[str] = None,
+        additional_secret_fields: Optional[List[str]] = [],
+        mtls_cert: Optional[str] = None,
+        extra_group_roles: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        entity_permissions: Optional[str] = None,
+    ) -> None:
+        KafkaRequirerData.__init__(
+            self,
+            charm.model,
+            relation_name,
+            topic,
+            extra_user_roles=extra_user_roles,
+            consumer_group_prefix=consumer_group_prefix,
+            additional_secret_fields=additional_secret_fields,
+            mtls_cert=mtls_cert,
+            extra_group_roles=extra_group_roles,
+            entity_type=entity_type,
+            entity_permissions=entity_permissions,
+        )
+        KafkaRequirerEventHandlers.__init__(self, charm, self)
+
+
+# Karapace related events
+
+
+class KarapaceProvidesEvent(RelationEvent):
+    """Base class for Karapace events."""
+
+    @property
+    def subject(self) -> Optional[str]:
+        """Returns the subject that was requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("subject")
+
+
+class SubjectRequestedEvent(KarapaceProvidesEvent):
+    """Event emitted when a new subject is requested for use on this relation."""
+
+    @property
+    def extra_user_roles(self) -> Optional[str]:
+        """Returns the extra user roles that were requested."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("extra-user-roles")
+
+
+class SubjectEntityRequestedEvent(KarapaceProvidesEvent, EntityProvidesEvent):
+    """Event emitted when a new entity is requested for use on this relation."""
+
+
+class SubjectEntityPermissionsChangedEvent(KarapaceProvidesEvent, EntityProvidesEvent):
+    """Event emitted when existing entity permissions are changed on this relation."""
+
+
+class 
KarapaceProvidesEvents(CharmEvents): + """Karapace events. + + This class defines the events that the Karapace can emit. + """ + + subject_requested = EventSource(SubjectRequestedEvent) + subject_entity_requested = EventSource(SubjectEntityRequestedEvent) + subject_entity_permissions_changed = EventSource(SubjectEntityPermissionsChangedEvent) + + +class KarapaceRequiresEvent(RelationEvent): + """Base class for Karapace events.""" + + @property + def subject(self) -> Optional[str]: + """Returns the subject.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("subject") + + @property + def endpoints(self) -> Optional[str]: + """Returns a comma-separated list of broker uris.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("endpoints") + + +class SubjectAllowedEvent(AuthenticationEvent, KarapaceRequiresEvent): + """Event emitted when a new subject ACL is created for use on this relation.""" + + +class SubjectEntityCreatedEvent(EntityRequiresEvent, KarapaceRequiresEvent): + """Event emitted when a new entity is created for use on this relation.""" + + +class EndpointsChangedEvent(AuthenticationEvent, KarapaceRequiresEvent): + """Event emitted when the endpoints are changed.""" + + +class KarapaceRequiresEvents(RequirerCharmEvents): + """Karapace events. + + This class defines the events that Karapace can emit. 
+ """ + + subject_allowed = EventSource(SubjectAllowedEvent) + subject_entity_created = EventSource(SubjectEntityCreatedEvent) + server_changed = EventSource(EndpointsChangedEvent) + + +# Karapace Provides and Requires + + +class KarapaceProviderData(ProviderData): + """Provider-side of the Karapace relation.""" + + RESOURCE_FIELD = "subject" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_subject(self, relation_id: int, subject: str) -> None: + """Set subject name in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + subject: the subject name. + """ + self.update_relation_data(relation_id, {"subject": subject}) + + def set_endpoint(self, relation_id: int, endpoint: str) -> None: + """Set the endpoint in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + endpoint: the server address. + """ + self.update_relation_data(relation_id, {"endpoints": endpoint}) + + +class KarapaceProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Karapace relation.""" + + on = KarapaceProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KarapaceProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. 
+ diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit a subject requested event if the setup key (subject name) + # was added to the relation databag, but the entity-type key was not. + if "subject" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "subject_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (subject name) + # was added to the relation databag, in addition to the entity-type key. + if "subject" in diff.added and "entity-type" in diff.added: + getattr(self.on, "subject_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (subject name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "subject" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "subject_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. 
+        return
+
+    def _on_secret_changed_event(self, event: SecretChangedEvent):
+        """Event notifying about a new value of a secret."""
+        pass
+
+
+class KarapaceProvides(KarapaceProviderData, KarapaceProviderEventHandlers):
+    """Provider-side of the Karapace relation."""
+
+    def __init__(
+        self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None
+    ) -> None:
+        KarapaceProviderData.__init__(
+            self, charm.model, relation_name, status_schema_path=status_schema_path
+        )
+        KarapaceProviderEventHandlers.__init__(self, charm, self)
+
+
+class KarapaceRequirerData(RequirerData):
+    """Requirer-side of the Karapace relation."""
+
+    def __init__(
+        self,
+        model: Model,
+        relation_name: str,
+        subject: str,
+        extra_user_roles: Optional[str] = None,
+        additional_secret_fields: Optional[List[str]] = [],
+        extra_group_roles: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        entity_permissions: Optional[str] = None,
+    ):
+        """Manager of Karapace client relations."""
+        super().__init__(
+            model,
+            relation_name,
+            extra_user_roles,
+            additional_secret_fields,
+            extra_group_roles,
+            entity_type,
+            entity_permissions,
+        )
+        self.subject = subject
+
+    @property
+    def subject(self):
+        """Subject to use in Karapace."""
+        return self._subject
+
+    @subject.setter
+    def subject(self, value):
+        # Avoid wildcards
+        if value == "*":
+            raise ValueError(f"Error on subject '{value}', cannot be a wildcard.")
+        self._subject = value
+
+
+class KarapaceRequirerEventHandlers(RequirerEventHandlers):
+    """Requires-side of the Karapace relation."""
+
+    on = KarapaceRequiresEvents()  # pyright: ignore [reportAssignmentType]
+
+    def __init__(self, charm: CharmBase, relation_data: KarapaceRequirerData) -> None:
+        super().__init__(charm, relation_data)
+        # Just to keep lint quiet, can't resolve inheritance. 
The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Karapace relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + # Sets subject and extra user roles + relation_data = {"subject": self.relation_data.subject} + + if self.relation_data.extra_user_roles: + relation_data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + relation_data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + relation_data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + relation_data["entity-permissions"] = self.relation_data.entity_permissions + + self.relation_data.update_relation_data(event.relation.id, relation_data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Karapace relation has changed.""" + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Check if the subject ACLs are created + # (the Karapace charm shared the credentials). + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). 
+ logger.info("subject ACL created at %s", datetime.now()) + getattr(self.on, "subject_allowed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at %s", datetime.now()) + getattr(self.on, "subject_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an endpoints changed event if the Karapace endpoints added or changed + # this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "server_changed").emit( + event.relation, app=event.app, unit=event.unit + ) # here check if this is the right design + + # To avoid unnecessary application restarts do not trigger other events. 
+        return
+
+
+class KarapaceRequires(KarapaceRequirerData, KarapaceRequirerEventHandlers):
+    """Requirer-side of the Karapace relation."""
+
+    def __init__(
+        self,
+        charm: CharmBase,
+        relation_name: str,
+        subject: str,
+        extra_user_roles: Optional[str] = None,
+        additional_secret_fields: Optional[List[str]] = [],
+        extra_group_roles: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        entity_permissions: Optional[str] = None,
+    ) -> None:
+        KarapaceRequirerData.__init__(
+            self,
+            charm.model,
+            relation_name,
+            subject,
+            extra_user_roles,
+            additional_secret_fields,
+            extra_group_roles,
+            entity_type,
+            entity_permissions,
+        )
+        KarapaceRequirerEventHandlers.__init__(self, charm, self)
+
+
+# Kafka Connect Events
+
+
+class KafkaConnectProvidesEvent(RelationEvent):
+    """Base class for Kafka Connect Provider events."""
+
+    @property
+    def plugin_url(self) -> Optional[str]:
+        """Returns the REST endpoint URL which serves the connector plugin."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("plugin-url")
+
+
+class IntegrationRequestedEvent(KafkaConnectProvidesEvent):
+    """Event emitted when a new integrator boots up and is ready to serve the connector plugin."""
+
+
+class KafkaConnectProvidesEvents(CharmEvents):
+    """Kafka Connect Provider Events."""
+
+    integration_requested = EventSource(IntegrationRequestedEvent)
+
+
+class KafkaConnectRequiresEvent(AuthenticationEvent):
+    """Base class for Kafka Connect Requirer events."""
+
+    @property
+    def plugin_url(self) -> Optional[str]:
+        """Returns the REST endpoint URL which serves the connector plugin."""
+        if not self.relation.app:
+            return None
+
+        return self.relation.data[self.relation.app].get("plugin-url")
+
+
+class IntegrationCreatedEvent(KafkaConnectRequiresEvent):
+    """Event emitted when the credentials are created for this integrator."""
+
+
+class IntegrationEndpointsChangedEvent(KafkaConnectRequiresEvent):
+    """Event emitted when Kafka 
Connect REST endpoints change.""" + + +class KafkaConnectRequiresEvents(RequirerCharmEvents): + """Kafka Connect Requirer Events.""" + + integration_created = EventSource(IntegrationCreatedEvent) + integration_endpoints_changed = EventSource(IntegrationEndpointsChangedEvent) + + +class KafkaConnectProviderData(ProviderData): + """Provider-side of the Kafka Connect relation.""" + + RESOURCE_FIELD = "plugin-url" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Sets REST endpoints of the Kafka Connect service.""" + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + +class KafkaConnectProviderEventHandlers(EventHandlers): + """Provider-side implementation of the Kafka Connect event handlers.""" + + on = KafkaConnectProvidesEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaConnectProviderData) -> None: + super().__init__(charm, relation_data) + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. 
+ diff = self._diff(event) + + if "plugin-url" in diff.added: + getattr(self.on, "integration_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + +class KafkaConnectProvides(KafkaConnectProviderData, KafkaConnectProviderEventHandlers): + """Provider-side implementation of the Kafka Connect relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + KafkaConnectProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + KafkaConnectProviderEventHandlers.__init__(self, charm, self) + + +# Sentinel value passed from Kafka Connect requirer side when it does not need to serve any plugins. +PLUGIN_URL_NOT_REQUIRED: Final[str] = "NOT-REQUIRED" + + +class KafkaConnectRequirerData(RequirerData): + """Requirer-side of the Kafka Connect relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + plugin_url: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ): + """Manager of Kafka client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles=extra_user_roles, + additional_secret_fields=additional_secret_fields, + ) + self.plugin_url = plugin_url + + @property + def plugin_url(self): + """The REST endpoint URL which serves the connector plugin.""" + return self._plugin_url + + @plugin_url.setter + def plugin_url(self, value): + self._plugin_url = value + + +class KafkaConnectRequirerEventHandlers(RequirerEventHandlers): + """Requirer-side of the Kafka Connect relation.""" + + on = KafkaConnectRequiresEvents() # pyright: ignore [reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: KafkaConnectRequirerData) -> None: + super().__init__(charm, relation_data) + self.relation_data = relation_data + 
+ def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Kafka Connect relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + relation_data = {"plugin-url": self.relation_data.plugin_url} + self.relation_data.update_relation_data(event.relation.id, relation_data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + pass + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Kafka Connect relation has changed.""" + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + if self._main_credentials_shared(diff): + logger.info("integration created at %s", datetime.now()) + getattr(self.on, "integration_created").emit( + event.relation, app=event.app, unit=event.unit + ) + return + + # Emit an endpoints changed event if the provider added or + # changed this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). 
+ logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "integration_endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + return + + +class KafkaConnectRequires(KafkaConnectRequirerData, KafkaConnectRequirerEventHandlers): + """Requirer-side implementation of the Kafka Connect relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + plugin_url: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + ) -> None: + KafkaConnectRequirerData.__init__( + self, + charm.model, + relation_name, + plugin_url, + extra_user_roles=extra_user_roles, + additional_secret_fields=additional_secret_fields, + ) + KafkaConnectRequirerEventHandlers.__init__(self, charm, self) + + +# Opensearch related events + + +class OpenSearchProvidesEvent(RelationEvent): + """Base class for OpenSearch events.""" + + @property + def index(self) -> Optional[str]: + """Returns the index that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("index") + + +class IndexRequestedEvent(OpenSearchProvidesEvent): + """Event emitted when a new index is requested for use on this relation.""" + + @property + def extra_user_roles(self) -> Optional[str]: + """Returns the extra user roles that were requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("extra-user-roles") + + +class IndexEntityRequestedEvent(OpenSearchProvidesEvent, EntityProvidesEvent): + """Event emitted when a new entity is requested for use on this relation.""" + + +class IndexEntityPermissionsChangedEvent(OpenSearchProvidesEvent, EntityProvidesEvent): + """Event emitted when existing entity permissions are changed on this relation.""" + + +class OpenSearchProvidesEvents(CharmEvents): + """OpenSearch events. + + This class defines the events that OpenSearch can emit. 
+ """ + + index_requested = EventSource(IndexRequestedEvent) + index_entity_requested = EventSource(IndexEntityRequestedEvent) + index_entity_permissions_changed = EventSource(IndexEntityPermissionsChangedEvent) + + +class OpenSearchRequiresEvent(DatabaseRequiresEvent): + """Base class for OpenSearch requirer events.""" + + +class IndexCreatedEvent(AuthenticationEvent, OpenSearchRequiresEvent): + """Event emitted when a new index is created for use on this relation.""" + + +class IndexEntityCreatedEvent(EntityRequiresEvent, OpenSearchRequiresEvent): + """Event emitted when a new index is created for use on this relation.""" + + +class OpenSearchRequiresEvents(RequirerCharmEvents): + """OpenSearch events. + + This class defines the events that the opensearch requirer can emit. + """ + + index_created = EventSource(IndexCreatedEvent) + index_entity_created = EventSource(IndexEntityCreatedEvent) + endpoints_changed = EventSource(DatabaseEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationEvent) + + +# OpenSearch Provides and Requires Objects + + +class OpenSearchProvidesData(ProviderData): + """Provider-side of the OpenSearch relation.""" + + RESOURCE_FIELD = "index" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_index(self, relation_id: int, index: str) -> None: + """Set the index in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + index: the index as it is _created_ on the provider charm. This needn't match the + requested index, and can be used to present a different index name if, for example, + the requested index is invalid. + """ + self.update_relation_data(relation_id, {"index": index}) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Set the endpoints in the application relation databag. 
+ + Args: + relation_id: the identifier for a particular relation. + endpoints: the endpoint addresses for opensearch nodes. + """ + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + def set_version(self, relation_id: int, version: str) -> None: + """Set the opensearch version in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + version: database version. + """ + self.update_relation_data(relation_id, {"version": version}) + + +class OpenSearchProvidesEventHandlers(ProviderEventHandlers): + """Provider-side of the OpenSearch relation.""" + + on = OpenSearchProvidesEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: OpenSearchProvidesData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + + # Leader only + if not self.relation_data.local_unit.is_leader(): + return + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + # Emit an index requested event if the setup key (index name) + # was added to the relation databag, but the entity-type key was not. + if "index" in diff.added and "entity-type" not in diff.added: + getattr(self.on, "index_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (index name) + # was added to the relation databag, in addition to the entity-type key. 
+ if "index" in diff.added and "entity-type" in diff.added: + getattr(self.on, "index_entity_requested").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (index name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + "index" not in diff.added + and "entity-type" not in diff.added + and ("entity-permissions" in diff.added or "entity-permissions" in diff.changed) + ): + getattr(self.on, "index_entity_permissions_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + pass + + +class OpenSearchProvides(OpenSearchProvidesData, OpenSearchProvidesEventHandlers): + """Provider-side of the OpenSearch relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + OpenSearchProvidesData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + OpenSearchProvidesEventHandlers.__init__(self, charm, self) + + +class OpenSearchRequiresData(RequirerData): + """Requires data side of the OpenSearch relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + index: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of OpenSearch client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.index = index + + +class 
OpenSearchRequiresEventHandlers(RequirerEventHandlers): + """Requires events side of the OpenSearch relation.""" + + on = OpenSearchRequiresEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: OpenSearchRequiresData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the OpenSearch relation is created.""" + super()._on_relation_created_event(event) + + if not self.relation_data.local_unit.is_leader(): + return + + # Sets both index and extra user roles in the relation if the roles are provided. + # Otherwise, sets only the index. + data = {"index": self.relation_data.index} + + if self.relation_data.extra_user_roles: + data["extra-user-roles"] = self.relation_data.extra_user_roles + if self.relation_data.extra_group_roles: + data["extra-group-roles"] = self.relation_data.extra_group_roles + if self.relation_data.entity_type: + data["entity-type"] = self.relation_data.entity_type + if self.relation_data.entity_permissions: + data["entity-permissions"] = self.relation_data.entity_permissions + + self.relation_data.update_relation_data(event.relation.id, data) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + if relation.app == self.charm.app: + logging.info("Secret 
changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + logger.info("authentication updated") + getattr(self.on, "authentication_updated").emit( + relation, app=relation.app, unit=remote_unit + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the OpenSearch relation has changed. + + This event triggers individual custom events depending on the changing relation. + """ + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. + diff = self._diff(event) + + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) + updates = {"username", "password", "tls", "tls-ca", secret_field_user, secret_field_tls} + if len(set(diff._asdict().keys()) - updates) < len(diff): + logger.info("authentication updated at: %s", datetime.now()) + getattr(self.on, "authentication_updated").emit( + event.relation, app=event.app, unit=event.unit + ) + + app_databag = get_encoded_dict(event.relation, event.app, "data") + if app_databag is None: + app_databag = {} + + # Check if the index is created + # (the OpenSearch charm shares the credentials). + if self._main_credentials_shared(diff) and "entity-type" not in app_databag: + # Emit the default event (the one without an alias). + logger.info("index created at: %s", datetime.now()) + getattr(self.on, "index_created").emit(event.relation, app=event.app, unit=event.unit) + + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + if self._entity_credentials_shared(diff) and "entity-type" in app_databag: + # Emit the default event (the one without an alias). + logger.info("entity created at: %s", datetime.now()) + getattr(self.on, "index_entity_created").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a endpoints changed event if the OpenSearch application + # added or changed this info in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + # To avoid unnecessary application restarts do not trigger other events. + return + + +class OpenSearchRequires(OpenSearchRequiresData, OpenSearchRequiresEventHandlers): + """Requires-side of the OpenSearch relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + index: str, + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ) -> None: + OpenSearchRequiresData.__init__( + self, + charm.model, + relation_name, + index, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + OpenSearchRequiresEventHandlers.__init__(self, charm, self) + + +# Etcd related events + + +class EtcdProviderEvent(RelationEventWithSecret): + """Base class for Etcd events.""" + + @property + def prefix(self) -> Optional[str]: + """Returns the index that was requested.""" + if not self.relation.app: + return None + + return self.relation.data[self.relation.app].get("prefix") + + @property + def mtls_cert(self) -> Optional[str]: + """Returns TLS cert of the client.""" + if 
not self.relation.app: + return None + + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + secret_field = f"{PROV_SECRET_PREFIX}{SECRET_GROUPS.MTLS}" + if secret_uri := self.relation.data[self.app].get(secret_field): + secret = self.framework.model.get_secret(id=secret_uri) + content = secret.get_content(refresh=True) + if content: + return content.get("mtls-cert") + + +class MTLSCertUpdatedEvent(EtcdProviderEvent): + """Event emitted when the mtls relation is updated.""" + + def __init__(self, handle, relation, old_mtls_cert: Optional[str] = None, app=None, unit=None): + super().__init__(handle, relation, app, unit) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {"old_mtls_cert": self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot["old_mtls_cert"] + + +class EtcdProviderEvents(CharmEvents): + """Etcd events. + + This class defines the events that Etcd can emit. + """ + + mtls_cert_updated = EventSource(MTLSCertUpdatedEvent) + + +class EtcdReadyEvent(AuthenticationEvent, DatabaseRequiresEvent): + """Event emitted when the etcd relation is ready to be consumed.""" + + +class EtcdRequirerEvents(RequirerCharmEvents): + """Etcd events. + + This class defines the events that the etcd requirer can emit. 
+ """ + + endpoints_changed = EventSource(DatabaseEndpointsChangedEvent) + etcd_ready = EventSource(EtcdReadyEvent) + + +# Etcd Provides and Requires Objects + + +class EtcdProviderData(ProviderData): + """Provider-side of the Etcd relation.""" + + RESOURCE_FIELD = "prefix" + + def __init__( + self, model: Model, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + super().__init__(model, relation_name, status_schema_path=status_schema_path) + + def set_uris(self, relation_id: int, uris: str) -> None: + """Set the database connection URIs in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + uris: connection URIs. + """ + self.update_relation_data(relation_id, {"uris": uris}) + + def set_endpoints(self, relation_id: int, endpoints: str) -> None: + """Set the endpoints in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + endpoints: the endpoint addresses for etcd nodes "ip:port" format. + """ + self.update_relation_data(relation_id, {"endpoints": endpoints}) + + def set_version(self, relation_id: int, version: str) -> None: + """Set the etcd version in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + version: etcd API version. + """ + self.update_relation_data(relation_id, {"version": version}) + + def set_tls_ca(self, relation_id: int, tls_ca: str) -> None: + """Set the TLS CA in the application relation databag. + + Args: + relation_id: the identifier for a particular relation. + tls_ca: TLS certification authority. 
+ """ + self.update_relation_data(relation_id, {"tls-ca": tls_ca, "tls": "True"}) + + +class EtcdProviderEventHandlers(ProviderEventHandlers): + """Provider-side of the Etcd relation.""" + + on = EtcdProviderEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: EtcdProviderData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation has changed.""" + super()._on_relation_changed_event(event) + # register all new secrets with their labels + new_data_keys = list(event.relation.data[event.app].keys()) + if any(newval for newval in new_data_keys if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, new_data_keys) + + # Check which data has changed to emit customs events. 
+ diff = self._diff(event) + + # Validate entity information is not dynamically changed + self._validate_entity_consistency(event, diff) + + getattr(self.on, "mtls_cert_updated").emit(event.relation, app=event.app, unit=event.unit) + return + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + old_mtls_cert = event.secret.get_content().get("mtls-cert") + # mtls-cert is the only secret that can be updated + logger.info("mtls-cert updated") + getattr(self.on, "mtls_cert_updated").emit( + relation, app=relation.app, unit=remote_unit, old_mtls_cert=old_mtls_cert + ) + + +class EtcdProvides(EtcdProviderData, EtcdProviderEventHandlers): + """Provider-side of the Etcd relation.""" + + def __init__( + self, charm: CharmBase, relation_name: str, status_schema_path: OptionalPathLike = None + ) -> None: + EtcdProviderData.__init__( + self, charm.model, relation_name, status_schema_path=status_schema_path + ) + EtcdProviderEventHandlers.__init__(self, charm, self) + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") + + +class EtcdRequirerData(RequirerData): + """Requires data side of the Etcd relation.""" + + def __init__( + self, + model: Model, + relation_name: str, + prefix: str, + mtls_cert: 
Optional[str], + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ): + """Manager of Etcd client relations.""" + super().__init__( + model, + relation_name, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + self.prefix = prefix + self.mtls_cert = mtls_cert + + def set_mtls_cert(self, relation_id: int, mtls_cert: str) -> None: + """Set the mtls cert in the application relation databag / secret. + + Args: + relation_id: the identifier for a particular relation. + mtls_cert: mtls cert. + """ + self.update_relation_data(relation_id, {"mtls-cert": mtls_cert}) + + +class EtcdRequirerEventHandlers(RequirerEventHandlers): + """Requires events side of the Etcd relation.""" + + on = EtcdRequirerEvents() # pyright: ignore[reportAssignmentType] + + def __init__(self, charm: CharmBase, relation_data: EtcdRequirerData) -> None: + super().__init__(charm, relation_data) + # Just to keep lint quiet, can't resolve inheritance. The same happened in super().__init__() above + self.relation_data = relation_data + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the Etcd relation is created.""" + super()._on_relation_created_event(event) + + payload = { + "prefix": self.relation_data.prefix, + } + if self.relation_data.mtls_cert: + payload["mtls-cert"] = self.relation_data.mtls_cert + + self.relation_data.update_relation_data( + event.relation.id, + payload, + ) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the Etcd relation has changed. + + This event triggers individual custom events depending on the changing relation. + """ + super()._on_relation_changed_event(event) + + # Check which data has changed to emit customs events. 
+ diff = self._diff(event) + # Register all new secrets with their labels + if any(newval for newval in diff.added if self.relation_data._is_secret_field(newval)): + self.relation_data._register_secrets_to_relation(event.relation, diff.added) + + secret_field_user = self.relation_data._generate_secret_field_name(SECRET_GROUPS.USER) + secret_field_tls = self.relation_data._generate_secret_field_name(SECRET_GROUPS.TLS) + + # Emit a endpoints changed event if the etcd application added or changed this info + # in the relation databag. + if "endpoints" in diff.added or "endpoints" in diff.changed: + # Emit the default event (the one without an alias). + logger.info("endpoints changed on %s", datetime.now()) + getattr(self.on, "endpoints_changed").emit( + event.relation, app=event.app, unit=event.unit + ) + + if ( + secret_field_tls in diff.added + or secret_field_tls in diff.changed + or secret_field_user in diff.added + or secret_field_user in diff.changed + or "username" in diff.added + or "username" in diff.changed + ): + # Emit the default event (the one without an alias). 
+ logger.info("etcd ready on %s", datetime.now()) + getattr(self.on, "etcd_ready").emit(event.relation, app=event.app, unit=event.unit) + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + + relation = self.relation_data._relation_from_secret_label(event.secret.label) + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.app == self.charm.app: + logging.info("Secret changed event ignored for Secret Owner") + + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + + # secret-user or secret-tls updated + logger.info("etcd_ready updated") + getattr(self.on, "etcd_ready").emit(relation, app=relation.app, unit=remote_unit) + + +class EtcdRequires(EtcdRequirerData, EtcdRequirerEventHandlers): + """Requires-side of the Etcd relation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str, + prefix: str, + mtls_cert: Optional[str], + extra_user_roles: Optional[str] = None, + additional_secret_fields: Optional[List[str]] = [], + extra_group_roles: Optional[str] = None, + entity_type: Optional[str] = None, + entity_permissions: Optional[str] = None, + ) -> None: + EtcdRequirerData.__init__( + self, + charm.model, + relation_name, + prefix, + mtls_cert, + extra_user_roles, + additional_secret_fields, + extra_group_roles, + entity_type, + entity_permissions, + ) + EtcdRequirerEventHandlers.__init__(self, charm, self) + if not self.secrets_enabled: + raise SecretsUnavailableError("Secrets unavailable on current Juju version") diff --git a/haproxy-route-policy-operator/pyproject.toml 
b/haproxy-route-policy-operator/pyproject.toml index 3f0ddb443..cca4d5cf5 100644 --- a/haproxy-route-policy-operator/pyproject.toml +++ b/haproxy-route-policy-operator/pyproject.toml @@ -17,6 +17,7 @@ dependencies = [ "ops==3.5.2", "requests==2.32.5", "charmlibs-snap==1.0.1", + "pydantic>=2.12.5", ] [dependency-groups] diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 691b45649..ca6fee239 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -10,13 +10,22 @@ from typing import Any import ops -from charmlibs import snap as snap_lib - -import snap +from charmlibs.snap import SnapError +from charms.data_platform_libs.v0.data_interfaces import ( + DatabaseRequires, +) + +from policy import configure_snap, install_snap, run_migrations, start_gunicorn_service +from state.database import ( + DatabaseInformation, + DatabaseRelationMissingError, + DatabaseRelationNotReadyError, +) logger = logging.getLogger(__name__) -POSTGRESQL_RELATION = "postgresql" +DATABASE_RELATION = "database" +HAPROXY_ROUTE_POLICY_PORT = 8080 class HaproxyRoutePolicyCharm(ops.CharmBase): @@ -25,76 +34,45 @@ class HaproxyRoutePolicyCharm(ops.CharmBase): def __init__(self, *args: Any): super().__init__(*args) - self.framework.observe(self.on.install, self._install) - self.framework.observe(self.on.upgrade_charm, self._install) + self.framework.observe(self.on.install, self._reconcile) + self.framework.observe(self.on.upgrade_charm, self._reconcile) self.framework.observe(self.on.start, self._reconcile) self.framework.observe(self.on.config_changed, self._reconcile) - self.framework.observe(self.on[POSTGRESQL_RELATION].relation_joined, self._reconcile) - self.framework.observe(self.on[POSTGRESQL_RELATION].relation_changed, self._reconcile) - self.framework.observe(self.on[POSTGRESQL_RELATION].relation_broken, self._reconcile) - self.unit.open_port("tcp", 8080) - - def _install(self, _: 
ops.EventBase) -> None: - """Install the route-policy snap.""" - self.unit.status = ops.MaintenanceStatus("installing haproxy-route-policy snap") - try: - snap.install_snap() - except snap_lib.SnapError as exc: - logger.exception("Failed to install haproxy-route-policy snap") - self.unit.status = ops.BlockedStatus(f"snap installation failed: {exc}") - return - self._reconcile(_) + self.database = DatabaseRequires( + self, + relation_name=DATABASE_RELATION, + database_name=self.app.name, + extra_user_roles="SUPERUSER", + ) + self.framework.observe(self.database.on.database_created, self._reconcile) def _reconcile(self, _: ops.EventBase) -> None: """Reconcile snap configuration and service state.""" - credentials = self._get_postgresql_credentials() - if not credentials: - self.unit.status = ops.WaitingStatus("waiting for postgresql relation data") - return - try: + install_snap() self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") - snap.configure_snap(credentials) + database_information = DatabaseInformation.from_requirer(self, self.database) + configure_snap(database_information.haproxy_route_policy_snap_configuration) self.unit.status = ops.MaintenanceStatus("running database migrations") - snap.run_migrations() + run_migrations() self.unit.status = ops.MaintenanceStatus("starting gunicorn service") - snap.start_gunicorn_service() - except (snap_lib.SnapError, subprocess.CalledProcessError) as exc: + start_gunicorn_service() + self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) + except (SnapError, subprocess.CalledProcessError) as exc: logger.exception("Failed to reconcile haproxy-route-policy service") self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") return + except DatabaseRelationMissingError: + self.unit.status = ops.BlockedStatus("Missing database relation.") + return + except DatabaseRelationNotReadyError: + logger.exception("Database relation not ready") + self.unit.status = ops.WaitingStatus("waiting for 
complete database relation.") + return self.unit.status = ops.ActiveStatus() - def _get_postgresql_credentials(self) -> dict[str, str] | None: - """Read PostgreSQL credentials from relation databag.""" - relation = self.model.get_relation(POSTGRESQL_RELATION) - if relation is None or relation.app is None: - return None - - relation_data = relation.data[relation.app] - endpoints = relation_data.get("endpoints") - database = relation_data.get("database") - username = relation_data.get("username") - password = relation_data.get("password") - - if not all([endpoints, database, username, password]): - return None - - endpoint = str(endpoints).split(",")[0].strip() - host, _, port = endpoint.partition(":") - if not port: - port = "5432" - - return { - "database-host": host, - "database-port": port, - "database-user": str(username), - "database-password": str(password), - "database-name": str(database), - } - if __name__ == "__main__": # pragma: nocover ops.main(HaproxyRoutePolicyCharm) diff --git a/haproxy-route-policy-operator/src/snap.py b/haproxy-route-policy-operator/src/policy.py similarity index 78% rename from haproxy-route-policy-operator/src/snap.py rename to haproxy-route-policy-operator/src/policy.py index dd8e16e6e..f9b38f6e9 100644 --- a/haproxy-route-policy-operator/src/snap.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -20,16 +20,10 @@ def install_snap(channel: str = "latest/edge") -> None: package.ensure(snap.SnapState.Latest, channel=channel) -def configure_snap(config: dict[str, str | bool]) -> None: +def configure_snap(config: dict[str, Any]) -> None: """Apply snap configuration if any value changed.""" package = snap.SnapCache()[SNAP_NAME] - existing = package.get(None, typed=True) - to_set: dict[str, Any] = {} - for key, value in config.items(): - if existing.get(key) != value: - to_set[key] = value - if to_set: - package.set(to_set, typed=True) + package.set(config, typed=True) def run_migrations() -> None: diff --git 
a/haproxy-route-policy-operator/src/state/database.py b/haproxy-route-policy-operator/src/state/database.py new file mode 100644 index 000000000..9fb6bb589 --- /dev/null +++ b/haproxy-route-policy-operator/src/state/database.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Charm state for database information.""" + +import ops +from charms.data_platform_libs.v0.data_interfaces import DatabaseRequires +from pydantic import Field +from pydantic.dataclasses import dataclass + +DATABASE_RELATION = "database" + + +class DatabaseRelationMissingError(Exception): + """Raised when the database relation is missing.""" + + +class DatabaseRelationNotReadyError(Exception): + """Raised when the database relation is not ready.""" + + +@dataclass +class DatabaseInformation: + """Charm state for database information. + + Attributes: + username: Database username. + host: Database host. + port: Database port. + password: Database password. + database_name: Database name. + """ + + username: str = Field() + host: str = Field() + port: int = Field(gt=1, lt=65536) + password: str = Field() + database_name: str = Field() + + @property + def haproxy_route_policy_snap_configuration(self) -> dict[str, str]: + """Return snap configuration keys and values.""" + return { + "db-host": self.host, + "db-port": str(self.port), + "db-user": self.username, + "db-password": self.password, + "db-name": self.database_name, + } + + @classmethod + def from_requirer( + cls, charm: ops.CharmBase, database: DatabaseRequires + ) -> "DatabaseInformation": + """Create a DatabaseInformation charm state. + + Returns: + DatabaseInformation: The database information. + + Raises: + DatabaseRelationMissingError: If the database relation is missing. + DatabaseRelationNotReadyError: If the database relation is not ready. 
+ """ + relation = charm.model.get_relation(database.relation_name) + if relation is None: + raise DatabaseRelationMissingError("Database relation not found.") + + relation_data = database.fetch_relation_data()[relation.id] + endpoint = relation_data.get("endpoints") + username = relation_data.get("username") + password = relation_data.get("password") + + if endpoint is None or username is None or password is None: + raise DatabaseRelationNotReadyError("Incomplete database relation data.") + host, _, port = endpoint.partition(":") + if not port: + port = "5432" + + return cls( + username=username, + password=password, + database_name=charm.app.name, + host=host, + port=int(port), + ) diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock index 6ea5a1dad..7a47060b2 100644 --- a/haproxy-route-policy-operator/uv.lock +++ b/haproxy-route-policy-operator/uv.lock @@ -2,6 +2,15 @@ version = 1 revision = 3 requires-python = ">=3.12" +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + [[package]] name = "asttokens" version = "3.0.1" @@ -452,6 +461,7 @@ source = { virtual = "." 
} dependencies = [ { name = "charmlibs-snap" }, { name = "ops" }, + { name = "pydantic" }, { name = "requests" }, ] @@ -491,6 +501,7 @@ unit = [ requires-dist = [ { name = "charmlibs-snap", specifier = "==1.0.1" }, { name = "ops", specifier = "==3.5.2" }, + { name = "pydantic", specifier = ">=2.12.5" }, { name = "requests", specifier = "==2.32.5" }, ] @@ -1107,6 +1118,92 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels 
= [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = 
"2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, 
upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + 
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = 
"2025-11-04T13:42:59.471Z" }, +] + [[package]] name = "pygments" version = "2.20.0" @@ -1447,6 +1544,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + [[package]] name = "urllib3" version = "2.6.3" From fa8b704d6a341fe03a42f93eecf7576eb39f8aeb Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 17:43:45 +0200 Subject: [PATCH 120/201] fix unit and reformat tox --- .../tests/unit/test_charm.py | 28 ++++---- haproxy-route-policy-operator/tox.toml | 68 +++++++++++++++---- 2 files changed, 70 insertions(+), 26 deletions(-) diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 02fafa6bf..f767abbc1 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -10,10 +10,10 @@ from charm import HaproxyRoutePolicyCharm -def _postgresql_relation() -> testing.Relation: - """Build a postgresql 
relation carrying complete credentials.""" +def _database_relation() -> testing.Relation: + """Build a database relation carrying complete credentials.""" return testing.Relation( - "postgresql", + "database", remote_app_data={ "endpoints": "10.0.0.10:5432", "database": "haproxy_route_policy", @@ -25,37 +25,39 @@ def _postgresql_relation() -> testing.Relation: def test_install_without_relation_sets_waiting_status(): """ - arrange: create charm context without postgresql relation. + arrange: create charm context without database relation. act: run install event. - assert: snap install is invoked and unit waits for postgresql relation data. + assert: snap install is invoked and unit waits for database relation data. """ ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State() - with patch("charm.snap.install_snap") as install_snap_mock: + with patch("charm.install_snap") as install_snap_mock: out = ctx.run(ctx.on.install(), state) install_snap_mock.assert_called_once_with() - assert isinstance(out.unit_status, testing.WaitingStatus) + assert isinstance(out.unit_status, testing.BlockedStatus) -def test_config_changed_reconciles_snap_with_postgresql_credentials(): +def test_config_changed_reconciles_snap_with_database_credentials(): """ - arrange: create charm context with valid postgresql relation credentials. + arrange: create charm context with valid database relation credentials. act: run config-changed event. assert: snap is configured, migrations run, and service is started. 
""" ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_postgresql_relation()]) + state = testing.State(relations=[_database_relation()]) with ( - patch("charm.snap.configure_snap") as configure_mock, - patch("charm.snap.run_migrations") as migrate_mock, - patch("charm.snap.start_gunicorn_service") as start_mock, + patch("charm.install_snap") as install_snap_mock, + patch("charm.configure_snap") as configure_mock, + patch("charm.run_migrations") as migrate_mock, + patch("charm.start_gunicorn_service") as start_mock, ): out = ctx.run(ctx.on.config_changed(), state) assert out.unit_status == testing.ActiveStatus() + install_snap_mock.assert_called_once() configure_mock.assert_called_once() migrate_mock.assert_called_once() start_mock.assert_called_once() diff --git a/haproxy-route-policy-operator/tox.toml b/haproxy-route-policy-operator/tox.toml index 8bce38a1c..fb9c16a7b 100644 --- a/haproxy-route-policy-operator/tox.toml +++ b/haproxy-route-policy-operator/tox.toml @@ -3,11 +3,11 @@ skipsdist = true skip_missing_interpreters = true -requires = [ "tox>=4.21" ] +requires = ["tox>=4.21"] no_package = true [env_run_base] -passenv = [ "PYTHONPATH", "CHARM_BUILD_DIR", "MODEL_SETTINGS" ] +passenv = ["PYTHONPATH", "CHARM_BUILD_DIR", "MODEL_SETTINGS"] runner = "uv-venv-lock-runner" [env_run_base.setenv] @@ -18,27 +18,69 @@ PY_COLORS = "1" [env.fmt] description = "Apply coding style standards to code" commands = [ - [ "ruff", "check", "--fix", "--select", "I", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], - [ "ruff", "format", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], + [ + "ruff", + "check", + "--fix", + "--select", + "I", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], + [ + "ruff", + "format", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], ] -dependency_groups = [ "fmt" ] +dependency_groups = ["fmt"] [env.lint] description = "Check 
code against coding style standards" commands = [ - [ "codespell", "{toxinidir}" ], - [ "ruff", "format", "--check", "--diff", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], - [ "ruff", "check", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], - [ "mypy", { replace = "ref", of = [ "vars", "all_path" ], extend = true } ], + [ + "codespell", + "{toxinidir}", + ], + [ + "ruff", + "format", + "--check", + "--diff", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], + [ + "ruff", + "check", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], + [ + "mypy", + { replace = "ref", of = [ + "vars", + "all_path", + ], extend = true }, + ], ] -dependency_groups = [ "lint" ] +dependency_groups = ["lint"] [env.unit] description = "Run unit tests" -commands = [ [ "pytest", "-v", "{[vars]tst_path}unit" ] ] -dependency_groups = [ "unit" ] +commands = [["pytest", "-v", "{[vars]tst_path}unit"]] +dependency_groups = ["unit"] [vars] src_path = "{toxinidir}/src/" tst_path = "{toxinidir}/tests/" -all_path = [ "{toxinidir}/src", "{toxinidir}/tests" ] +all_path = ["{toxinidir}/src", "{toxinidir}/tests"] From c20eb2450ba8e9697a1e41aadc6b7f4807af58a9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 20:43:41 +0200 Subject: [PATCH 121/201] update snap config dict --- haproxy-route-policy-operator/src/state/database.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/haproxy-route-policy-operator/src/state/database.py b/haproxy-route-policy-operator/src/state/database.py index 9fb6bb589..b62e2eca3 100644 --- a/haproxy-route-policy-operator/src/state/database.py +++ b/haproxy-route-policy-operator/src/state/database.py @@ -43,11 +43,11 @@ class DatabaseInformation: def haproxy_route_policy_snap_configuration(self) -> dict[str, str]: """Return snap configuration keys and values.""" return { - "db-host": self.host, - "db-port": str(self.port), - "db-user": 
self.username, - "db-password": self.password, - "db-name": self.database_name, + "database-host": self.host, + "database-port": str(self.port), + "database-user": self.username, + "database-password": self.password, + "database-name": self.database_name, } @classmethod From 62823a966657741c1058c9918d6f9767f1ad8370 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 21:16:46 +0200 Subject: [PATCH 122/201] update tox config and add integration test --- haproxy-route-policy-operator/src/charm.py | 11 +++- haproxy-route-policy-operator/src/policy.py | 24 +++++-- .../tests/conftest.py | 13 ++++ .../tests/integration/conftest.py | 60 +++++++++++++++++ .../tests/integration/test_charm.py | 29 ++++++++ .../tests/unit/test_charm.py | 3 +- haproxy-route-policy-operator/tox.toml | 66 +++++++++++++++++-- 7 files changed, 189 insertions(+), 17 deletions(-) create mode 100644 haproxy-route-policy-operator/tests/conftest.py create mode 100644 haproxy-route-policy-operator/tests/integration/conftest.py create mode 100644 haproxy-route-policy-operator/tests/integration/test_charm.py diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index ca6fee239..b60958276 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -6,7 +6,6 @@ """haproxy-route-policy-operator charm.""" import logging -import subprocess from typing import Any import ops @@ -15,7 +14,13 @@ DatabaseRequires, ) -from policy import configure_snap, install_snap, run_migrations, start_gunicorn_service +from policy import ( + HaproxyRoutePolicyDatabaseMigrationError, + configure_snap, + install_snap, + run_migrations, + start_gunicorn_service, +) from state.database import ( DatabaseInformation, DatabaseRelationMissingError, @@ -59,7 +64,7 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.status = ops.MaintenanceStatus("starting gunicorn service") start_gunicorn_service() self.unit.open_port("tcp", 
HAPROXY_ROUTE_POLICY_PORT) - except (SnapError, subprocess.CalledProcessError) as exc: + except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: logger.exception("Failed to reconcile haproxy-route-policy service") self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") return diff --git a/haproxy-route-policy-operator/src/policy.py b/haproxy-route-policy-operator/src/policy.py index f9b38f6e9..5343c135f 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -5,12 +5,18 @@ from __future__ import annotations +import logging import subprocess # nosec from typing import Any from charmlibs import snap SNAP_NAME = "haproxy-route-policy" +logger = logging.getLogger(__name__) + + +class HaproxyRoutePolicyDatabaseMigrationError(Exception): + """Raised when database migrations fail.""" def install_snap(channel: str = "latest/edge") -> None: @@ -28,13 +34,17 @@ def configure_snap(config: dict[str, Any]) -> None: def run_migrations() -> None: """Run first-time and subsequent database migrations.""" - subprocess.run( # nosec - [f"{SNAP_NAME}.manage", "migrate", "--noinput"], - check=True, - encoding="utf-8", - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) + try: + subprocess.run( # nosec + [f"{SNAP_NAME}.manage", "migrate", "--noinput"], + check=True, + encoding="utf-8", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as e: + logger.error(f"Error running migrations: {e.output}") + raise HaproxyRoutePolicyDatabaseMigrationError("Database migrations failed") from e def start_gunicorn_service() -> None: diff --git a/haproxy-route-policy-operator/tests/conftest.py b/haproxy-route-policy-operator/tests/conftest.py new file mode 100644 index 000000000..82841ef1d --- /dev/null +++ b/haproxy-route-policy-operator/tests/conftest.py @@ -0,0 +1,13 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Fixtures for charm tests.""" + + +def pytest_addoption(parser): + """Parse additional pytest options. + + Args: + parser: Pytest parser. + """ + parser.addoption("--charm-file", action="store") diff --git a/haproxy-route-policy-operator/tests/integration/conftest.py b/haproxy-route-policy-operator/tests/integration/conftest.py new file mode 100644 index 000000000..8fe147b56 --- /dev/null +++ b/haproxy-route-policy-operator/tests/integration/conftest.py @@ -0,0 +1,60 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Fixtures for haproxy-route-policy charm integration tests.""" + +import pathlib +import typing + +import jubilant +import pytest +import yaml + +JUJU_WAIT_TIMEOUT = 10 * 60 # 10 minutes + + +@pytest.fixture(scope="session", name="charm") +def charm_fixture(pytestconfig: pytest.Config): + """Pytest fixture that returns the --charm-file.""" + charm = pytestconfig.getoption("--charm-file") + assert charm, "--charm-file must be set" + return charm + + +@pytest.fixture(scope="module", name="juju") +def juju_fixture(request: pytest.FixtureRequest): + """Pytest fixture that wraps :meth:`jubilant.with_model`.""" + model = request.config.getoption("--model") + if model: + juju = jubilant.Juju(model=model) + juju.wait_timeout = JUJU_WAIT_TIMEOUT + yield juju + return + + keep_models = typing.cast(bool, request.config.getoption("--keep-models")) + with jubilant.temp_model(keep=keep_models) as juju: + juju.wait_timeout = JUJU_WAIT_TIMEOUT + yield juju + + +@pytest.fixture(scope="module", name="application") +def application_fixture(pytestconfig: pytest.Config, juju: jubilant.Juju, charm: str): + """Deploy the haproxy-route-policy application. + + Args: + juju: Jubilant juju fixture. + charm: Path to the packed charm. + + Returns: + The haproxy-route-policy app name. 
+ """ + metadata = yaml.safe_load(pathlib.Path("./charmcraft.yaml").read_text(encoding="UTF-8")) + app_name = metadata["name"] + if pytestconfig.getoption("--no-deploy") and app_name in juju.status().apps: + return app_name + juju.deploy( + charm=charm, + app=app_name, + base="ubuntu@24.04", + ) + return app_name diff --git a/haproxy-route-policy-operator/tests/integration/test_charm.py b/haproxy-route-policy-operator/tests/integration/test_charm.py new file mode 100644 index 000000000..38a5ab2c1 --- /dev/null +++ b/haproxy-route-policy-operator/tests/integration/test_charm.py @@ -0,0 +1,29 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Basic integration tests for the haproxy-route-policy charm.""" + +import jubilant +import pytest + + +@pytest.mark.abort_on_fail +def test_charm_becomes_active_after_relation_with_postgresql( + application: str, juju: jubilant.Juju +): + """Test blocked->active transition after integrating with PostgreSQL. + + Args: + application: The deployed haproxy-route-policy application name. + juju: The Juju instance. + + Assert: + The charm is blocked before relation and active after relating with PostgreSQL. 
+ """ + postgresql_app = "postgresql" + juju.deploy("postgresql", app=postgresql_app, channel="16/edge", base="ubuntu@24.04") + + juju.wait(lambda status: jubilant.all_blocked(status, application)) + + juju.integrate(f"{application}:database", f"{postgresql_app}:database") + juju.wait(lambda status: jubilant.all_active(status, application, postgresql_app)) diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index f767abbc1..87f66bcea 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -18,7 +18,8 @@ def _database_relation() -> testing.Relation: "endpoints": "10.0.0.10:5432", "database": "haproxy_route_policy", "username": "policy", - "password": "secret", + # Ignore bandit warning as this is for testing. + "password": "secret", # nosec }, ) diff --git a/haproxy-route-policy-operator/tox.toml b/haproxy-route-policy-operator/tox.toml index fb9c16a7b..d23cbc68a 100644 --- a/haproxy-route-policy-operator/tox.toml +++ b/haproxy-route-policy-operator/tox.toml @@ -1,8 +1,9 @@ -# Copyright 2026 Canonical Ltd. +# Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. 
skipsdist = true skip_missing_interpreters = true +envlist = ["lint", "unit", "static", "coverage-report"] requires = ["tox>=4.21"] no_package = true @@ -11,7 +12,7 @@ passenv = ["PYTHONPATH", "CHARM_BUILD_DIR", "MODEL_SETTINGS"] runner = "uv-venv-lock-runner" [env_run_base.setenv] -PYTHONPATH = "{toxinidir}:{toxinidir}/lib:{[vars]src_path}" +PYTHONPATH = "{toxinidir}/src:{toxinidir}/lib" PYTHONBREAKPOINT = "ipdb.set_trace" PY_COLORS = "1" @@ -22,8 +23,7 @@ commands = [ "ruff", "check", "--fix", - "--select", - "I", + "--fix-only", { replace = "ref", of = [ "vars", "all_path", @@ -77,10 +77,64 @@ dependency_groups = ["lint"] [env.unit] description = "Run unit tests" -commands = [["pytest", "-v", "{[vars]tst_path}unit"]] +commands = [ + [ + "coverage", + "run", + "--source={[vars]src_path}", + "-m", + "pytest", + "--ignore={[vars]tst_path}integration", + "-v", + "--tb", + "native", + "--log-cli-level=INFO", + "-s", + { replace = "posargs", extend = "true" }, + ], + [ + "coverage", + "report", + ], +] dependency_groups = ["unit"] +[env.coverage-report] +description = "Create test coverage report" +commands = [["coverage", "report"]] +dependency_groups = ["coverage-report"] + +[env.static] +description = "Run static analysis tests" +commands = [ + [ + "bandit", + "-c", + "{toxinidir}/pyproject.toml", + "-r", + "{[vars]src_path}", + "{[vars]tst_path}", + ], +] +dependency_groups = ["static"] + +[env.integration] +description = "Run integration tests" +commands = [ + [ + "pytest", + "-v", + "--tb", + "native", + "--ignore={[vars]tst_path}unit", + "--log-cli-level=INFO", + "-s", + { replace = "posargs", extend = "true" }, + ], +] +dependency_groups = ["integration"] + [vars] src_path = "{toxinidir}/src/" tst_path = "{toxinidir}/tests/" -all_path = ["{toxinidir}/src", "{toxinidir}/tests"] +all_path = ["{toxinidir}/src/", "{toxinidir}/tests/"] From ca6449c07ff83700c9c46551420d15b106c13c37 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 21:24:17 +0200 
Subject: [PATCH 123/201] add change artifact --- docs/release-notes/artifacts/pr0421.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0421.yaml diff --git a/docs/release-notes/artifacts/pr0421.yaml b/docs/release-notes/artifacts/pr0421.yaml new file mode 100644 index 000000000..a50b31886 --- /dev/null +++ b/docs/release-notes/artifacts/pr0421.yaml @@ -0,0 +1,21 @@ +version_schema: 2 + +changes: + - title: Bootstrapped MVP for haproxy-route-policy-operator charm + author: tphan025 + type: minor + description: > + Added a new machine charm, `haproxy-route-policy-operator`, as an MVP to + manage the haproxy-route-policy service. The charm installs and configures + the `haproxy-route-policy` snap from PostgreSQL relation data, runs + database migrations, starts the gunicorn snap service, and opens port 8080. + Included charm metadata/configuration, PostgreSQL relation handling state, + supporting policy helpers, vendored data-platform relation library, and + initial unit and integration tests. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/421 + related_doc: + related_issue: + visibility: public + highlight: false From d4d8ec3906d1697e03957b937da898bbca7d6d60 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 21:25:10 +0200 Subject: [PATCH 124/201] run integration tests for haproxy-route-policy-operator --- .github/workflows/integration_test.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index e52198215..37a929c22 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -32,6 +32,9 @@ jobs: - name: haproxy-ddos-protection-configurator working-directory: ./haproxy-ddos-protection-configurator modules: '["test_charm.py"]' + - name: haproxy-route-policy-operator + working-directory: ./haproxy-route-policy-operator + modules: '["test_charm.py"]' name: Integration tests for ${{ matrix.charm.name }} uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit From 7a2a4fd7b7fd690157c6fb38efb627499c3b2b66 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 22:18:50 +0200 Subject: [PATCH 125/201] add secret handling, update tests --- haproxy-route-policy-operator/src/charm.py | 76 +++++++++++++++- haproxy-route-policy-operator/src/policy.py | 26 ++++++ .../tests/unit/test_charm.py | 90 ++++++++++++++++++- 3 files changed, 188 insertions(+), 4 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index b60958276..feff31fae 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -6,6 +6,7 @@ """haproxy-route-policy-operator charm.""" import logging +import secrets from typing import Any import ops @@ -17,6 +18,7 @@ from policy import ( HaproxyRoutePolicyDatabaseMigrationError, configure_snap, + create_or_update_user, install_snap, 
run_migrations, start_gunicorn_service, @@ -31,6 +33,16 @@ DATABASE_RELATION = "database" HAPROXY_ROUTE_POLICY_PORT = 8080 +DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" +DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" + + +class DjangoSecretKeyMissingError(Exception): + """Raised when the Django secret key is not generated by the leader unit.""" + + +class DjangoAdminCredentialsMissingError(Exception): + """Raised when the Django admin credentials are not generated by the leader unit.""" class HaproxyRoutePolicyCharm(ops.CharmBase): @@ -58,11 +70,22 @@ def _reconcile(self, _: ops.EventBase) -> None: install_snap() self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") database_information = DatabaseInformation.from_requirer(self, self.database) - configure_snap(database_information.haproxy_route_policy_snap_configuration) + configure_snap( + { + **self._get_django_secret_key(), + **database_information.haproxy_route_policy_snap_configuration, + } + ) self.unit.status = ops.MaintenanceStatus("running database migrations") run_migrations() + + self.unit.status = ops.MaintenanceStatus("Updating Django admin user.") + username, password = self._get_django_admin_credentials().values() + create_or_update_user(username, password) + self.unit.status = ops.MaintenanceStatus("starting gunicorn service") start_gunicorn_service() + self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: logger.exception("Failed to reconcile haproxy-route-policy service") @@ -75,9 +98,60 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Database relation not ready") self.unit.status = ops.WaitingStatus("waiting for complete database relation.") return + except (DjangoSecretKeyMissingError, DjangoAdminCredentialsMissingError): + logger.exception("Django shared configuration not ready") + self.unit.status = ops.WaitingStatus("Waiting for leader to set shared 
configuration.") + return self.unit.status = ops.ActiveStatus() + def _get_django_secret_key(self) -> dict[str, str]: + """Get the Django secret key from the charm's config. + + Returns: + The Django secret key. + + Raises: + DjangoSecretKeyMissingError: If the secret key is not yet created by the leader. + """ + try: + secret = self.model.get_secret(label=DJANGO_SECRET_KEY_SECRET_LABEL) + return secret.get_content() + except ops.SecretNotFoundError: + if self.unit.is_leader(): + django_secret_key_data = {"secret-key": secrets.token_urlsafe(32)} + secret = self.app.add_secret( + label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data + ) + return django_secret_key_data + raise DjangoSecretKeyMissingError( + "Waiting for the leader unit to generate the Django secret key." + ) + + def _get_django_admin_credentials(self) -> dict[str, str]: + """Get the Django admin user from the charm's config. + + Returns: + The Django admin user. + """ + try: + secret = self.model.get_secret(label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL) + return secret.get_content() + except ops.SecretNotFoundError: + if self.unit.is_leader(): + django_admin_credentials_data = { + "username": "admin", + "password": secrets.token_urlsafe(32), + } + secret = self.app.add_secret( + label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + content=django_admin_credentials_data, + ) + return django_admin_credentials_data + raise DjangoAdminCredentialsMissingError( + "Waiting for the leader unit to generate the Django admin credentials." 
+ ) + if __name__ == "__main__": # pragma: nocover ops.main(HaproxyRoutePolicyCharm) diff --git a/haproxy-route-policy-operator/src/policy.py b/haproxy-route-policy-operator/src/policy.py index 5343c135f..54d4cc798 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -51,3 +51,29 @@ def start_gunicorn_service() -> None: """Ensure the snap gunicorn app is running.""" package = snap.SnapCache()[SNAP_NAME] package.start() + + +def create_or_update_user(username: str, password: str) -> None: + """Create or update the HTTP proxy policy superuser. + + Args: + username: The username. + password: The password. + + Raises: + RuntimeError: If the action failed. + """ + try: + subprocess.run( # nosec + [f"{SNAP_NAME}.manage", "upsertsuperuser"], + env={ + "DJANGO_SUPERUSER_PASSWORD": password, + "DJANGO_SUPERUSER_USERNAME": username, + }, + check=True, + encoding="utf-8", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as e: + raise RuntimeError(f"failed to create/update Django user: {e.stdout}") from e diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 87f66bcea..21b782836 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -5,9 +5,14 @@ from unittest.mock import patch +import pytest from ops import testing -from charm import HaproxyRoutePolicyCharm +from charm import ( + DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + DJANGO_SECRET_KEY_SECRET_LABEL, + HaproxyRoutePolicyCharm, +) def _database_relation() -> testing.Relation: @@ -33,7 +38,9 @@ def test_install_without_relation_sets_waiting_status(): ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State() - with patch("charm.install_snap") as install_snap_mock: + with ( + patch("charm.install_snap") as install_snap_mock, + ): out = ctx.run(ctx.on.install(), 
state) install_snap_mock.assert_called_once_with() @@ -47,13 +54,25 @@ def test_config_changed_reconciles_snap_with_database_credentials(): assert: snap is configured, migrations run, and service is started. """ ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()]) + state = testing.State( + relations=[_database_relation()], + secrets=[ + testing.Secret( + label=DJANGO_SECRET_KEY_SECRET_LABEL, tracked_content={"secret-key": "test"} + ), + testing.Secret( + label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + tracked_content={"username": "admin", "password": "admin"}, + ), + ], + ) with ( patch("charm.install_snap") as install_snap_mock, patch("charm.configure_snap") as configure_mock, patch("charm.run_migrations") as migrate_mock, patch("charm.start_gunicorn_service") as start_mock, + patch("charm.create_or_update_user") as create_or_update_user_mock, ): out = ctx.run(ctx.on.config_changed(), state) @@ -62,3 +81,68 @@ def test_config_changed_reconciles_snap_with_database_credentials(): configure_mock.assert_called_once() migrate_mock.assert_called_once() start_mock.assert_called_once() + create_or_update_user_mock.assert_called_once() + + +@pytest.mark.parametrize( + "secrets", + [ + pytest.param( + [ + testing.Secret( + label=DJANGO_SECRET_KEY_SECRET_LABEL, tracked_content={"secret-key": "test"} + ) + ], + id="missing-admin-credentials", + ), + pytest.param( + [ + testing.Secret( + label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + tracked_content={"username": "admin", "password": "admin"}, + ) + ], + id="missing-secret-key", + ), + ], +) +def test_config_changed_missing_secrets(secrets): + """ + arrange: create charm context with valid database relation credentials. + act: run config-changed event. + assert: snap is configured, migrations run, and service is started. 
+ """ + ctx = testing.Context(HaproxyRoutePolicyCharm) + state = testing.State(relations=[_database_relation()], secrets=secrets) + + with ( + patch("charm.install_snap"), + patch("charm.configure_snap"), + patch("charm.run_migrations"), + ): + out = ctx.run(ctx.on.config_changed(), state) + + assert out.unit_status == testing.WaitingStatus( + "Waiting for leader to set shared configuration." + ) + + +def test_config_changed_leader_create_secrets(): + """ + arrange: create charm context with valid database relation credentials. + act: run config-changed event. + assert: snap is configured, migrations run, and service is started. + """ + ctx = testing.Context(HaproxyRoutePolicyCharm) + state = testing.State(relations=[_database_relation()], secrets=[], leader=True) + + with ( + patch("charm.install_snap"), + patch("charm.configure_snap"), + patch("charm.run_migrations"), + patch("charm.start_gunicorn_service"), + patch("charm.create_or_update_user"), + ): + out = ctx.run(ctx.on.config_changed(), state) + + assert len(list(out.secrets)) == 2 From e1312a71194cf9e00c32890b1754a071cc390b71 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 31 Mar 2026 22:23:29 +0200 Subject: [PATCH 126/201] ignore bandit rules --- haproxy-route-policy-operator/src/charm.py | 5 +++-- haproxy-route-policy-operator/tests/unit/test_charm.py | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index feff31fae..92617805c 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -33,8 +33,9 @@ DATABASE_RELATION = "database" HAPROXY_ROUTE_POLICY_PORT = 8080 -DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" -DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" +# Ignore bandit warnings here as these are labels +DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec +DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = 
"django-admin-credentials" # nosec class DjangoSecretKeyMissingError(Exception): diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 21b782836..ac7b2d703 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -62,7 +62,8 @@ def test_config_changed_reconciles_snap_with_database_credentials(): ), testing.Secret( label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, - tracked_content={"username": "admin", "password": "admin"}, + # Ignore bandit warning as this is for testing. + tracked_content={"username": "admin", "password": "admin"}, # nosec ), ], ) @@ -99,7 +100,8 @@ def test_config_changed_reconciles_snap_with_database_credentials(): [ testing.Secret( label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, - tracked_content={"username": "admin", "password": "admin"}, + # Ignore bandit warning as this is for testing. + tracked_content={"username": "admin", "password": "admin"}, # nosec ) ], id="missing-secret-key", From 9b51388fce5d9f9c1b2c7d0ea7404f5b633a3a37 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 1 Apr 2026 10:29:19 +0200 Subject: [PATCH 127/201] add action, fix issue with command run --- haproxy-route-policy-operator/charmcraft.yaml | 11 +++++++++++ haproxy-route-policy-operator/src/charm.py | 19 +++++++++++++++++++ haproxy-route-policy-operator/src/policy.py | 2 ++ .../tests/integration/test_charm.py | 4 ++++ 4 files changed, 36 insertions(+) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index a44df3f51..6ebf0591d 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -40,6 +40,17 @@ requires: limit: 1 optional: false +actions: + get-admin-credentials: + description: Retrieve the admin credentials to call the HAProxy Route Policy API. + output: + username: + description: The admin username. 
+ type: string + password: + description: The admin password. + type: string + charm-libs: - lib: data_platform_libs.data_interfaces version: "0" diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 92617805c..695ad44f8 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -56,6 +56,9 @@ def __init__(self, *args: Any): self.framework.observe(self.on.upgrade_charm, self._reconcile) self.framework.observe(self.on.start, self._reconcile) self.framework.observe(self.on.config_changed, self._reconcile) + self.framework.observe( + self.on.get_admin_credentials_action, self._on_get_admin_credentials_action + ) self.database = DatabaseRequires( self, @@ -153,6 +156,22 @@ def _get_django_admin_credentials(self) -> dict[str, str]: "Waiting for the leader unit to generate the Django admin credentials." ) + def _on_get_admin_credentials_action(self, event: ops.ActionEvent) -> None: + """Handle the get-admin-credentials action.""" + try: + secret = self.model.get_secret( + label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL + ).get_content() + event.set_results( + { + "username": secret["username"], + "password": secret["password"], + } + ) + return + except ops.SecretNotFoundError: + event.fail("Admin credentials not found.") + if __name__ == "__main__": # pragma: nocover ops.main(HaproxyRoutePolicyCharm) diff --git a/haproxy-route-policy-operator/src/policy.py b/haproxy-route-policy-operator/src/policy.py index 54d4cc798..30b4bfe89 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -6,6 +6,7 @@ from __future__ import annotations import logging +import os import subprocess # nosec from typing import Any @@ -67,6 +68,7 @@ def create_or_update_user(username: str, password: str) -> None: subprocess.run( # nosec [f"{SNAP_NAME}.manage", "upsertsuperuser"], env={ + **os.environ, "DJANGO_SUPERUSER_PASSWORD": password, 
"DJANGO_SUPERUSER_USERNAME": username, }, diff --git a/haproxy-route-policy-operator/tests/integration/test_charm.py b/haproxy-route-policy-operator/tests/integration/test_charm.py index 38a5ab2c1..bb0f73ec2 100644 --- a/haproxy-route-policy-operator/tests/integration/test_charm.py +++ b/haproxy-route-policy-operator/tests/integration/test_charm.py @@ -27,3 +27,7 @@ def test_charm_becomes_active_after_relation_with_postgresql( juju.integrate(f"{application}:database", f"{postgresql_app}:database") juju.wait(lambda status: jubilant.all_active(status, application, postgresql_app)) + + result = juju.run(f"{application}/0", "get-admin-credentials") + assert result.results["username"] == "admin" + assert len(result.results["password"]) == 16 From c7b9a8862ee04b45331c495d9406c3e0cb11b480 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 1 Apr 2026 10:29:59 +0200 Subject: [PATCH 128/201] add change artifact --- docs/release-notes/artifacts/pr0422.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0422.yaml diff --git a/docs/release-notes/artifacts/pr0422.yaml b/docs/release-notes/artifacts/pr0422.yaml new file mode 100644 index 000000000..a4be78b95 --- /dev/null +++ b/docs/release-notes/artifacts/pr0422.yaml @@ -0,0 +1,21 @@ +version_schema: 2 + +changes: + - title: Added leader-managed Django secrets and admin user bootstrap for route-policy operator + author: tphan025 + type: minor + description: > + Updated the `haproxy-route-policy-operator` charm to generate and share + Django runtime secrets from the leader unit, including a secret key and + admin credentials. During reconcile, the charm now configures the snap + with the generated secret key and upserts the Django admin user via the + snap management command. Non-leader units now wait until shared secrets are + available. Added unit tests covering reconcile behavior with existing, + missing, and leader-generated secrets. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/422 + related_doc: + related_issue: + visibility: public + highlight: false From 8f2758807974329b019b6c32191646833405d70f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 1 Apr 2026 10:35:54 +0200 Subject: [PATCH 129/201] add upsertsuperuser command --- .../policy/management/__init__.py | 2 ++ .../policy/management/commands/__init__.py | 2 ++ .../management/commands/upsertsuperuser.py | 34 +++++++++++++++++++ 3 files changed, 38 insertions(+) create mode 100644 haproxy-route-policy/policy/management/__init__.py create mode 100644 haproxy-route-policy/policy/management/commands/__init__.py create mode 100644 haproxy-route-policy/policy/management/commands/upsertsuperuser.py diff --git a/haproxy-route-policy/policy/management/__init__.py b/haproxy-route-policy/policy/management/__init__.py new file mode 100644 index 000000000..fa89e9d7f --- /dev/null +++ b/haproxy-route-policy/policy/management/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/haproxy-route-policy/policy/management/commands/__init__.py b/haproxy-route-policy/policy/management/commands/__init__.py new file mode 100644 index 000000000..fa89e9d7f --- /dev/null +++ b/haproxy-route-policy/policy/management/commands/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. diff --git a/haproxy-route-policy/policy/management/commands/upsertsuperuser.py b/haproxy-route-policy/policy/management/commands/upsertsuperuser.py new file mode 100644 index 000000000..6079857d0 --- /dev/null +++ b/haproxy-route-policy/policy/management/commands/upsertsuperuser.py @@ -0,0 +1,34 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +import os + +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth import get_user_model + +User = get_user_model() + + +class Command(BaseCommand): + help = "Update or create a superuser" + + def handle(self, *args, **options): + try: + username = os.environ["DJANGO_SUPERUSER_USERNAME"] + except KeyError: + raise CommandError("environment variable DJANGO_SUPERUSER_USERNAME not set") + try: + password = os.environ["DJANGO_SUPERUSER_PASSWORD"] + except KeyError: + raise CommandError("environment variable DJANGO_SUPERUSER_PASSWORD not set") + email = os.environ.get("DJANGO_SUPERUSER_EMAIL", "") + user, created = User.objects.get_or_create(username=username) + if created or not user.check_password(password): + user.set_password(password) + user.is_staff = True + user.is_superuser = True + user.email = email + user.save() + self.stdout.write( + self.style.SUCCESS(f"successfully updated/created user {username}") + ) From 5de029c821917a7f5d348863d31e368c5d8d6c16 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 1 Apr 2026 11:08:19 +0200 Subject: [PATCH 130/201] block until peer relation and set a value after creating a secret to trigger peer unit reconcile --- haproxy-route-policy-operator/src/charm.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 695ad44f8..7d109191e 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -36,6 +36,7 @@ # Ignore bandit warnings here as these are labels DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec +PEER_RELATION_NAME = "haproxy-route-policy-peer" class DjangoSecretKeyMissingError(Exception): @@ -59,6 +60,8 @@ def __init__(self, *args: Any): self.framework.observe( self.on.get_admin_credentials_action, 
self._on_get_admin_credentials_action ) + self.framework.observe(self.on[PEER_RELATION_NAME].relation_joined, self._reconcile) + self.framework.observe(self.on[PEER_RELATION_NAME].relation_changed, self._reconcile) self.database = DatabaseRequires( self, @@ -70,13 +73,18 @@ def __init__(self, *args: Any): def _reconcile(self, _: ops.EventBase) -> None: """Reconcile snap configuration and service state.""" + peer_relation = self.model.get_relation(PEER_RELATION_NAME) + if not peer_relation: + self.unit.status = ops.WaitingStatus("Waiting for peer relation.") + return + try: install_snap() self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") database_information = DatabaseInformation.from_requirer(self, self.database) configure_snap( { - **self._get_django_secret_key(), + **self._get_django_secret_key(peer_relation), **database_information.haproxy_route_policy_snap_configuration, } ) @@ -84,7 +92,7 @@ def _reconcile(self, _: ops.EventBase) -> None: run_migrations() self.unit.status = ops.MaintenanceStatus("Updating Django admin user.") - username, password = self._get_django_admin_credentials().values() + username, password = self._get_django_admin_credentials(peer_relation).values() create_or_update_user(username, password) self.unit.status = ops.MaintenanceStatus("starting gunicorn service") @@ -109,7 +117,7 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.status = ops.ActiveStatus() - def _get_django_secret_key(self) -> dict[str, str]: + def _get_django_secret_key(self, peer_relation: ops.Relation) -> dict[str, str]: """Get the Django secret key from the charm's config. 
Returns: @@ -127,12 +135,15 @@ def _get_django_secret_key(self) -> dict[str, str]: secret = self.app.add_secret( label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data ) + peer_relation.data[self.app]["django-secret-key"] = ( + secret.label or DJANGO_SECRET_KEY_SECRET_LABEL + ) return django_secret_key_data raise DjangoSecretKeyMissingError( "Waiting for the leader unit to generate the Django secret key." ) - def _get_django_admin_credentials(self) -> dict[str, str]: + def _get_django_admin_credentials(self, peer_relation: ops.Relation) -> dict[str, str]: """Get the Django admin user from the charm's config. Returns: @@ -151,6 +162,9 @@ def _get_django_admin_credentials(self) -> dict[str, str]: label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, content=django_admin_credentials_data, ) + peer_relation.data[self.app]["django-admin-credentials"] = ( + secret.label or DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL + ) return django_admin_credentials_data raise DjangoAdminCredentialsMissingError( "Waiting for the leader unit to generate the Django admin credentials." 
From 49174ce5539287e1a3162f7622bc48428529283b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 00:59:17 +0200 Subject: [PATCH 131/201] add peer relation definition and only run migration/update user if leader --- haproxy-route-policy-operator/charmcraft.yaml | 4 ++++ haproxy-route-policy-operator/src/charm.py | 12 +++++++----- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 6ebf0591d..9dfdf7da9 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -54,3 +54,7 @@ actions: charm-libs: - lib: data_platform_libs.data_interfaces version: "0" + +peers: + haproxy-route-policy-peer: + interface: haproxy_route_policy_peer diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 7d109191e..a2c386d55 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -88,12 +88,14 @@ def _reconcile(self, _: ops.EventBase) -> None: **database_information.haproxy_route_policy_snap_configuration, } ) - self.unit.status = ops.MaintenanceStatus("running database migrations") - run_migrations() - self.unit.status = ops.MaintenanceStatus("Updating Django admin user.") - username, password = self._get_django_admin_credentials(peer_relation).values() - create_or_update_user(username, password) + if self.unit.is_leader(): + self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") + run_migrations() + + self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") + username, password = self._get_django_admin_credentials(peer_relation).values() + create_or_update_user(username, password) self.unit.status = ops.MaintenanceStatus("starting gunicorn service") start_gunicorn_service() From f81615e3bd4157e2b1310bdf9fbc8a45086e662c Mon Sep 17 00:00:00 2001 From: tphan025 Date: 
Thu, 2 Apr 2026 12:12:42 +0200 Subject: [PATCH 132/201] move juju secret handling to charm state, add charm state for policy related values, add unit tests --- haproxy-route-policy-operator/charmcraft.yaml | 9 + haproxy-route-policy-operator/pyproject.toml | 1 + haproxy-route-policy-operator/src/charm.py | 100 +++-------- .../src/state/policy.py | 164 ++++++++++++++++++ .../tests/unit/test_charm.py | 42 +++-- .../test_haproxy_route_policy_information.py | 118 +++++++++++++ haproxy-route-policy-operator/uv.lock | 11 ++ 7 files changed, 357 insertions(+), 88 deletions(-) create mode 100644 haproxy-route-policy-operator/src/state/policy.py create mode 100644 haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 9dfdf7da9..2e1d324cc 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -58,3 +58,12 @@ charm-libs: peers: haproxy-route-policy-peer: interface: haproxy_route_policy_peer + +config: + options: + allowed-hosts: + type: string + description: A comma-separated list of host/domain names that the haproxy-route-policy API + can serve. This configuration will set the DJANGO_ALLOWED_HOSTS environment + variable with its content being a JSON encoded list. 
+ default: "0.0.0.0" \ No newline at end of file diff --git a/haproxy-route-policy-operator/pyproject.toml b/haproxy-route-policy-operator/pyproject.toml index cca4d5cf5..1d558099b 100644 --- a/haproxy-route-policy-operator/pyproject.toml +++ b/haproxy-route-policy-operator/pyproject.toml @@ -18,6 +18,7 @@ dependencies = [ "requests==2.32.5", "charmlibs-snap==1.0.1", "pydantic>=2.12.5", + "validators>=0.35.0", ] [dependency-groups] diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index a2c386d55..b73cc6c03 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -6,7 +6,6 @@ """haproxy-route-policy-operator charm.""" import logging -import secrets from typing import Any import ops @@ -28,23 +27,19 @@ DatabaseRelationMissingError, DatabaseRelationNotReadyError, ) +from state.policy import ( + DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + PEER_RELATION_NAME, + DjangoAdminCredentialsMissingError, + DjangoSecretKeyMissingError, + HaproxyRoutePolicyInformation, + PeerRelationMissingError, +) logger = logging.getLogger(__name__) DATABASE_RELATION = "database" HAPROXY_ROUTE_POLICY_PORT = 8080 -# Ignore bandit warnings here as these are labels -DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec -DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec -PEER_RELATION_NAME = "haproxy-route-policy-peer" - - -class DjangoSecretKeyMissingError(Exception): - """Raised when the Django secret key is not generated by the leader unit.""" - - -class DjangoAdminCredentialsMissingError(Exception): - """Raised when the Django admin credentials are not generated by the leader unit.""" class HaproxyRoutePolicyCharm(ops.CharmBase): @@ -73,18 +68,14 @@ def __init__(self, *args: Any): def _reconcile(self, _: ops.EventBase) -> None: """Reconcile snap configuration and service state.""" - peer_relation = self.model.get_relation(PEER_RELATION_NAME) - if not 
peer_relation: - self.unit.status = ops.WaitingStatus("Waiting for peer relation.") - return - try: install_snap() self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") database_information = DatabaseInformation.from_requirer(self, self.database) + haproxy_route_policy_information = HaproxyRoutePolicyInformation.from_charm(self) configure_snap( { - **self._get_django_secret_key(peer_relation), + **haproxy_route_policy_information.allowed_hosts_snap_configuration, **database_information.haproxy_route_policy_snap_configuration, } ) @@ -94,17 +85,15 @@ def _reconcile(self, _: ops.EventBase) -> None: run_migrations() self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") - username, password = self._get_django_admin_credentials(peer_relation).values() - create_or_update_user(username, password) + create_or_update_user( + haproxy_route_policy_information.admin_username, + haproxy_route_policy_information.admin_password, + ) self.unit.status = ops.MaintenanceStatus("starting gunicorn service") start_gunicorn_service() self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) - except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: - logger.exception("Failed to reconcile haproxy-route-policy service") - self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") - return except DatabaseRelationMissingError: self.unit.status = ops.BlockedStatus("Missing database relation.") return @@ -112,66 +101,21 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Database relation not ready") self.unit.status = ops.WaitingStatus("waiting for complete database relation.") return + except PeerRelationMissingError: + logger.exception("Peer relation missing") + self.unit.status = ops.WaitingStatus("Waiting for peer relation.") + return except (DjangoSecretKeyMissingError, DjangoAdminCredentialsMissingError): logger.exception("Django shared configuration not ready") self.unit.status = 
ops.WaitingStatus("Waiting for leader to set shared configuration.") return + except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: + logger.exception("Failed to reconcile haproxy-route-policy service") + self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") + return self.unit.status = ops.ActiveStatus() - def _get_django_secret_key(self, peer_relation: ops.Relation) -> dict[str, str]: - """Get the Django secret key from the charm's config. - - Returns: - The Django secret key. - - Raises: - DjangoSecretKeyMissingError: If the secret key is not yet created by the leader. - """ - try: - secret = self.model.get_secret(label=DJANGO_SECRET_KEY_SECRET_LABEL) - return secret.get_content() - except ops.SecretNotFoundError: - if self.unit.is_leader(): - django_secret_key_data = {"secret-key": secrets.token_urlsafe(32)} - secret = self.app.add_secret( - label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data - ) - peer_relation.data[self.app]["django-secret-key"] = ( - secret.label or DJANGO_SECRET_KEY_SECRET_LABEL - ) - return django_secret_key_data - raise DjangoSecretKeyMissingError( - "Waiting for the leader unit to generate the Django secret key." - ) - - def _get_django_admin_credentials(self, peer_relation: ops.Relation) -> dict[str, str]: - """Get the Django admin user from the charm's config. - - Returns: - The Django admin user. 
- """ - try: - secret = self.model.get_secret(label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL) - return secret.get_content() - except ops.SecretNotFoundError: - if self.unit.is_leader(): - django_admin_credentials_data = { - "username": "admin", - "password": secrets.token_urlsafe(32), - } - secret = self.app.add_secret( - label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, - content=django_admin_credentials_data, - ) - peer_relation.data[self.app]["django-admin-credentials"] = ( - secret.label or DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL - ) - return django_admin_credentials_data - raise DjangoAdminCredentialsMissingError( - "Waiting for the leader unit to generate the Django admin credentials." - ) - def _on_get_admin_credentials_action(self, event: ops.ActionEvent) -> None: """Handle the get-admin-credentials action.""" try: diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py new file mode 100644 index 000000000..a552c9335 --- /dev/null +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Charm state for database information.""" + +import json +import secrets +from typing import Annotated, cast + +import ops +from pydantic import BeforeValidator, Field, IPvAnyAddress +from pydantic.dataclasses import dataclass +from validators import domain + +# Ignore bandit warnings here as these are labels +DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec +DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec +PEER_RELATION_NAME = "haproxy-route-policy-peer" +SECRET_LENGTH = 32 + + +class DjangoSecretKeyMissingError(Exception): + """Raised when the Django secret key is not generated by the leader unit.""" + + +class DjangoAdminCredentialsMissingError(Exception): + """Raised when the Django admin credentials are not generated by the leader unit.""" + + +class PeerRelationMissingError(Exception): + """Raised when the peer relation is missing.""" + + +def valid_fqdn(value: str) -> str: + """Validate if value is a valid fqdn. TLDs are not allowed. + + Raises: + ValueError: When value is not a valid domain. + + Args: + value: The value to validate. + """ + if not bool(domain(value)): + raise ValueError(f"Invalid domain: {value}") + return value + + +FQDN = Annotated[str, BeforeValidator(valid_fqdn)] + + +@dataclass +class HaproxyRoutePolicyInformation: + """Charm state for HAProxy Route Policy information. + + Attributes: + allowed_hosts: List of allowed hosts. + admin_username: Django admin username. + admin_password: Django admin password. + secret_key: Django secret key. 
+ """ + + allowed_hosts: list[FQDN | IPvAnyAddress] = Field() + admin_username: str = Field() + admin_password: str = Field() + secret_key: str = Field() + + @property + def allowed_hosts_snap_configuration(self) -> dict[str, str]: + """Return snap configuration keys and values.""" + return { + "allowed-hosts": json.dumps(self.allowed_hosts), + } + + @classmethod + def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": + """Create a HaproxyRoutePolicyInformation charm state. + + Returns: + HaproxyRoutePolicyInformation: The information. + + Raises: + PeerRelationMissingError: If the peer relation is missing. + """ + peer_relation = charm.model.get_relation(PEER_RELATION_NAME) + if not peer_relation: + raise PeerRelationMissingError("Peer relation is missing.") + + allowed_hosts = ( + [ + cast(IPvAnyAddress | FQDN, address) + for address in cast(str, charm.config.get("allowed-hosts")).split(",") + ] + if charm.config.get("allowed-hosts") + else [] + ) + admin_username, admin_password = _get_django_admin_credentials( + charm, peer_relation + ).values() + secret_key = _get_django_secret_key(charm, peer_relation)["secret-key"] + return cls( + allowed_hosts=allowed_hosts, + admin_username=admin_username, + admin_password=admin_password, + secret_key=secret_key, + ) + + +def _get_django_admin_credentials( + charm: ops.CharmBase, peer_relation: ops.Relation +) -> dict[str, str]: + """Get the Django admin user from the charm's config. + + Returns: + The Django admin user. 
+ """ + try: + secret = charm.model.get_secret(label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL) + return secret.get_content() + except ops.SecretNotFoundError: + if charm.unit.is_leader(): + django_admin_credentials_data = { + "username": "admin", + "password": secrets.token_urlsafe(SECRET_LENGTH), + } + secret = charm.app.add_secret( + label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + content=django_admin_credentials_data, + ) + peer_relation.data[charm.app]["django-admin-credentials"] = ( + secret.label or DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL + ) + return django_admin_credentials_data + raise DjangoAdminCredentialsMissingError( + "Waiting for the leader unit to generate the Django admin credentials." + ) + + +def _get_django_secret_key(charm: ops.CharmBase, peer_relation: ops.Relation) -> dict[str, str]: + """Get the Django secret key from the charm's config. + + Returns: + The Django secret key. + + Raises: + DjangoSecretKeyMissingError: If the secret key is not yet created by the leader. + """ + try: + secret = charm.model.get_secret(label=DJANGO_SECRET_KEY_SECRET_LABEL) + return secret.get_content() + except ops.SecretNotFoundError: + if charm.unit.is_leader(): + django_secret_key_data = {"secret-key": secrets.token_urlsafe(SECRET_LENGTH)} + secret = charm.app.add_secret( + label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data + ) + peer_relation.data[charm.app]["django-secret-key"] = ( + secret.label or DJANGO_SECRET_KEY_SECRET_LABEL + ) + return django_secret_key_data + raise DjangoSecretKeyMissingError( + "Waiting for the leader unit to generate the Django secret key." 
+ ) diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index ac7b2d703..fa85b5ab6 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -3,15 +3,17 @@ """Unit tests for haproxy-route-policy-operator charm.""" +import secrets from unittest.mock import patch import pytest from ops import testing -from charm import ( +from charm import HaproxyRoutePolicyCharm +from state.policy import ( DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, DJANGO_SECRET_KEY_SECRET_LABEL, - HaproxyRoutePolicyCharm, + SECRET_LENGTH, ) @@ -29,6 +31,11 @@ def _database_relation() -> testing.Relation: ) +def _peer_relation() -> testing.PeerRelation: + """Build a peer relation.""" + return testing.PeerRelation("haproxy-route-policy-peer") + + def test_install_without_relation_sets_waiting_status(): """ arrange: create charm context without database relation. @@ -47,7 +54,14 @@ def test_install_without_relation_sets_waiting_status(): assert isinstance(out.unit_status, testing.BlockedStatus) -def test_config_changed_reconciles_snap_with_database_credentials(): +@pytest.mark.parametrize( + "is_leader", + [ + pytest.param(True, id="leader-unit"), + pytest.param(False, id="non-leader-unit"), + ], +) +def test_config_changed_reconciles_snap_with_database_credentials(is_leader): """ arrange: create charm context with valid database relation credentials. act: run config-changed event. 
@@ -55,17 +69,22 @@ def test_config_changed_reconciles_snap_with_database_credentials(): """ ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State( - relations=[_database_relation()], + relations=[_database_relation(), _peer_relation()], secrets=[ testing.Secret( - label=DJANGO_SECRET_KEY_SECRET_LABEL, tracked_content={"secret-key": "test"} + label=DJANGO_SECRET_KEY_SECRET_LABEL, + tracked_content={"secret-key": secrets.token_urlsafe(SECRET_LENGTH)}, ), testing.Secret( label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, # Ignore bandit warning as this is for testing. - tracked_content={"username": "admin", "password": "admin"}, # nosec + tracked_content={ + "username": "admin", + "password": secrets.token_urlsafe(SECRET_LENGTH), + }, # nosec ), ], + leader=is_leader, ) with ( @@ -80,9 +99,10 @@ def test_config_changed_reconciles_snap_with_database_credentials(): assert out.unit_status == testing.ActiveStatus() install_snap_mock.assert_called_once() configure_mock.assert_called_once() - migrate_mock.assert_called_once() start_mock.assert_called_once() - create_or_update_user_mock.assert_called_once() + if is_leader: + migrate_mock.assert_called_once() + create_or_update_user_mock.assert_called_once() @pytest.mark.parametrize( @@ -115,7 +135,7 @@ def test_config_changed_missing_secrets(secrets): assert: snap is configured, migrations run, and service is started. """ ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()], secrets=secrets) + state = testing.State(relations=[_database_relation(), _peer_relation()], secrets=secrets) with ( patch("charm.install_snap"), @@ -136,7 +156,9 @@ def test_config_changed_leader_create_secrets(): assert: snap is configured, migrations run, and service is started. 
""" ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()], secrets=[], leader=True) + state = testing.State( + relations=[_database_relation(), _peer_relation()], secrets=[], leader=True + ) with ( patch("charm.install_snap"), diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py new file mode 100644 index 000000000..1d2928d74 --- /dev/null +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py @@ -0,0 +1,118 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Unit tests for HAProxy Route Policy state dataclass.""" + +from typing import Any, cast + +import pytest +from pydantic import ValidationError + +from state.policy import HaproxyRoutePolicyInformation + + +def _build_state(allowed_hosts: list[str]) -> HaproxyRoutePolicyInformation: + """Build a valid state instance with overridable allowed hosts.""" + return HaproxyRoutePolicyInformation( + allowed_hosts=cast(list[Any], allowed_hosts), + admin_username="admin", + # Ignore bandit warning as this is for testing. + admin_password="secret", # nosec + secret_key="test-secret-key", + ) + + +@pytest.mark.parametrize( + "allowed_hosts, expected_allowed_hosts", + [ + pytest.param([], [], id="empty-list"), + pytest.param(["example.com"], ["example.com"], id="single-fqdn"), + pytest.param( + ["example.com", "api.example.com"], + ["example.com", "api.example.com"], + id="multiple-fqdn", + ), + pytest.param(["10.0.0.10"], ["10.0.0.10"], id="ipv4-address"), + pytest.param(["2001:db8::1"], ["2001:db8::1"], id="ipv6-address"), + ], +) +def test_haproxy_route_policy_information_init_valid_allowed_hosts( + allowed_hosts: list[str], expected_allowed_hosts: list[str] +): + """ + arrange: prepare valid host inputs. + act: initialize HaproxyRoutePolicyInformation. 
+ assert: initialization succeeds and normalized hosts are stored. + """ + state = _build_state(allowed_hosts) + + assert [str(host) for host in state.allowed_hosts] == expected_allowed_hosts + + +@pytest.mark.parametrize( + "allowed_hosts", + [ + pytest.param(["invalid host"], id="space-in-host"), + pytest.param(["http://example.com"], id="url-not-host"), + pytest.param(["exa_mple.com"], id="underscore-in-label"), + ], +) +def test_haproxy_route_policy_information_init_invalid_allowed_hosts(allowed_hosts: list[str]): + """ + arrange: prepare invalid host inputs. + act: initialize HaproxyRoutePolicyInformation. + assert: pydantic validation error is raised. + """ + with pytest.raises(ValidationError): + _build_state(allowed_hosts) + + +@pytest.mark.parametrize( + "field_name, field_value", + [ + pytest.param("admin_username", None, id="missing-admin-username"), + pytest.param("admin_password", None, id="missing-admin-password"), + pytest.param("secret_key", None, id="missing-secret-key"), + ], +) +def test_haproxy_route_policy_information_init_rejects_none_string_fields( + field_name: str, field_value: None +): + """ + arrange: build state payload with missing required string field. + act: initialize HaproxyRoutePolicyInformation. + assert: pydantic validation error is raised. + """ + payload = { + "allowed_hosts": ["example.com"], + "admin_username": "admin", + # Ignore bandit warning as this is for testing. 
+ "admin_password": "secret", # nosec + "secret_key": "test-secret-key", + } + payload[field_name] = field_value + + with pytest.raises(ValidationError): + HaproxyRoutePolicyInformation(**payload) + + +@pytest.mark.parametrize( + "allowed_hosts, expected", + [ + pytest.param([], {"allowed-hosts": "[]"}, id="empty"), + pytest.param( + ["example.com", "api.example.com"], + {"allowed-hosts": '["example.com", "api.example.com"]'}, + id="multiple-fqdn", + ), + ], +) +def test_allowed_hosts_snap_configuration(allowed_hosts: list[str], expected: dict[str, str]): + """ + arrange: initialize state with valid allowed hosts. + act: read snap configuration property. + assert: allowed-hosts is serialized to expected JSON string. + """ + state = _build_state(allowed_hosts) + + assert state.allowed_hosts_snap_configuration == expected diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock index 7a47060b2..314d25858 100644 --- a/haproxy-route-policy-operator/uv.lock +++ b/haproxy-route-policy-operator/uv.lock @@ -463,6 +463,7 @@ dependencies = [ { name = "ops" }, { name = "pydantic" }, { name = "requests" }, + { name = "validators" }, ] [package.dev-dependencies] @@ -503,6 +504,7 @@ requires-dist = [ { name = "ops", specifier = "==3.5.2" }, { name = "pydantic", specifier = ">=2.12.5" }, { name = "requests", specifier = "==2.32.5" }, + { name = "validators", specifier = ">=0.35.0" }, ] [package.metadata.requires-dev] @@ -1565,6 +1567,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] +[[package]] +name = "validators" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399, upload-time = "2025-05-01T05:42:06.7Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, +] + [[package]] name = "wcwidth" version = "0.6.0" From 75135c3aee4259405479245dfae1841c30b2f30f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 12:22:26 +0200 Subject: [PATCH 133/201] cast to string before dumping to json --- haproxy-route-policy-operator/src/state/policy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index a552c9335..654b80e7e 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -70,7 +70,7 @@ class HaproxyRoutePolicyInformation: def allowed_hosts_snap_configuration(self) -> dict[str, str]: """Return snap configuration keys and values.""" return { - "allowed-hosts": json.dumps(self.allowed_hosts), + "allowed-hosts": json.dumps([str(host) for host in self.allowed_hosts]), } @classmethod From 1d5f17cb55f048708c8fa8bff47c5fddd1c841d8 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 14:41:20 +0200 Subject: [PATCH 134/201] add change artifact --- docs/release-notes/artifacts/pr0423.yaml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0423.yaml diff --git a/docs/release-notes/artifacts/pr0423.yaml b/docs/release-notes/artifacts/pr0423.yaml new file mode 100644 index 000000000..56031316f --- /dev/null +++ 
b/docs/release-notes/artifacts/pr0423.yaml @@ -0,0 +1,23 @@ +version_schema: 2 + +changes: + - title: Added allowed-hosts configuration support to haproxy-route-policy-operator + author: tphan025 + type: minor + description: > + Added a new `allowed-hosts` charm configuration option for + `haproxy-route-policy-operator` and wired it into snap configuration as a + JSON-encoded `DJANGO_ALLOWED_HOSTS` value. Introduced a dedicated policy + state module to centralize shared operator information (allowed hosts, + admin credentials, secret key), including validation of hostnames/IP + addresses. Updated charm reconcile flow to consume the new state model and + refined leader/non-leader behavior for migration and admin-user updates. + Added unit tests for allowed-hosts validation/serialization and updated + charm unit tests for the new peer/state behavior. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/423 + related_doc: + related_issue: + visibility: public + highlight: false From 95245124ca1c163797d729b5e8165a717be15aff Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 16:07:57 +0200 Subject: [PATCH 135/201] bootstrap lib --- haproxy-route-policy-operator/charmcraft.yaml | 2 + .../lib/charms/haproxy/v2/haproxy_route.py | 1609 +++++++++++++++++ .../v0/haproxy_route_policy.py | 61 + 3 files changed, 1672 insertions(+) create mode 100644 haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py create mode 100644 haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 2e1d324cc..6bab61a00 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -54,6 +54,8 @@ actions: charm-libs: - lib: data_platform_libs.data_interfaces version: "0" + - lib: haproxy.haproxy_route + version: "2" peers: haproxy-route-policy-peer: diff --git 
a/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py b/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py new file mode 100644 index 000000000..795c815ad --- /dev/null +++ b/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py @@ -0,0 +1,1609 @@ +# pylint: disable=too-many-lines +"""Haproxy-route interface library. + +## Getting Started + +To get started using the library, you just need to fetch the library using `charmcraft`. + +```shell +cd some-charm +charmcraft fetch-lib charms.haproxy.v2.haproxy_route +``` + +In the `metadata.yaml` of the charm, add the following: + +```yaml +requires: + backend: + interface: haproxy-route + limit: 1 +``` + +Then, to initialise the library: + +```python +from charms.haproxy.v2.haproxy_route import HaproxyRouteRequirer + +class SomeCharm(CharmBase): + def __init__(self, *args): + # ... + + # There are 2 ways you can use the requirer implementation: + # 1. To initialize the requirer with parameters: + self.haproxy_route_requirer = HaproxyRouteRequirer(self, + relation_name=, + service=, + ports=, + protocol=, + hosts=, + paths=, + hostname=, + additional_hostnames=, + check_interval=, + check_rise=, + check_fall=, + check_path=, + check_port=, + path_rewrite_expressions=, list of path rewrite expressions, + query_rewrite_expressions=, list of query rewrite expressions, + header_rewrite_expressions=, list of (header_name, rewrite_expression), + load_balancing_algorithm=, defaults to "leastconn", + load_balancing_cookie=, only used when load_balancing_algorithm is cookie + load_balancing_consistent_hashing=, to enable consistent hashing, + defaults to False, + rate_limit_connections_per_minute=, + rate_limit_policy=, + upload_limit=, + download_limit=, + retry_count=, + retry_redispatch=, + deny_paths=, + server_timeout=, + connect_timeout=, + queue_timeout=, + server_maxconn=, + unit_address=, + http_server_close=, + ) + + # 2.To initialize the requirer with no parameters, i.e + 
# self.haproxy_route_requirer = HaproxyRouteRequirer(self) + # This will simply initialize the requirer class and it won't perfom any action. + + # Afterwards regardless of how you initialized the requirer you can call the + # provide_haproxy_route_requirements method anywhere in your charm to update the requirer data. + # The method takes the same number of parameters as the requirer class. + # provide_haproxy_route_requirements(address=, port=, ...) + + self.framework.observe( + self.framework.on.config_changed, self._on_config_changed + ) + self.framework.observe( + self.haproxy_route_requirer.on.ready, self._on_endpoints_ready + ) + self.framework.observe( + self.haproxy_route_requirer.on.removed, self._on_endpoints_removed + ) + + def _on_config_changed(self, event: ConfigChangedEvent) -> None: + self.haproxy_route_requirer.provide_haproxy_route_requirements(...) + + def _on_endpoints_ready(self, _: EventBase) -> None: + # Handle endpoints ready event + ... + + def _on_endpoints_removed(self, _: EventBase) -> None: + # Handle endpoints removed event + ... + +## Using the library as the provider +The provider charm should expose the interface as shown below: +```yaml +provides: + haproxy-route: + interface: haproxy-route +``` +Note that this interface supports relating to multiple endpoints. + +Then, to initialise the library: +```python +from charms.haproxy.v2.haproxy_route import HaproxyRouteProvider + +class SomeCharm(CharmBase): + self.haproxy_route_provider = HaproxyRouteProvider(self) + self.framework.observe( + self.haproxy_route_provider.on.data_available, self._on_haproxy_route_data_available + ) + + def _on_haproxy_route_data_available(self, event: EventBase) -> None: + data = self.haproxy_route_provider.get_data(self.haproxy_route_provider.relations) + ... 
"""

import json
import logging
from collections import defaultdict
from enum import Enum
from functools import partial
from typing import Annotated, Any, Literal, MutableMapping, Optional, cast

from ops import CharmBase, ModelError, RelationBrokenEvent
from ops.charm import CharmEvents
from ops.framework import EventBase, EventSource, Object
from ops.model import Relation
from pydantic import (
    AnyHttpUrl,
    BaseModel,
    BeforeValidator,
    ConfigDict,
    Field,
    IPvAnyAddress,
    ValidationError,
    field_validator,
    model_validator,
)
from pydantic.dataclasses import dataclass
from typing_extensions import Self
from validators import domain

# The unique Charmhub library identifier, never change it
LIBID = "08b6347482f6455486b5f5bb4dc4e6cf"

# Increment this major API version when introducing breaking changes
LIBAPI = 2

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 1

logger = logging.getLogger(__name__)
HAPROXY_ROUTE_RELATION_NAME = "haproxy-route"
# Characters that must never appear in values rendered into an haproxy config file
# (comment marker, quoting, escapes, whitespace, variable expansion).
HAPROXY_CONFIG_INVALID_CHARACTERS = "\n\t#\\'\"\r$ "
# Rewrite expressions are less restricted: only a newline could break the config line.
HAPROXY_EXPR_INVALID_CHARACTERS = "\n"


def value_contains_invalid_characters(
    invalid_characters: str, value: Optional[str]
) -> Optional[str]:
    """Validate if value contains invalid config characters.

    Args:
        invalid_characters: String with the list of invalid characters.
        value: The value to validate.

    Raises:
        ValueError: When value contains invalid characters.

    Returns:
        The validated value, unchanged (None passes through).
    """
    if value is None:
        return value

    if [char for char in value if char in invalid_characters]:
        raise ValueError(f"Relation data contains invalid character(s) {value}")
    return value


def valid_domain_with_wildcard(value: str) -> str:
    """Validate if value is a valid domain that can include a wildcard.

    The wildcard character (*) can't be at the TLD level, for example *.com is not valid.
    This is supported natively by the library ( e.g domain("com") will raise a ValidationError ).

    Raises:
        ValueError: When value is not a valid domain.

    Args:
        value: The value to validate.
    """
    # Strip a leading "*." so the remaining FQDN is validated on its own.
    fqdn = value[2:] if value.startswith("*.") else value
    if not bool(domain(fqdn)):
        raise ValueError(f"Invalid domain: {value}")
    return value


# Annotated string types: reject characters that would corrupt the rendered
# haproxy configuration (VALIDSTR) or a rewrite expression (VALIDEXPRSTR).
VALIDSTR = Annotated[
    str,
    BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_CONFIG_INVALID_CHARACTERS)),
]
VALIDEXPRSTR = Annotated[
    str,
    BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_EXPR_INVALID_CHARACTERS)),
]


class DataValidationError(Exception):
    """Raised when data validation fails."""


class HaproxyRouteInvalidRelationDataError(Exception):
    """Raised when data validation of the haproxy-route relation fails."""


class _DatabagModel(BaseModel):
    """Base databag model.

    Serializes/deserializes pydantic models to and from a Juju relation
    databag, where every value must be a JSON-encoded string.

    Attrs:
        model_config: pydantic model configuration.
    """

    model_config = ConfigDict(
        # tolerate additional keys in databag
        extra="ignore",
        # Allow instantiating this class by field name (instead of forcing alias).
        populate_by_name=True,
        # Custom config key: whether to nest the whole datastructure (as json)
        # under a field or spread it out at the toplevel.
        _NEST_UNDER=None,
    )  # type: ignore
    """Pydantic config."""

    @classmethod
    def load(cls, databag: MutableMapping) -> "_DatabagModel":
        """Load this model from a Juju json databag.

        Args:
            databag: Databag content.

        Raises:
            DataValidationError: When model validation failed.

        Returns:
            _DatabagModel: The validated model.
        """
        nest_under = cls.model_config.get("_NEST_UNDER")
        if nest_under:
            # Whole model is stored as one JSON document under a single key.
            return cls.model_validate(json.loads(databag[nest_under]))

        try:
            data = {
                k: json.loads(v)
                for k, v in databag.items()
                # Don't attempt to parse model-external values
                if k in {(f.alias or n) for n, f in cls.model_fields.items()}
            }
        except json.JSONDecodeError as e:
            msg = f"invalid databag contents: expecting json. {databag}"
            logger.error(msg)
            raise DataValidationError(msg) from e

        try:
            # Round-trip through JSON so pydantic applies the same coercion
            # rules as for data coming straight off the wire.
            return cls.model_validate_json(json.dumps(data))
        except ValidationError as e:
            msg = f"failed to validate databag: {databag}"
            logger.error(str(e), exc_info=True)
            raise DataValidationError(msg) from e

    @classmethod
    def from_dict(cls, values: dict) -> "_DatabagModel":
        """Load this model from a dict.

        Args:
            values: Dict values.

        Raises:
            DataValidationError: When model validation failed.

        Returns:
            _DatabagModel: The validated model.
        """
        try:
            logger.info("Loading values from dictionary: %s", values)
            return cls.model_validate(values)
        except ValidationError as e:
            msg = f"failed to validate: {values}"
            logger.debug(msg, exc_info=True)
            raise DataValidationError(msg) from e

    def dump(
        self, databag: Optional[MutableMapping] = None, clear: bool = True
    ) -> Optional[MutableMapping]:
        """Write the contents of this model to Juju databag.

        Args:
            databag: The databag to write to. A new dict is used when None.
            clear: Whether to clear the databag before writing.

        Returns:
            MutableMapping: The databag.
        """
        if clear and databag:
            databag.clear()

        if databag is None:
            databag = {}
        nest_under = self.model_config.get("_NEST_UNDER")
        if nest_under:
            databag[nest_under] = self.model_dump_json(
                by_alias=True,
                # skip keys whose values are default
                exclude_defaults=True,
            )
            return databag

        # Spread fields at the top level; every value is JSON-encoded
        # individually as Juju requires string values.
        dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True)
        databag.update({k: json.dumps(v) for k, v in dct.items()})
        return databag


class ServerHealthCheck(BaseModel):
    """Configuration model for backend server health checks.

    Attributes:
        interval: Number of seconds between consecutive health check attempts.
        rise: Number of consecutive successful health checks required for up.
        fall: Number of consecutive failed health checks required for DOWN.
        path: List of URL paths to use for HTTP health checks.
        port: Customize port value for http-check.
    """

    interval: Optional[int] = Field(
        description="The interval (in seconds) between health checks.", default=None
    )
    rise: Optional[int] = Field(
        description="How many successful health checks before server is considered up.",
        default=None,
    )
    fall: Optional[int] = Field(
        description="How many failed health checks before server is considered down.", default=None
    )
    path: Optional[VALIDSTR] = Field(description="The health check path.", default=None)
    port: Optional[int] = Field(description="The health check port.", default=None)

    @model_validator(mode="after")
    def check_all_required_fields_set(self) -> Self:
        """Check that all required fields for health check are set.

        Raises:
            ValueError: When validation fails.

        Returns:
            The validated model.
        """
        # NOTE(review): bool() treats 0 the same as None here, so an explicit
        # interval/rise/fall of 0 counts as "unset" — confirm this is intended.
        if not bool(self.interval) == bool(self.rise) == bool(self.fall):
            raise ValueError("All three of interval, rise and fall must be set.")
        return self


# tarpit is not yet implemented
class RateLimitPolicy(Enum):
    """Enum of possible rate limiting policies.

    Attrs:
        DENY: deny a client's HTTP request to return a 403 Forbidden error.
        REJECT: closes the connection immediately without sending a response.
        SILENT: disconnects immediately without notifying the client
            that the connection has been closed.
    """

    DENY = "deny"
    REJECT = "reject"
    SILENT = "silent-drop"


class RateLimit(BaseModel):
    """Configuration model for connection rate limiting.

    Attributes:
        connections_per_minute: Number of connections allowed per minute for a client.
        policy: Action to take when the rate limit is exceeded.
    """

    connections_per_minute: int = Field(description="How many connections are allowed per minute.")
    policy: RateLimitPolicy = Field(
        description="Configure the rate limit policy.", default=RateLimitPolicy.DENY
    )


class LoadBalancingAlgorithm(Enum):
    """Enum of possible load balancing algorithms.

    Attrs:
        LEASTCONN: The server with the lowest number of connections receives the connection.
        SRCIP: Load balance using the hash of the source IP address.
        ROUNDROBIN: Each server is used in turns, according to their weights.
        COOKIE: Load balance using hash req.cookie(clientid).
    """

    LEASTCONN = "leastconn"
    SRCIP = "source"
    ROUNDROBIN = "roundrobin"
    COOKIE = "cookie"


class LoadBalancingConfiguration(BaseModel):
    """Configuration model for load balancing.

    Attributes:
        algorithm: Algorithm to use for load balancing.
        cookie: Cookie name to use when algorithm is set to cookie.
        consistent_hashing: Use consistent hashing to avoid redirection
            when servers are added/removed.
    """

    algorithm: LoadBalancingAlgorithm = Field(
        description="Configure the load balancing algorithm for the service.",
        default=LoadBalancingAlgorithm.LEASTCONN,
    )
    cookie: Optional[VALIDSTR] = Field(
        description="Only used when algorithm is COOKIE. Define the cookie to load balance on.",
        default=None,
    )
    # Note: Later when the generic LoadBalancingAlgorithm.HASH is implemented this attribute
    # will also apply under that mode.
    consistent_hashing: bool = Field(
        description=(
            "Only used when the `algorithm` is SRCIP or COOKIE. "
            "Use consistent hashing to avoid redirection when servers are added/removed. "
            "Default is False as it usually does not give a balanced distribution."
        ),
        default=False,
    )

    @model_validator(mode="after")
    def validate_attributes(self) -> Self:
        """Check that algorithm-specific configs are only set with their respective algorithm.

        Raises:
            ValueError: When validation fails in one of these cases:
                1. self.cookie is not None when self.algorithm != COOKIE
                2. self.consistent_hashing is True when algorithm is neither COOKIE nor SRCIP

        Returns:
            The validated model.
        """
        if self.cookie is not None and self.algorithm != LoadBalancingAlgorithm.COOKIE:
            raise ValueError("cookie only applies when algorithm is COOKIE.")

        if self.consistent_hashing and self.algorithm not in [
            LoadBalancingAlgorithm.COOKIE,
            LoadBalancingAlgorithm.SRCIP,
        ]:
            raise ValueError("Consistent hashing only applies when algorithm is COOKIE or SRCIP.")
        return self


class BandwidthLimit(BaseModel):
    """Configuration model for bandwidth rate limiting.

    Attributes:
        upload: Limit upload speed (bytes per second).
        download: Limit download speed (bytes per second).
    """

    upload: Optional[int] = Field(description="Upload limit (bytes per seconds).", default=None)
    download: Optional[int] = Field(
        description="Download limit (bytes per seconds).", default=None
    )


# retry-on is not yet implemented
class Retry(BaseModel):
    """Configuration model for retry.

    Attributes:
        count: How many times should a request retry.
        redispatch: Whether to redispatch failed requests to another server.
    """

    count: int = Field(description="How many times should a request retry.")
    redispatch: bool = Field(
        description="Whether to redispatch failed requests to another server.", default=False
    )


class TimeoutConfiguration(BaseModel):
    """Configuration model for timeout.

    Attributes:
        server: Timeout for requests from haproxy to backend servers.
        connect: Timeout for client requests to haproxy.
        queue: Timeout for requests waiting in the queue after server-maxconn is reached.
    """

    server: int = Field(
        description="Timeout (in seconds) for requests from haproxy to backend servers.",
        default=60,
    )
    connect: int = Field(
        description="Timeout (in seconds) for client requests to haproxy.", default=60
    )
    queue: int = Field(
        description="Timeout (in seconds) for requests in the queue.",
        default=60,
    )


class HaproxyRewriteMethod(Enum):
    """Enum of possible HTTP rewrite methods.

    Attrs:
        SET_PATH: Rewrite the request path.
        SET_QUERY: Rewrite the request query string.
        SET_HEADER: Rewrite the value of a named request header.
    """

    SET_PATH = "set-path"
    SET_QUERY = "set-query"
    SET_HEADER = "set-header"


class RewriteConfiguration(BaseModel):
    """Configuration model for HTTP rewrite.

    Attributes:
        method: Which rewrite method to apply. One of set-path, set-query, set-header.
        expression: Regular expression to use with the rewrite method.
        header: The name of the header to rewrite.
    """

    method: HaproxyRewriteMethod = Field(
        description="Which rewrite method to apply.One of set-path, set-query, set-header."
    )
    expression: VALIDEXPRSTR = Field(
        description="Regular expression to use with the rewrite method."
+ ) + header: Optional[VALIDSTR] = Field( + description="The name of the header to rewrite.", default=None + ) + + +class RequirerApplicationData(_DatabagModel): + """Configuration model for HAProxy route requirer application data. + + Attributes: + service: Name of the service requesting HAProxy routing. + ports: List of port numbers on which the service is listening. + protocol: The protocol that the service speaks. + hosts: List of backend server addresses. + paths: List of URL paths to route to this service. Defaults to an empty list. + hostname: Optional: The hostname of this service. + additional_hostnames: List of additional hostnames of this service. + Defaults to an empty list. + rewrites: List of RewriteConfiguration objects defining path, query, or header + rewrite rules. + check: ServerHealthCheck configuration for monitoring backend health. + load_balancing: Configuration for the load balancing strategy. + rate_limit: Optional configuration for limiting connection rates. + bandwidth_limit: Optional configuration for limiting upload and download bandwidth. + retry: Optional configuration for request retry behavior. + deny_paths: List of URL paths that should not be routed to the backend. + timeout: Configuration for server, client, and queue timeouts. + server_maxconn: Optional maximum number of connections per server. + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. Defaults to False. + Warning: enabling HTTP is a security risk, make sure you apply the necessary precautions. + external_grpc_port: Optional external gRPC port. 
+ """ + + service: VALIDSTR = Field(description="The name of the service.") + ports: list[int] = Field(description="The list of ports listening for this service.") + protocol: Literal["http", "https"] = Field( + description="The protocol that the service speaks.", + default="http", + ) + hosts: list[IPvAnyAddress] = Field( + description="The list of backend server addresses. Currently only support IP addresses.", + default=[], + ) + paths: list[VALIDSTR] = Field( + description="The list of paths to route to this service.", default=[] + ) + hostname: Optional[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = Field( + description="Hostname of this service.", default=None + ) + additional_hostnames: list[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = ( + Field(description="The list of additional hostnames of this service.", default=[]) + ) + rewrites: list[RewriteConfiguration] = Field( + description="The list of path rewrite rules.", default=[] + ) + check: Optional[ServerHealthCheck] = Field( + description="Configure health check for the service.", + default=None, + ) + load_balancing: LoadBalancingConfiguration = Field( + description="Configure loadbalancing.", default=LoadBalancingConfiguration() + ) + rate_limit: Optional[RateLimit] = Field( + description="Configure rate limit for the service.", default=None + ) + bandwidth_limit: BandwidthLimit = Field( + description="Configure bandwidth limit for the service.", default=BandwidthLimit() + ) + retry: Optional[Retry] = Field( + description="Configure retry for incoming requests.", default=None + ) + deny_paths: list[VALIDSTR] = Field( + description="Configure path that should not be routed to the backend", default=[] + ) + timeout: TimeoutConfiguration = Field( + description="Configure timeout", + default=TimeoutConfiguration(), + ) + server_maxconn: Optional[int] = Field( + description="Configure maximum connection per server", default=None + ) + http_server_close: bool = Field( + 
description="Configure server close after request", default=False + ) + allow_http: bool = Field( + description="Whether to allow HTTP traffic in addition to HTTPS.", default=False + ) + external_grpc_port: int | None = Field( + description="Optional external gRPC port.", default=None, gt=0, le=65535 + ) + + @field_validator("load_balancing") + @classmethod + def validate_load_balancing_configuration( + cls, configuration: LoadBalancingConfiguration + ) -> LoadBalancingConfiguration: + """Validate the parsed load balancing configuration. + + Args: + configuration: The configuration to validate. + + Raises: + ValueError: When cookie is not set under COOKIE load balancing mode. + + Returns: + LoadBalancingConfiguration: The validated configuration. + """ + if configuration.algorithm == LoadBalancingAlgorithm.COOKIE and not configuration.cookie: + raise ValueError("cookie must be set if load balacing algorithm is COOKIE.") + return configuration + + @field_validator("rewrites") + @classmethod + def validate_rewrites(cls, rewrites: list[RewriteConfiguration]) -> list[RewriteConfiguration]: + """Validate the parsed list of rewrite configurations. + + Args: + rewrites: The configurations to validate. + + Raises: + ValueError: When header is not set under SET_HEADER rewrite method. + + Returns: + list[RewriteConfiguration]: The validated configurations. + """ + for rewrite in rewrites: + if rewrite.method == HaproxyRewriteMethod.SET_HEADER and not rewrite.method: + raise ValueError("header must be set if rewrite method is SET_HEADER.") + return rewrites + + +class HaproxyRouteProviderAppData(_DatabagModel): + """haproxy-route provider databag schema. + + Attributes: + endpoints: The list of proxied endpoints that maps to the backend. + """ + + endpoints: list[AnyHttpUrl] + + +class RequirerUnitData(_DatabagModel): + """haproxy-route requirer unit data. + + Attributes: + address: IP address of the unit. 
    """

    address: IPvAnyAddress = Field(description="IP address of the unit.")


@dataclass
class HaproxyRouteRequirerData:
    """haproxy-route requirer data.

    Aggregates the validated application and per-unit databags of one
    haproxy-route relation.

    Attributes:
        relation_id: Id of the relation.
        application_data: Application data.
        units_data: Units data.
    """

    relation_id: int
    application_data: RequirerApplicationData
    units_data: list[RequirerUnitData]


@dataclass
class HaproxyRouteRequirersData:
    """haproxy-route requirers data.

    Attributes:
        requirers_data: List of requirer data.
        relation_ids_with_invalid_data: Set of relation ids that contains invalid data.
    """

    requirers_data: list[HaproxyRouteRequirerData]
    relation_ids_with_invalid_data: set[int]

    @model_validator(mode="after")
    def check_services_unique(self) -> Self:
        """Check that requirers define unique services.

        Raises:
            DataValidationError: When requirers declared duplicate services.

        Returns:
            The validated model.
        """
        services = [
            requirer_data.application_data.service for requirer_data in self.requirers_data
        ]
        # A duplicate service name shrinks the set, exposing the collision.
        if len(services) != len(set(services)):
            raise DataValidationError("Services declaration by requirers must be unique.")

        return self

    @model_validator(mode="after")
    def check_external_grpc_port_unique(self) -> Self:
        """Check that external gRPC ports are unique across requirer applications.

        If multiple requirer applications declare the same external gRPC port,
        their relation ids are added to relation_ids_with_invalid_data.

        Returns:
            The validated model.
        """
        relation_ids_per_port: dict[int, list[int]] = defaultdict(list[int])
        for requirer_data in self.requirers_data:
            if requirer_data.application_data.external_grpc_port:
                relation_ids_per_port[requirer_data.application_data.external_grpc_port].append(
                    requirer_data.relation_id
                )

        # Every relation sharing a port is flagged, not just the "second" one.
        self.relation_ids_with_invalid_data.update(
            relation_id
            for relation_ids in relation_ids_per_port.values()
            for relation_id in relation_ids
            if len(relation_ids) > 1
        )
        return self

    @model_validator(mode="after")
    def check_grpc_requires_https(self) -> Self:
        """Check that backends with external_grpc_port use https protocol.

        If not, their relation ids are added to relation_ids_with_invalid_data.

        Returns:
            Self: The validated model.
        """
        for requirer_data in self.requirers_data:
            # NOTE(review): the third condition tests the truthiness of
            # relation_id, so a relation with id 0 would never be flagged —
            # confirm whether that guard is intentional.
            if all(
                [
                    requirer_data.application_data.external_grpc_port is not None,
                    requirer_data.application_data.protocol != "https",
                    requirer_data.relation_id,
                ]
            ):
                self.relation_ids_with_invalid_data.add(requirer_data.relation_id)
        return self


class HaproxyRouteDataAvailableEvent(EventBase):
    """HaproxyRouteDataAvailableEvent custom event.

    This event indicates that the requirers data are available.
    """


class HaproxyRouteDataRemovedEvent(EventBase):
    """HaproxyRouteDataRemovedEvent custom event.

    This event indicates that one of the endpoints was removed.
    """


class HaproxyRouteProviderEvents(CharmEvents):
    """Custom events emitted by the haproxy-route provider.

    Attributes:
        data_available: This event indicates that
            the haproxy-route endpoints are available.
        data_removed: This event indicates that one of the endpoints was removed.
    """

    data_available = EventSource(HaproxyRouteDataAvailableEvent)
    data_removed = EventSource(HaproxyRouteDataRemovedEvent)


class HaproxyRouteProvider(Object):
    """Haproxy-route interface provider implementation.

    Attributes:
        on: Custom events of the provider.
        relations: Related applications.
    """

    on = HaproxyRouteProviderEvents()

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str = HAPROXY_ROUTE_RELATION_NAME,
        raise_on_validation_error: bool = False,
    ) -> None:
        """Initialize the HaproxyRouteProvider.

        Args:
            charm: The charm that is instantiating the library.
            relation_name: The name of the relation.
            raise_on_validation_error: Whether the library should raise
                HaproxyRouteInvalidRelationDataError when requirer data validation fails.
                If this is set to True the provider charm needs to also catch and handle the
                thrown exception.
        """
        super().__init__(charm, relation_name)

        self._relation_name = relation_name
        self.charm = charm
        self.raise_on_validation_error = raise_on_validation_error
        on = self.charm.on
        self.framework.observe(on[self._relation_name].relation_created, self._configure)
        self.framework.observe(on[self._relation_name].relation_changed, self._configure)
        self.framework.observe(on[self._relation_name].relation_broken, self._on_endpoint_removed)
        self.framework.observe(
            on[self._relation_name].relation_departed, self._on_endpoint_removed
        )

    @property
    def relations(self) -> list[Relation]:
        """The list of Relation instances associated with this endpoint."""
        return list(self.charm.model.relations[self._relation_name])

    def _configure(self, _event: EventBase) -> None:
        """Handle relation events.

        Validates all requirer databags before announcing data availability.
        """
        if relations := self.relations:
            # Only for data validation
            _ = self.get_data(relations)
            self.on.data_available.emit()

    def _on_endpoint_removed(self, _: EventBase) -> None:
        """Handle relation broken/departed events."""
        self.on.data_removed.emit()

    def get_data(self, relations: list[Relation]) -> HaproxyRouteRequirersData:
        """Fetch requirer data.

        Args:
            relations: A list of Relation instances to fetch data from.

        Raises:
            HaproxyRouteInvalidRelationDataError: When requirer data validation fails
                and raise_on_validation_error is set.

        Returns:
            HaproxyRouteRequirersData: Validated data from all haproxy-route requirers.
        """
        requirers_data: list[HaproxyRouteRequirerData] = []
        relation_ids_with_invalid_data: set[int] = set()
        for relation in relations:
            try:
                application_data = self._get_requirer_application_data(relation)
                units_data = self._get_requirer_units_data(relation)
                haproxy_route_requirer_data = HaproxyRouteRequirerData(
                    application_data=application_data,
                    units_data=units_data,
                    relation_id=relation.id,
                )
                requirers_data.append(haproxy_route_requirer_data)
            except DataValidationError as exc:
                if self.raise_on_validation_error:
                    logger.error(
                        "haproxy-route data validation failed for relation %s: %s",
                        relation,
                        str(exc),
                    )
                    raise HaproxyRouteInvalidRelationDataError(
                        f"haproxy-route data validation failed for relation: {relation}"
                    ) from exc
                # Collect the bad relation and keep processing the rest.
                relation_ids_with_invalid_data.add(relation.id)
                continue
        return HaproxyRouteRequirersData(
            requirers_data=requirers_data,
            relation_ids_with_invalid_data=relation_ids_with_invalid_data,
        )

    def _get_requirer_units_data(self, relation: Relation) -> list[RequirerUnitData]:
        """Fetch and validate the requirer's units data.

        Args:
            relation: The relation to fetch unit data from.

        Raises:
            DataValidationError: When unit data validation fails.

        Returns:
            list[RequirerUnitData]: List of validated unit data from the requirer.
        """
        requirer_units_data: list[RequirerUnitData] = []

        for unit in relation.units:
            databag = relation.data.get(unit)
            if not databag:
                logger.error(
                    "Requirer unit data does not exist even though the unit is still present."
                )
                continue
            try:
                data = cast(RequirerUnitData, RequirerUnitData.load(databag))
                requirer_units_data.append(data)
            except DataValidationError:
                logger.error("Invalid requirer application data for %s", unit)
                raise
        return requirer_units_data

    def _get_requirer_application_data(self, relation: Relation) -> RequirerApplicationData:
        """Fetch and validate the requirer's application databag.

        Args:
            relation: The relation to fetch application data from.

        Raises:
            DataValidationError: When requirer application data validation fails.

        Returns:
            RequirerApplicationData: Validated application data from the requirer.
        """
        try:
            return cast(
                RequirerApplicationData, RequirerApplicationData.load(relation.data[relation.app])
            )
        except DataValidationError:
            logger.error("Invalid requirer application data for %s", relation.app.name)
            raise

    def publish_proxied_endpoints(self, endpoints: list[str], relation: Relation) -> None:
        """Publish to the app databag the proxied endpoints.

        Args:
            endpoints: The list of proxied endpoints to publish.
            relation: The relation with the requirer application.
        """
        HaproxyRouteProviderAppData(endpoints=[cast(AnyHttpUrl, e) for e in endpoints]).dump(
            relation.data[self.charm.app], clear=True
        )


# NOTE(review): "Enpoints" is a typo, but the class name is part of the public
# library API — renaming it would break charms that already reference it.
class HaproxyRouteEnpointsReadyEvent(EventBase):
    """HaproxyRouteEnpointsReadyEvent custom event."""


class HaproxyRouteEndpointsRemovedEvent(EventBase):
    """HaproxyRouteEndpointsRemovedEvent custom event."""


class HaproxyRouteRequirerEvents(CharmEvents):
    """Custom events emitted by the haproxy-route requirer.

    Attributes:
        ready: when the provider proxied endpoints are ready.
        removed: when the provider endpoints are removed.
    """

    ready = EventSource(HaproxyRouteEnpointsReadyEvent)
    removed = EventSource(HaproxyRouteEndpointsRemovedEvent)


class HaproxyRouteRequirer(Object):
    """haproxy-route interface requirer implementation.
+ + Attributes: + on: Custom events of the requirer. + """ + + on = HaproxyRouteRequirerEvents() + + # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals + def __init__( + self, + charm: CharmBase, + relation_name: str, + service: Optional[str] = None, + ports: Optional[list[int]] = None, + protocol: Literal["http", "https"] = "http", + hosts: Optional[list[IPvAnyAddress]] = None, + paths: Optional[list[str]] = None, + hostname: Optional[str] = None, + additional_hostnames: Optional[list[str]] = None, + check_interval: Optional[int] = None, + check_rise: Optional[int] = None, + check_fall: Optional[int] = None, + check_path: Optional[str] = None, + check_port: Optional[int] = None, + path_rewrite_expressions: Optional[list[str]] = None, + query_rewrite_expressions: Optional[list[str]] = None, + header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, + load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, + load_balancing_cookie: Optional[str] = None, + load_balancing_consistent_hashing: bool = False, + rate_limit_connections_per_minute: Optional[int] = None, + rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, + upload_limit: Optional[int] = None, + download_limit: Optional[int] = None, + retry_count: Optional[int] = None, + retry_redispatch: bool = False, + deny_paths: Optional[list[str]] = None, + server_timeout: int = 60, + connect_timeout: int = 60, + queue_timeout: int = 60, + server_maxconn: Optional[int] = None, + unit_address: Optional[str] = None, + http_server_close: bool = False, + allow_http: bool = False, + ) -> None: + """Initialize the HaproxyRouteRequirer. + + Args: + charm: The charm that is instantiating the library. + relation_name: The name of the relation to bind to. + service: The name of the service to route traffic to. + ports: List of ports the service is listening on. + protocol: The protocol that the service speaks. + hosts: List of backend server addresses. 
Currently only support IP addresses. + paths: List of URL paths to route to this service. + hostname: Hostname of this service. + additional_hostnames: Additional hostnames of this service. + check_interval: Interval between health checks in seconds. + check_rise: Number of successful health checks before server is considered up. + check_fall: Number of failed health checks before server is considered down. + check_path: The path to use for server health checks. + check_port: The port to use for http-check. + path_rewrite_expressions: List of regex expressions for path rewrites. + query_rewrite_expressions: List of regex expressions for query rewrites. + header_rewrite_expressions: List of tuples containing header name + and rewrite expression. + load_balancing_algorithm: Algorithm to use for load balancing. + load_balancing_cookie: Cookie name to use when algorithm is set to cookie. + load_balancing_consistent_hashing: Whether to use consistent hashing. + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + upload_limit: Maximum upload bandwidth in bytes per second. + download_limit: Maximum download bandwidth in bytes per second. + retry_count: Number of times to retry failed requests. + retry_redispatch: Whether to redispatch failed requests to another server. + deny_paths: List of paths that should not be routed to the backend. + server_timeout: Timeout for requests from haproxy to backend servers in seconds. + connect_timeout: Timeout for client requests to haproxy in seconds. + queue_timeout: Timeout for requests waiting in queue in seconds. + server_maxconn: Maximum connections per server. + unit_address: IP address of the unit (if not provided, will use binding address). + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. 
+ Warning: enabling HTTP is a security risk, + make sure you apply the necessary precautions. + """ + super().__init__(charm, relation_name) + + self._relation_name = relation_name + self.relation = self.model.get_relation(self._relation_name) + self.charm = charm + self.app = self.charm.app + + # build the full application data + self._application_data = self._generate_application_data( + service, + ports, + protocol, + hosts, + paths, + hostname, + additional_hostnames, + check_interval, + check_rise, + check_fall, + check_path, + check_port, + path_rewrite_expressions, + query_rewrite_expressions, + header_rewrite_expressions, + load_balancing_algorithm, + load_balancing_cookie, + load_balancing_consistent_hashing, + rate_limit_connections_per_minute, + rate_limit_policy, + upload_limit, + download_limit, + retry_count, + retry_redispatch, + deny_paths, + server_timeout, + connect_timeout, + queue_timeout, + server_maxconn, + http_server_close, + allow_http, + ) + self._unit_address = unit_address + + on = self.charm.on + self.framework.observe(on[self._relation_name].relation_created, self._configure) + self.framework.observe(on[self._relation_name].relation_changed, self._configure) + self.framework.observe(on[self._relation_name].relation_broken, self._on_relation_broken) + + def _configure(self, _: EventBase) -> None: + """Handle relation events.""" + self.update_relation_data() + if self.relation and self.get_proxied_endpoints(): + # This event is only emitted when the provider databag changes + # which only happens when relevant changes happened + # Additionally this event is purely informational and it's up to the requirer to + # fetch the proxied endpoints in their code using get_proxied_endpoints + self.on.ready.emit() + + def _on_relation_broken(self, _: RelationBrokenEvent) -> None: + """Handle relation broken event.""" + self.on.removed.emit() + + # pylint: disable=too-many-arguments,too-many-positional-arguments + def 
provide_haproxy_route_requirements( + self, + service: str, + ports: list[int], + protocol: Literal["http", "https"] = "http", + hosts: Optional[list[IPvAnyAddress]] = None, + paths: Optional[list[str]] = None, + hostname: Optional[str] = None, + additional_hostnames: Optional[list[str]] = None, + check_interval: Optional[int] = None, + check_rise: Optional[int] = None, + check_fall: Optional[int] = None, + check_path: Optional[str] = None, + check_port: Optional[int] = None, + path_rewrite_expressions: Optional[list[str]] = None, + query_rewrite_expressions: Optional[list[str]] = None, + header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, + load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, + load_balancing_cookie: Optional[str] = None, + load_balancing_consistent_hashing: bool = False, + rate_limit_connections_per_minute: Optional[int] = None, + rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, + upload_limit: Optional[int] = None, + download_limit: Optional[int] = None, + retry_count: Optional[int] = None, + retry_redispatch: bool = False, + deny_paths: Optional[list[str]] = None, + server_timeout: int = 60, + connect_timeout: int = 60, + queue_timeout: int = 60, + server_maxconn: Optional[int] = None, + unit_address: Optional[str] = None, + http_server_close: bool = False, + allow_http: bool = False, + external_grpc_port: Optional[int] = None, + ) -> None: + """Update haproxy-route requirements data in the relation. + + Args: + service: The name of the service to route traffic to. + ports: List of ports the service is listening on. + protocol: The protocol that the service speaks, defaults to "http". + hosts: List of backend server addresses. Currently only supports IP addresses. + paths: List of URL paths to route to this service. + hostname: Hostname of this service. + additional_hostnames: Additional hostnames of this service. + check_interval: Interval between health checks in seconds.
+ check_rise: Number of successful health checks before server is considered up. + check_fall: Number of failed health checks before server is considered down. + check_path: The path to use for server health checks. + check_port: The port to use for http-check. + path_rewrite_expressions: List of regex expressions for path rewrites. + query_rewrite_expressions: List of regex expressions for query rewrites. + header_rewrite_expressions: List of tuples containing header name + and rewrite expression. + load_balancing_algorithm: Algorithm to use for load balancing. + load_balancing_cookie: Cookie name to use when algorithm is set to cookie. + load_balancing_consistent_hashing: Whether to use consistent hashing. + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + upload_limit: Maximum upload bandwidth in bytes per second. + download_limit: Maximum download bandwidth in bytes per second. + retry_count: Number of times to retry failed requests. + retry_redispatch: Whether to redispatch failed requests to another server. + deny_paths: List of paths that should not be routed to the backend. + server_timeout: Timeout for requests from haproxy to backend servers in seconds. + connect_timeout: Timeout for client requests to haproxy in seconds. + queue_timeout: Timeout for requests waiting in queue in seconds. + server_maxconn: Maximum connections per server. + unit_address: IP address of the unit (if not provided, will use binding address). + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. + Warning: enabling HTTP is a security risk, + make sure you apply the necessary precautions. + external_grpc_port: Optional external gRPC port. 
+ """ + self._unit_address = unit_address + self._application_data = self._generate_application_data( + service, + ports, + protocol, + hosts, + paths, + hostname, + additional_hostnames, + check_interval, + check_rise, + check_fall, + check_path, + check_port, + path_rewrite_expressions, + query_rewrite_expressions, + header_rewrite_expressions, + load_balancing_algorithm, + load_balancing_cookie, + load_balancing_consistent_hashing, + rate_limit_connections_per_minute, + rate_limit_policy, + upload_limit, + download_limit, + retry_count, + retry_redispatch, + deny_paths, + server_timeout, + connect_timeout, + queue_timeout, + server_maxconn, + http_server_close, + allow_http, + external_grpc_port, + ) + self.update_relation_data() + + # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals + def _generate_application_data( # noqa: C901 + self, + service: Optional[str] = None, + ports: Optional[list[int]] = None, + protocol: Literal["http", "https"] = "http", + hosts: Optional[list[IPvAnyAddress]] = None, + paths: Optional[list[str]] = None, + hostname: Optional[str] = None, + additional_hostnames: Optional[list[str]] = None, + check_interval: Optional[int] = None, + check_rise: Optional[int] = None, + check_fall: Optional[int] = None, + check_path: Optional[str] = None, + check_port: Optional[int] = None, + path_rewrite_expressions: Optional[list[str]] = None, + query_rewrite_expressions: Optional[list[str]] = None, + header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, + load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, + load_balancing_cookie: Optional[str] = None, + load_balancing_consistent_hashing: bool = False, + rate_limit_connections_per_minute: Optional[int] = None, + rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, + upload_limit: Optional[int] = None, + download_limit: Optional[int] = None, + retry_count: Optional[int] = None, + retry_redispatch: bool = False, + 
deny_paths: Optional[list[str]] = None, + server_timeout: int = 60, + connect_timeout: int = 60, + queue_timeout: int = 60, + server_maxconn: Optional[int] = None, + http_server_close: bool = False, + allow_http: bool = False, + external_grpc_port: Optional[int] = None, + ) -> dict[str, Any]: + """Generate the complete application data structure. + + Args: + service: The name of the service to route traffic to. + ports: List of ports the service is listening on. + protocol: The protocol that the service speaks. + hosts: List of backend server addresses. Currently only support IP addresses. + paths: List of URL paths to route to this service. + hostname: Hostname of this service. + additional_hostnames: Additional hostnames of this service. + check_interval: Interval between health checks in seconds. + check_rise: Number of successful health checks before server is considered up. + check_fall: Number of failed health checks before server is considered down. + check_path: The path to use for server health checks. + check_port: The port to use for http-check. + path_rewrite_expressions: List of regex expressions for path rewrites. + query_rewrite_expressions: List of regex expressions for query rewrites. + header_rewrite_expressions: List of tuples containing header name and + rewrite expression. + load_balancing_algorithm: Algorithm to use for load balancing. + load_balancing_cookie: Cookie name to use when algorithm is set to cookie. + load_balancing_consistent_hashing: Whether to use consistent hashing. + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + upload_limit: Maximum upload bandwidth in bytes per second. + download_limit: Maximum download bandwidth in bytes per second. + retry_count: Number of times to retry failed requests. + retry_redispatch: Whether to redispatch failed requests to another server. 
+ deny_paths: List of paths that should not be routed to the backend. + server_timeout: Timeout for requests from haproxy to backend servers in seconds. + connect_timeout: Timeout for client requests to haproxy in seconds. + queue_timeout: Timeout for requests waiting in queue in seconds. + server_maxconn: Maximum connections per server. + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. + Warning: enabling HTTP is a security risk, + make sure you apply the necessary precautions. + external_grpc_port: Optional external gRPC port. + + Returns: + dict: A dictionary containing the complete application data structure. + """ + # Apply default value to list parameters to avoid problems with mutable default args. + if not ports: + ports = [] + if not hosts: + hosts = [] + if not paths: + paths = [] + if not additional_hostnames: + additional_hostnames = [] + if not path_rewrite_expressions: + path_rewrite_expressions = [] + if not query_rewrite_expressions: + query_rewrite_expressions = [] + if not header_rewrite_expressions: + header_rewrite_expressions = [] + if not deny_paths: + deny_paths = [] + + application_data: dict[str, Any] = { + "service": service, + "ports": ports, + "protocol": protocol, + "hosts": hosts, + "paths": paths, + "hostname": hostname, + "additional_hostnames": additional_hostnames, + "load_balancing": { + "algorithm": load_balancing_algorithm, + "cookie": load_balancing_cookie, + "consistent_hashing": load_balancing_consistent_hashing, + }, + "timeout": { + "server": server_timeout, + "connect": connect_timeout, + "queue": queue_timeout, + }, + "bandwidth_limit": { + "download": download_limit, + "upload": upload_limit, + }, + "deny_paths": deny_paths, + "server_maxconn": server_maxconn, + "rewrites": self._generate_rewrite_configuration( + path_rewrite_expressions, + query_rewrite_expressions, + header_rewrite_expressions, + ), + "http_server_close": http_server_close, + 
"allow_http": allow_http, + "external_grpc_port": external_grpc_port, + } + + if allow_http: + logger.warning( + "HTTP traffic is allowed alongside HTTPS. " + "This is a security risk, make sure you apply the necessary precautions." + ) + + if check := self._generate_server_healthcheck_configuration( + check_interval, check_rise, check_fall, check_path, check_port + ): + application_data["check"] = check + + if rate_limit := self._generate_rate_limit_configuration( + rate_limit_connections_per_minute, rate_limit_policy + ): + application_data["rate_limit"] = rate_limit + + if retry := self._generate_retry_configuration(retry_count, retry_redispatch): + application_data["retry"] = retry + return application_data + + def _generate_server_healthcheck_configuration( + self, + interval: Optional[int], + rise: Optional[int], + fall: Optional[int], + path: Optional[str], + port: Optional[int], + ) -> dict[str, int | Optional[str]]: + """Generate configuration for server health checks. + + Args: + interval: Time between health checks in seconds. + rise: Number of successful checks before marking server as up. + fall: Number of failed checks before marking server as down. + path: The path to use for health checks. + port: The port to use for http-check. + + Returns: + dict[str, int | Optional[str]]: Health check configuration dictionary. + """ + server_healthcheck_configuration: dict[str, int | Optional[str]] = {} + if interval and rise and fall: + server_healthcheck_configuration = { + "interval": interval, + "rise": rise, + "fall": fall, + "path": path, + "port": port, + } + return server_healthcheck_configuration + + def _generate_rewrite_configuration( + self, + path_rewrite_expressions: list[str], + query_rewrite_expressions: list[str], + header_rewrite_expressions: list[tuple[str, str]], + ) -> list[dict[str, str | HaproxyRewriteMethod]]: + """Generate rewrite configuration from provided expressions. 
+ + Args: + path_rewrite_expressions: List of path rewrite expressions. + query_rewrite_expressions: List of query rewrite expressions. + header_rewrite_expressions: List of header name and expression tuples. + + Returns: + list[dict[str, str]]: List of generated rewrite configurations. + """ + # rewrite configuration + rewrite_configurations: list[dict[str, str | HaproxyRewriteMethod]] = [] + for expression in path_rewrite_expressions: + rewrite_configurations.append( + {"method": HaproxyRewriteMethod.SET_PATH, "expression": expression} + ) + for expression in query_rewrite_expressions: + rewrite_configurations.append( + {"method": HaproxyRewriteMethod.SET_QUERY, "expression": expression} + ) + for header, expression in header_rewrite_expressions: + rewrite_configurations.append( + { + "method": HaproxyRewriteMethod.SET_HEADER, + "expression": expression, + "header": header, + } + ) + return rewrite_configurations + + def _generate_rate_limit_configuration( + self, rate_limit_connections_per_minute: Optional[int], rate_limit_policy: RateLimitPolicy + ) -> dict[str, Any]: + """Generate rate limit configuration. + + Args: + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + + Returns: + dict[str, Any]: Rate limit configuration, or empty dict if no limits are set. + """ + rate_limit_configuration = {} + if rate_limit_connections_per_minute: + rate_limit_configuration = { + "connections_per_minute": rate_limit_connections_per_minute, + "policy": rate_limit_policy, + } + return rate_limit_configuration + + def _generate_retry_configuration( + self, count: Optional[int], redispatch: bool + ) -> dict[str, Any]: + """Generate retry configuration. + + Args: + count: Number of times to retry failed requests. + redispatch: Whether to redispatch failed requests to another server. + + Returns: + dict[str, Any]: Retry configuration dictionary, or empty dict if retry not configured. 
+ """ + retry_configuration = {} + if count: + retry_configuration = { + "count": count, + "redispatch": redispatch, + } + return retry_configuration + + def update_relation_data(self) -> None: + """Update both application and unit data in the relation.""" + if not self._application_data.get("service") and not self._application_data.get("ports"): + logger.warning("Required field(s) are missing, skipping update of the relation data.") + return + + if relation := self.relation: + self._update_application_data(relation) + self._update_unit_data(relation) + + def _update_application_data(self, relation: Relation) -> None: + """Update application data in the relation databag. + + Args: + relation: The relation instance. + """ + if self.charm.unit.is_leader(): + application_data = self._prepare_application_data() + application_data.dump(relation.data[self.app], clear=True) + + def _update_unit_data(self, relation: Relation) -> None: + """Prepare and update the unit data in the relation databag. + + Args: + relation: The relation instance. + """ + unit_data = self._prepare_unit_data() + unit_data.dump(relation.data[self.charm.unit], clear=True) + + def _prepare_application_data(self) -> RequirerApplicationData: + """Prepare and validate the application data. + + Raises: + DataValidationError: When validation of application data fails. + + Returns: + RequirerApplicationData: The validated application data model. + """ + try: + return cast( + RequirerApplicationData, RequirerApplicationData.from_dict(self._application_data) + ) + except ValidationError as exc: + logger.error("Validation error when preparing requirer application data.") + raise DataValidationError( + "Validation error when preparing requirer application data." + ) from exc + + def _prepare_unit_data(self) -> RequirerUnitData: + """Prepare and validate unit data. + + Raises: + DataValidationError: When no address or unit IP is available. + + Returns: + RequirerUnitData: The validated unit data model. 
+ """ + address = self._unit_address + if not address: + network_binding = self.charm.model.get_binding(self._relation_name) + if ( + network_binding is not None + and (bind_address := network_binding.network.bind_address) is not None + ): + address = str(bind_address) + else: + logger.error("No unit IP available.") + raise DataValidationError("No unit IP available.") + return RequirerUnitData(address=cast(IPvAnyAddress, address)) + + def get_proxied_endpoints(self) -> list[AnyHttpUrl]: + """The full ingress URL to reach the current unit. + + Returns: + The provider URL or None if the URL isn't available yet or is not valid. + """ + relation = self.relation + if not relation or not relation.app: + return [] + + # Fetch the provider's app databag + try: + databag = relation.data[relation.app] + except ModelError: + logger.exception("Error reading remote app data.") + return [] + + if not databag: # not ready yet + return [] + + try: + provider_data = cast( + HaproxyRouteProviderAppData, HaproxyRouteProviderAppData.load(databag) + ) + return provider_data.endpoints + except DataValidationError: + logger.exception("Invalid provider url.") + return [] diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py new file mode 100644 index 000000000..167eff41b --- /dev/null +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -0,0 +1,61 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""TODO: Add a proper docstring here. + +This is a placeholder docstring for this charm library. Docstrings are +presented on Charmhub and updated whenever you push a new version of the +library. + +Complete documentation about creating and documenting libraries can be found +in the SDK docs at https://juju.is/docs/sdk/libraries. 
+ +See `charmcraft publish-lib` and `charmcraft fetch-lib` for details of how to +share and consume charm libraries. They serve to enhance collaboration +between charmers. Use a charmer's libraries for classes that handle +integration with their charm. + +Bear in mind that new revisions of the different major API versions (v0, v1, +v2 etc) are maintained independently. You can continue to update v0 and v1 +after you have pushed v3. + +Markdown is supported, following the CommonMark specification. +""" + +from charms.haproxy.v2.haproxy_route import RequirerApplicationData +from pydantic import Field, IPvAnyAddress +from pydantic.dataclasses import dataclass + +# The unique Charmhub library identifier, never change it +LIBID = "24c99d77895e481d8661288f95884ee4" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 1 + + +@dataclass +class HaproxyRoutePolicyData: + """Dataclass to store the data for the haproxy-route-policy interface.""" + + requests: list[RequirerApplicationData] + + +class RequirerUnitData(_DatabagModel): + """haproxy-route requirer unit data. + + Attributes: + address: IP address of the unit. 
+ """ + + address: IPvAnyAddress = Field(description="IP address of the unit.") + + +@dataclass +class HaproxyRoutePolicyProviderData(HaproxyRoutePolicyData): + """Dataclass to store the data for the haproxy-route-policy provider interface.""" + + units_data: list[RequirerUnitData] From bc72418f3f1c862dda5b47bb2c5ba709ce5c8fa1 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 14:25:03 +0200 Subject: [PATCH 136/201] update fetch logic for admin credentials, update tests, update juju version to 3.6 --- haproxy-route-policy-operator/charmcraft.yaml | 2 +- haproxy-route-policy-operator/src/charm.py | 7 +++++-- haproxy-route-policy-operator/tests/unit/test_charm.py | 9 +++++---- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 9dfdf7da9..99724d42c 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -31,7 +31,7 @@ links: - https://launchpad.net/~canonical-is-devops assumes: - - juju >= 3.3 + - juju >= 3.6 requires: database: diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index a2c386d55..049ad0346 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -94,8 +94,11 @@ def _reconcile(self, _: ops.EventBase) -> None: run_migrations() self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") - username, password = self._get_django_admin_credentials(peer_relation).values() - create_or_update_user(username, password) + credentials = self._get_django_admin_credentials(peer_relation) + if (username := credentials.get("username")) and ( + password := credentials.get("password") + ): + create_or_update_user(username, password) self.unit.status = ops.MaintenanceStatus("starting gunicorn service") start_gunicorn_service() diff --git 
a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index ac7b2d703..2db0c4612 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -110,9 +110,9 @@ def test_config_changed_reconciles_snap_with_database_credentials(): ) def test_config_changed_missing_secrets(secrets): """ - arrange: create charm context with valid database relation credentials. + arrange: create charm context with missing secrets from leader unit. act: run config-changed event. - assert: snap is configured, migrations run, and service is started. + assert: unit in waiting status. """ ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State(relations=[_database_relation()], secrets=secrets) @@ -131,9 +131,9 @@ def test_config_changed_missing_secrets(secrets): def test_config_changed_leader_create_secrets(): """ - arrange: create charm context with valid database relation credentials. + arrange: create charm context with missing secrets as the leader unit. act: run config-changed event. - assert: snap is configured, migrations run, and service is started. + assert: secrets are created. 
""" ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State(relations=[_database_relation()], secrets=[], leader=True) @@ -148,3 +148,4 @@ def test_config_changed_leader_create_secrets(): out = ctx.run(ctx.on.config_changed(), state) assert len(list(out.secrets)) == 2 + assert out.unit_status == testing.ActiveStatus() From 91ba2e32ba4181af0989aa27412b26ffbd7d089c Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 12:12:42 +0200 Subject: [PATCH 137/201] move juju secret handling to charm state, add charm state for policy related values, add unit tests --- haproxy-route-policy-operator/charmcraft.yaml | 9 + haproxy-route-policy-operator/pyproject.toml | 1 + haproxy-route-policy-operator/src/charm.py | 94 ++-------- .../src/state/policy.py | 164 ++++++++++++++++++ .../tests/unit/test_charm.py | 42 +++-- .../test_haproxy_route_policy_information.py | 118 +++++++++++++ haproxy-route-policy-operator/uv.lock | 10 ++ 7 files changed, 352 insertions(+), 86 deletions(-) create mode 100644 haproxy-route-policy-operator/src/state/policy.py create mode 100644 haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 99724d42c..97c4cc1cc 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -58,3 +58,12 @@ charm-libs: peers: haproxy-route-policy-peer: interface: haproxy_route_policy_peer + +config: + options: + allowed-hosts: + type: string + description: A comma-separated list of host/domain names that the dns-policy-app API + can serve. This configuration will set the DJANGO_ALLOWED_HOSTS environment + variable with its content being a JSON encoded list. 
+ default: "0.0.0.0" \ No newline at end of file diff --git a/haproxy-route-policy-operator/pyproject.toml b/haproxy-route-policy-operator/pyproject.toml index 11799028f..4fbcdddcb 100644 --- a/haproxy-route-policy-operator/pyproject.toml +++ b/haproxy-route-policy-operator/pyproject.toml @@ -18,6 +18,7 @@ dependencies = [ "requests==2.33.1", "charmlibs-snap==1.0.1", "pydantic>=2.12.5", + "validators>=0.35.0", ] [dependency-groups] diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 049ad0346..f4261d890 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -6,7 +6,6 @@ """haproxy-route-policy-operator charm.""" import logging -import secrets from typing import Any import ops @@ -28,23 +27,19 @@ DatabaseRelationMissingError, DatabaseRelationNotReadyError, ) +from state.policy import ( + DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + PEER_RELATION_NAME, + DjangoAdminCredentialsMissingError, + DjangoSecretKeyMissingError, + HaproxyRoutePolicyInformation, + PeerRelationMissingError, +) logger = logging.getLogger(__name__) DATABASE_RELATION = "database" HAPROXY_ROUTE_POLICY_PORT = 8080 -# Ignore bandit warnings here as these are labels -DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec -DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec -PEER_RELATION_NAME = "haproxy-route-policy-peer" - - -class DjangoSecretKeyMissingError(Exception): - """Raised when the Django secret key is not generated by the leader unit.""" - - -class DjangoAdminCredentialsMissingError(Exception): - """Raised when the Django admin credentials are not generated by the leader unit.""" class HaproxyRoutePolicyCharm(ops.CharmBase): @@ -73,18 +68,14 @@ def __init__(self, *args: Any): def _reconcile(self, _: ops.EventBase) -> None: """Reconcile snap configuration and service state.""" - peer_relation = self.model.get_relation(PEER_RELATION_NAME) - if not 
peer_relation: - self.unit.status = ops.WaitingStatus("Waiting for peer relation.") - return - try: install_snap() self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") database_information = DatabaseInformation.from_requirer(self, self.database) + haproxy_route_policy_information = HaproxyRoutePolicyInformation.from_charm(self) configure_snap( { - **self._get_django_secret_key(peer_relation), + **haproxy_route_policy_information.allowed_hosts_snap_configuration, **database_information.haproxy_route_policy_snap_configuration, } ) @@ -104,10 +95,6 @@ def _reconcile(self, _: ops.EventBase) -> None: start_gunicorn_service() self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) - except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: - logger.exception("Failed to reconcile haproxy-route-policy service") - self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") - return except DatabaseRelationMissingError: self.unit.status = ops.BlockedStatus("Missing database relation.") return @@ -115,66 +102,21 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Database relation not ready") self.unit.status = ops.WaitingStatus("waiting for complete database relation.") return + except PeerRelationMissingError: + logger.exception("Peer relation missing") + self.unit.status = ops.WaitingStatus("Waiting for peer relation.") + return except (DjangoSecretKeyMissingError, DjangoAdminCredentialsMissingError): logger.exception("Django shared configuration not ready") self.unit.status = ops.WaitingStatus("Waiting for leader to set shared configuration.") return + except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: + logger.exception("Failed to reconcile haproxy-route-policy service") + self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") + return self.unit.status = ops.ActiveStatus() - def _get_django_secret_key(self, peer_relation: ops.Relation) -> dict[str, str]: - """Get the Django secret 
key from the charm's config. - - Returns: - The Django secret key. - - Raises: - DjangoSecretKeyMissingError: If the secret key is not yet created by the leader. - """ - try: - secret = self.model.get_secret(label=DJANGO_SECRET_KEY_SECRET_LABEL) - return secret.get_content() - except ops.SecretNotFoundError: - if self.unit.is_leader(): - django_secret_key_data = {"secret-key": secrets.token_urlsafe(32)} - secret = self.app.add_secret( - label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data - ) - peer_relation.data[self.app]["django-secret-key"] = ( - secret.label or DJANGO_SECRET_KEY_SECRET_LABEL - ) - return django_secret_key_data - raise DjangoSecretKeyMissingError( - "Waiting for the leader unit to generate the Django secret key." - ) - - def _get_django_admin_credentials(self, peer_relation: ops.Relation) -> dict[str, str]: - """Get the Django admin user from the charm's config. - - Returns: - The Django admin user. - """ - try: - secret = self.model.get_secret(label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL) - return secret.get_content() - except ops.SecretNotFoundError: - if self.unit.is_leader(): - django_admin_credentials_data = { - "username": "admin", - "password": secrets.token_urlsafe(32), - } - secret = self.app.add_secret( - label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, - content=django_admin_credentials_data, - ) - peer_relation.data[self.app]["django-admin-credentials"] = ( - secret.label or DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL - ) - return django_admin_credentials_data - raise DjangoAdminCredentialsMissingError( - "Waiting for the leader unit to generate the Django admin credentials." 
- ) - def _on_get_admin_credentials_action(self, event: ops.ActionEvent) -> None: """Handle the get-admin-credentials action.""" try: diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py new file mode 100644 index 000000000..a552c9335 --- /dev/null +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 + +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Charm state for database information.""" + +import json +import secrets +from typing import Annotated, cast + +import ops +from pydantic import BeforeValidator, Field, IPvAnyAddress +from pydantic.dataclasses import dataclass +from validators import domain + +# Ignore bandit warnings here as these are labels +DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec +DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec +PEER_RELATION_NAME = "haproxy-route-policy-peer" +SECRET_LENGTH = 32 + + +class DjangoSecretKeyMissingError(Exception): + """Raised when the Django secret key is not generated by the leader unit.""" + + +class DjangoAdminCredentialsMissingError(Exception): + """Raised when the Django admin credentials are not generated by the leader unit.""" + + +class PeerRelationMissingError(Exception): + """Raised when the peer relation is missing.""" + + +def valid_fqdn(value: str) -> str: + """Validate if value is a valid fqdn. TLDs are not allowed. + + Raises: + ValueError: When value is not a valid domain. + + Args: + value: The value to validate. + """ + if not bool(domain(value)): + raise ValueError(f"Invalid domain: {value}") + return value + + +FQDN = Annotated[str, BeforeValidator(valid_fqdn)] + + +@dataclass +class HaproxyRoutePolicyInformation: + """Charm state for HAProxy Route Policy information. + + Attributes: + allowed_hosts: List of allowed hosts. + admin_username: Django admin username. + admin_password: Django admin password. 
+ secret_key: Django secret key. + """ + + allowed_hosts: list[FQDN | IPvAnyAddress] = Field() + admin_username: str = Field() + admin_password: str = Field() + secret_key: str = Field() + + @property + def allowed_hosts_snap_configuration(self) -> dict[str, str]: + """Return snap configuration keys and values.""" + return { + "allowed-hosts": json.dumps(self.allowed_hosts), + } + + @classmethod + def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": + """Create a HaproxyRoutePolicyInformation charm state. + + Returns: + HaproxyRoutePolicyInformation: The information. + + Raises: + PeerRelationMissingError: If the peer relation is missing. + """ + peer_relation = charm.model.get_relation(PEER_RELATION_NAME) + if not peer_relation: + raise PeerRelationMissingError("Peer relation is missing.") + + allowed_hosts = ( + [ + cast(IPvAnyAddress | FQDN, address) + for address in cast(str, charm.config.get("allowed-hosts")).split(",") + ] + if charm.config.get("allowed-hosts") + else [] + ) + admin_username, admin_password = _get_django_admin_credentials( + charm, peer_relation + ).values() + secret_key = _get_django_secret_key(charm, peer_relation)["secret-key"] + return cls( + allowed_hosts=allowed_hosts, + admin_username=admin_username, + admin_password=admin_password, + secret_key=secret_key, + ) + + +def _get_django_admin_credentials( + charm: ops.CharmBase, peer_relation: ops.Relation +) -> dict[str, str]: + """Get the Django admin user from the charm's config. + + Returns: + The Django admin user. 
+ """ + try: + secret = charm.model.get_secret(label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL) + return secret.get_content() + except ops.SecretNotFoundError: + if charm.unit.is_leader(): + django_admin_credentials_data = { + "username": "admin", + "password": secrets.token_urlsafe(SECRET_LENGTH), + } + secret = charm.app.add_secret( + label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, + content=django_admin_credentials_data, + ) + peer_relation.data[charm.app]["django-admin-credentials"] = ( + secret.label or DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL + ) + return django_admin_credentials_data + raise DjangoAdminCredentialsMissingError( + "Waiting for the leader unit to generate the Django admin credentials." + ) + + +def _get_django_secret_key(charm: ops.CharmBase, peer_relation: ops.Relation) -> dict[str, str]: + """Get the Django secret key from the charm's config. + + Returns: + The Django secret key. + + Raises: + DjangoSecretKeyMissingError: If the secret key is not yet created by the leader. + """ + try: + secret = charm.model.get_secret(label=DJANGO_SECRET_KEY_SECRET_LABEL) + return secret.get_content() + except ops.SecretNotFoundError: + if charm.unit.is_leader(): + django_secret_key_data = {"secret-key": secrets.token_urlsafe(SECRET_LENGTH)} + secret = charm.app.add_secret( + label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data + ) + peer_relation.data[charm.app]["django-secret-key"] = ( + secret.label or DJANGO_SECRET_KEY_SECRET_LABEL + ) + return django_secret_key_data + raise DjangoSecretKeyMissingError( + "Waiting for the leader unit to generate the Django secret key." 
+ ) diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 2db0c4612..ab3396ef8 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -3,15 +3,17 @@ """Unit tests for haproxy-route-policy-operator charm.""" +import secrets from unittest.mock import patch import pytest from ops import testing -from charm import ( +from charm import HaproxyRoutePolicyCharm +from state.policy import ( DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, DJANGO_SECRET_KEY_SECRET_LABEL, - HaproxyRoutePolicyCharm, + SECRET_LENGTH, ) @@ -29,6 +31,11 @@ def _database_relation() -> testing.Relation: ) +def _peer_relation() -> testing.PeerRelation: + """Build a peer relation.""" + return testing.PeerRelation("haproxy-route-policy-peer") + + def test_install_without_relation_sets_waiting_status(): """ arrange: create charm context without database relation. @@ -47,7 +54,14 @@ def test_install_without_relation_sets_waiting_status(): assert isinstance(out.unit_status, testing.BlockedStatus) -def test_config_changed_reconciles_snap_with_database_credentials(): +@pytest.mark.parametrize( + "is_leader", + [ + pytest.param(True, id="leader-unit"), + pytest.param(False, id="non-leader-unit"), + ], +) +def test_config_changed_reconciles_snap_with_database_credentials(is_leader): """ arrange: create charm context with valid database relation credentials. act: run config-changed event. 
@@ -55,17 +69,22 @@ def test_config_changed_reconciles_snap_with_database_credentials(): """ ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State( - relations=[_database_relation()], + relations=[_database_relation(), _peer_relation()], secrets=[ testing.Secret( - label=DJANGO_SECRET_KEY_SECRET_LABEL, tracked_content={"secret-key": "test"} + label=DJANGO_SECRET_KEY_SECRET_LABEL, + tracked_content={"secret-key": secrets.token_urlsafe(SECRET_LENGTH)}, ), testing.Secret( label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, # Ignore bandit warning as this is for testing. - tracked_content={"username": "admin", "password": "admin"}, # nosec + tracked_content={ + "username": "admin", + "password": secrets.token_urlsafe(SECRET_LENGTH), + }, # nosec ), ], + leader=is_leader, ) with ( @@ -80,9 +99,10 @@ def test_config_changed_reconciles_snap_with_database_credentials(): assert out.unit_status == testing.ActiveStatus() install_snap_mock.assert_called_once() configure_mock.assert_called_once() - migrate_mock.assert_called_once() start_mock.assert_called_once() - create_or_update_user_mock.assert_called_once() + if is_leader: + migrate_mock.assert_called_once() + create_or_update_user_mock.assert_called_once() @pytest.mark.parametrize( @@ -115,7 +135,7 @@ def test_config_changed_missing_secrets(secrets): assert: unit in waiting status. """ ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()], secrets=secrets) + state = testing.State(relations=[_database_relation(), _peer_relation()], secrets=secrets) with ( patch("charm.install_snap"), @@ -136,7 +156,9 @@ def test_config_changed_leader_create_secrets(): assert: secrets are created. 
""" ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()], secrets=[], leader=True) + state = testing.State( + relations=[_database_relation(), _peer_relation()], secrets=[], leader=True + ) with ( patch("charm.install_snap"), diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py new file mode 100644 index 000000000..1d2928d74 --- /dev/null +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py @@ -0,0 +1,118 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Unit tests for HAProxy Route Policy state dataclass.""" + +from typing import Any, cast + +import pytest +from pydantic import ValidationError + +from state.policy import HaproxyRoutePolicyInformation + + +def _build_state(allowed_hosts: list[str]) -> HaproxyRoutePolicyInformation: + """Build a valid state instance with overridable allowed hosts.""" + return HaproxyRoutePolicyInformation( + allowed_hosts=cast(list[Any], allowed_hosts), + admin_username="admin", + # Ignore bandit warning as this is for testing. + admin_password="secret", # nosec + secret_key="test-secret-key", + ) + + +@pytest.mark.parametrize( + "allowed_hosts, expected_allowed_hosts", + [ + pytest.param([], [], id="empty-list"), + pytest.param(["example.com"], ["example.com"], id="single-fqdn"), + pytest.param( + ["example.com", "api.example.com"], + ["example.com", "api.example.com"], + id="multiple-fqdn", + ), + pytest.param(["10.0.0.10"], ["10.0.0.10"], id="ipv4-address"), + pytest.param(["2001:db8::1"], ["2001:db8::1"], id="ipv6-address"), + ], +) +def test_haproxy_route_policy_information_init_valid_allowed_hosts( + allowed_hosts: list[str], expected_allowed_hosts: list[str] +): + """ + arrange: prepare valid host inputs. + act: initialize HaproxyRoutePolicyInformation. 
+ assert: initialization succeeds and normalized hosts are stored. + """ + state = _build_state(allowed_hosts) + + assert [str(host) for host in state.allowed_hosts] == expected_allowed_hosts + + +@pytest.mark.parametrize( + "allowed_hosts", + [ + pytest.param(["invalid host"], id="space-in-host"), + pytest.param(["http://example.com"], id="url-not-host"), + pytest.param(["exa_mple.com"], id="underscore-in-label"), + ], +) +def test_haproxy_route_policy_information_init_invalid_allowed_hosts(allowed_hosts: list[str]): + """ + arrange: prepare invalid host inputs. + act: initialize HaproxyRoutePolicyInformation. + assert: pydantic validation error is raised. + """ + with pytest.raises(ValidationError): + _build_state(allowed_hosts) + + +@pytest.mark.parametrize( + "field_name, field_value", + [ + pytest.param("admin_username", None, id="missing-admin-username"), + pytest.param("admin_password", None, id="missing-admin-password"), + pytest.param("secret_key", None, id="missing-secret-key"), + ], +) +def test_haproxy_route_policy_information_init_rejects_none_string_fields( + field_name: str, field_value: None +): + """ + arrange: build state payload with missing required string field. + act: initialize HaproxyRoutePolicyInformation. + assert: pydantic validation error is raised. + """ + payload = { + "allowed_hosts": ["example.com"], + "admin_username": "admin", + # Ignore bandit warning as this is for testing. 
+ "admin_password": "secret", # nosec + "secret_key": "test-secret-key", + } + payload[field_name] = field_value + + with pytest.raises(ValidationError): + HaproxyRoutePolicyInformation(**payload) + + +@pytest.mark.parametrize( + "allowed_hosts, expected", + [ + pytest.param([], {"allowed-hosts": "[]"}, id="empty"), + pytest.param( + ["example.com", "api.example.com"], + {"allowed-hosts": '["example.com", "api.example.com"]'}, + id="multiple-fqdn", + ), + ], +) +def test_allowed_hosts_snap_configuration(allowed_hosts: list[str], expected: dict[str, str]): + """ + arrange: initialize state with valid allowed hosts. + act: read snap configuration property. + assert: allowed-hosts is serialized to expected JSON string. + """ + state = _build_state(allowed_hosts) + + assert state.allowed_hosts_snap_configuration == expected diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock index c8c29eed4..4f8d2f929 100644 --- a/haproxy-route-policy-operator/uv.lock +++ b/haproxy-route-policy-operator/uv.lock @@ -463,6 +463,7 @@ dependencies = [ { name = "ops" }, { name = "pydantic" }, { name = "requests" }, + { name = "validators" }, ] [package.dev-dependencies] @@ -1565,6 +1566,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] +[[package]] +name = "validators" +version = "0.35.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399, upload-time = "2025-05-01T05:42:06.7Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, +] + [[package]] name = "wcwidth" version = "0.6.0" From e9d071dbb8bcb4bf548252361808ffc15cfbcbfb Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 12:22:26 +0200 Subject: [PATCH 138/201] cast to string before dumping to json --- haproxy-route-policy-operator/src/state/policy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index a552c9335..654b80e7e 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -70,7 +70,7 @@ class HaproxyRoutePolicyInformation: def allowed_hosts_snap_configuration(self) -> dict[str, str]: """Return snap configuration keys and values.""" return { - "allowed-hosts": json.dumps(self.allowed_hosts), + "allowed-hosts": json.dumps([str(host) for host in self.allowed_hosts]), } @classmethod From 3a15a1fde4b786115be6c10740f6a0066019fd0b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 2 Apr 2026 14:41:20 +0200 Subject: [PATCH 139/201] add change artifact --- docs/release-notes/artifacts/pr0423.yaml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0423.yaml diff --git a/docs/release-notes/artifacts/pr0423.yaml b/docs/release-notes/artifacts/pr0423.yaml new file mode 100644 index 000000000..56031316f --- /dev/null +++ b/docs/release-notes/artifacts/pr0423.yaml @@ -0,0 +1,23 @@ +version_schema: 2 + +changes: + - title: Added allowed-hosts configuration support to haproxy-route-policy-operator + author: tphan025 + type: minor + description: > + Added a new `allowed-hosts` charm configuration option for + 
`haproxy-route-policy-operator` and wired it into snap configuration as a + JSON-encoded `DJANGO_ALLOWED_HOSTS` value. Introduced a dedicated policy + state module to centralize shared operator information (allowed hosts, + admin credentials, secret key), including validation of hostnames/IP + addresses. Updated charm reconcile flow to consume the new state model and + refined leader/non-leader behavior for migration and admin-user updates. + Added unit tests for allowed-hosts validation/serialization and updated + charm unit tests for the new peer/state behavior. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/423 + related_doc: + related_issue: + visibility: public + highlight: false From 6d469387aa87f664b2b5d8020bc16a39bcba41dd Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 14:57:29 +0200 Subject: [PATCH 140/201] update handling of credentials --- haproxy-route-policy-operator/src/charm.py | 20 ++++++++++++------- .../src/state/policy.py | 16 ++++++++++----- 2 files changed, 24 insertions(+), 12 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index f4261d890..ad8b45a47 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -30,6 +30,7 @@ from state.policy import ( DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, PEER_RELATION_NAME, + DjangoAdminCredentialsInvalidError, DjangoAdminCredentialsMissingError, DjangoSecretKeyMissingError, HaproxyRoutePolicyInformation, @@ -85,11 +86,10 @@ def _reconcile(self, _: ops.EventBase) -> None: run_migrations() self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") - credentials = self._get_django_admin_credentials(peer_relation) - if (username := credentials.get("username")) and ( - password := credentials.get("password") - ): - create_or_update_user(username, password) + create_or_update_user( + haproxy_route_policy_information.admin_username, + 
haproxy_route_policy_information.admin_password, + ) self.unit.status = ops.MaintenanceStatus("starting gunicorn service") start_gunicorn_service() @@ -106,9 +106,15 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Peer relation missing") self.unit.status = ops.WaitingStatus("Waiting for peer relation.") return - except (DjangoSecretKeyMissingError, DjangoAdminCredentialsMissingError): + except ( + DjangoSecretKeyMissingError, + DjangoAdminCredentialsMissingError, + DjangoAdminCredentialsInvalidError, + ): logger.exception("Django shared configuration not ready") - self.unit.status = ops.WaitingStatus("Waiting for leader to set shared configuration.") + self.unit.status = ops.WaitingStatus( + "Waiting for complete shared configuration from leader." + ) return except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: logger.exception("Failed to reconcile haproxy-route-policy service") diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 654b80e7e..36f61b2c4 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -29,6 +29,10 @@ class DjangoAdminCredentialsMissingError(Exception): """Raised when the Django admin credentials are not generated by the leader unit.""" +class DjangoAdminCredentialsInvalidError(Exception): + """Raised when the Django admin credentials are invalid.""" + + class PeerRelationMissingError(Exception): """Raised when the peer relation is missing.""" @@ -95,14 +99,16 @@ def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": if charm.config.get("allowed-hosts") else [] ) - admin_username, admin_password = _get_django_admin_credentials( - charm, peer_relation - ).values() + credentials = _get_django_admin_credentials(charm, peer_relation) + if not (credentials.get("username") and credentials.get("password")): + raise DjangoAdminCredentialsInvalidError( + "Waiting 
for the leader unit to generate the Django admin credentials." + ) secret_key = _get_django_secret_key(charm, peer_relation)["secret-key"] return cls( allowed_hosts=allowed_hosts, - admin_username=admin_username, - admin_password=admin_password, + admin_username=credentials["username"], + admin_password=credentials["password"], secret_key=secret_key, ) From e3933c56a5364f2e11872aeb1c5953a59392a245 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 16:07:57 +0200 Subject: [PATCH 141/201] bootstrap lib --- haproxy-route-policy-operator/charmcraft.yaml | 2 + .../lib/charms/haproxy/v2/haproxy_route.py | 1609 +++++++++++++++++ .../v0/haproxy_route_policy.py | 61 + 3 files changed, 1672 insertions(+) create mode 100644 haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py create mode 100644 haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 97c4cc1cc..9d6035fb1 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -54,6 +54,8 @@ actions: charm-libs: - lib: data_platform_libs.data_interfaces version: "0" + - lib: haproxy.haproxy_route + version: "2" peers: haproxy-route-policy-peer: diff --git a/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py b/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py new file mode 100644 index 000000000..795c815ad --- /dev/null +++ b/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py @@ -0,0 +1,1609 @@ +# pylint: disable=too-many-lines +"""Haproxy-route interface library. + +## Getting Started + +To get started using the library, you just need to fetch the library using `charmcraft`. 
+ +```shell +cd some-charm +charmcraft fetch-lib charms.haproxy.v2.haproxy_route +``` + +In the `metadata.yaml` of the charm, add the following: + +```yaml +requires: + backend: + interface: haproxy-route + limit: 1 +``` + +Then, to initialise the library: + +```python +from charms.haproxy.v2.haproxy_route import HaproxyRouteRequirer + +class SomeCharm(CharmBase): + def __init__(self, *args): + # ... + + # There are 2 ways you can use the requirer implementation: + # 1. To initialize the requirer with parameters: + self.haproxy_route_requirer = HaproxyRouteRequirer(self, + relation_name=, + service=, + ports=, + protocol=, + hosts=, + paths=, + hostname=, + additional_hostnames=, + check_interval=, + check_rise=, + check_fall=, + check_path=, + check_port=, + path_rewrite_expressions=, list of path rewrite expressions, + query_rewrite_expressions=, list of query rewrite expressions, + header_rewrite_expressions=, list of (header_name, rewrite_expression), + load_balancing_algorithm=, defaults to "leastconn", + load_balancing_cookie=, only used when load_balancing_algorithm is cookie + load_balancing_consistent_hashing=, to enable consistent hashing, + defaults to False, + rate_limit_connections_per_minute=, + rate_limit_policy=, + upload_limit=, + download_limit=, + retry_count=, + retry_redispatch=, + deny_paths=, + server_timeout=, + connect_timeout=, + queue_timeout=, + server_maxconn=, + unit_address=, + http_server_close=, + ) + + # 2.To initialize the requirer with no parameters, i.e + # self.haproxy_route_requirer = HaproxyRouteRequirer(self) + # This will simply initialize the requirer class and it won't perfom any action. + + # Afterwards regardless of how you initialized the requirer you can call the + # provide_haproxy_route_requirements method anywhere in your charm to update the requirer data. + # The method takes the same number of parameters as the requirer class. + # provide_haproxy_route_requirements(address=, port=, ...) 
+ + self.framework.observe( + self.framework.on.config_changed, self._on_config_changed + ) + self.framework.observe( + self.haproxy_route_requirer.on.ready, self._on_endpoints_ready + ) + self.framework.observe( + self.haproxy_route_requirer.on.removed, self._on_endpoints_removed + ) + + def _on_config_changed(self, event: ConfigChangedEvent) -> None: + self.haproxy_route_requirer.provide_haproxy_route_requirements(...) + + def _on_endpoints_ready(self, _: EventBase) -> None: + # Handle endpoints ready event + ... + + def _on_endpoints_removed(self, _: EventBase) -> None: + # Handle endpoints removed event + ... + +## Using the library as the provider +The provider charm should expose the interface as shown below: +```yaml +provides: + haproxy-route: + interface: haproxy-route +``` +Note that this interface supports relating to multiple endpoints. + +Then, to initialise the library: +```python +from charms.haproxy.v2.haproxy_route import HaproxyRouteProvider + +class SomeCharm(CharmBase): + self.haproxy_route_provider = HaproxyRouteProvider(self) + self.framework.observe( + self.haproxy_route_provider.on.data_available, self._on_haproxy_route_data_available + ) + + def _on_haproxy_route_data_available(self, event: EventBase) -> None: + data = self.haproxy_route_provider.get_data(self.haproxy_route_provider.relations) + ... 
+""" + +import json +import logging +from collections import defaultdict +from enum import Enum +from functools import partial +from typing import Annotated, Any, Literal, MutableMapping, Optional, cast + +from ops import CharmBase, ModelError, RelationBrokenEvent +from ops.charm import CharmEvents +from ops.framework import EventBase, EventSource, Object +from ops.model import Relation +from pydantic import ( + AnyHttpUrl, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + IPvAnyAddress, + ValidationError, + field_validator, + model_validator, +) +from pydantic.dataclasses import dataclass +from typing_extensions import Self +from validators import domain + +# The unique Charmhub library identifier, never change it +LIBID = "08b6347482f6455486b5f5bb4dc4e6cf" + +# Increment this major API version when introducing breaking changes +LIBAPI = 2 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 1 + +logger = logging.getLogger(__name__) +HAPROXY_ROUTE_RELATION_NAME = "haproxy-route" +HAPROXY_CONFIG_INVALID_CHARACTERS = "\n\t#\\'\"\r$ " +HAPROXY_EXPR_INVALID_CHARACTERS = "\n" + + +def value_contains_invalid_characters( + invalid_characters: str, value: Optional[str] +) -> Optional[str]: + """Validate if value contains invalid config characters. + + Args: + invalid_characters: String with the list of invalid characters. + value: The value to validate. + + Raises: + ValueError: When value contains invalid characters. + + Returns: + The validated value. + """ + if value is None: + return value + + if [char for char in value if char in invalid_characters]: + raise ValueError(f"Relation data contains invalid character(s) {value}") + return value + + +def valid_domain_with_wildcard(value: str) -> str: + """Validate if value is a valid domain that can include a wildcard. + + The wildcard character (*) can't be at the TLD level, for example *.com is not valid. 
+ This is supported natively by the library ( e.g domain("com") will raise a ValidationError ). + + Raises: + ValueError: When value is not a valid domain. + + Args: + value: The value to validate. + """ + fqdn = value[2:] if value.startswith("*.") else value + if not bool(domain(fqdn)): + raise ValueError(f"Invalid domain: {value}") + return value + + +VALIDSTR = Annotated[ + str, + BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_CONFIG_INVALID_CHARACTERS)), +] +VALIDEXPRSTR = Annotated[ + str, + BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_EXPR_INVALID_CHARACTERS)), +] + + +class DataValidationError(Exception): + """Raised when data validation fails.""" + + +class HaproxyRouteInvalidRelationDataError(Exception): + """Rasied when data validation of the haproxy-route relation fails.""" + + +class _DatabagModel(BaseModel): + """Base databag model. + + Attrs: + model_config: pydantic model configuration. + """ + + model_config = ConfigDict( + # tolerate additional keys in databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, + ) # type: ignore + """Pydantic config.""" + + @classmethod + def load(cls, databag: MutableMapping) -> "_DatabagModel": + """Load this model from a Juju json databag. + + Args: + databag: Databag content. + + Raises: + DataValidationError: When model validation failed. + + Returns: + _DatabagModel: The validated model. 
+ """ + nest_under = cls.model_config.get("_NEST_UNDER") + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.model_fields.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) + except ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.error(str(e), exc_info=True) + raise DataValidationError(msg) from e + + @classmethod + def from_dict(cls, values: dict) -> "_DatabagModel": + """Load this model from a dict. + + Args: + values: Dict values. + + Raises: + DataValidationError: When model validation failed. + + Returns: + _DatabagModel: The validated model. + """ + try: + logger.info("Loading values from dictionary: %s", values) + return cls.model_validate(values) + except ValidationError as e: + msg = f"failed to validate: {values}" + logger.debug(msg, exc_info=True) + raise DataValidationError(msg) from e + + def dump( + self, databag: Optional[MutableMapping] = None, clear: bool = True + ) -> Optional[MutableMapping]: + """Write the contents of this model to Juju databag. + + Args: + databag: The databag to write to. + clear: Whether to clear the databag before writing. + + Returns: + MutableMapping: The databag. 
+ """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True) + databag.update({k: json.dumps(v) for k, v in dct.items()}) + return databag + + +class ServerHealthCheck(BaseModel): + """Configuration model for backend server health checks. + + Attributes: + interval: Number of seconds between consecutive health check attempts. + rise: Number of consecutive successful health checks required for up. + fall: Number of consecutive failed health checks required for DOWN. + path: List of URL paths to use for HTTP health checks. + port: Customize port value for http-check. + """ + + interval: Optional[int] = Field( + description="The interval (in seconds) between health checks.", default=None + ) + rise: Optional[int] = Field( + description="How many successful health checks before server is considered up.", + default=None, + ) + fall: Optional[int] = Field( + description="How many failed health checks before server is considered down.", default=None + ) + path: Optional[VALIDSTR] = Field(description="The health check path.", default=None) + port: Optional[int] = Field(description="The health check port.", default=None) + + @model_validator(mode="after") + def check_all_required_fields_set(self) -> Self: + """Check that all required fields for health check are set. + + Raises: + ValueError: When validation fails. + + Returns: + The validated model. + """ + if not bool(self.interval) == bool(self.rise) == bool(self.fall): + raise ValueError("All three of interval, rise and fall must be set.") + return self + + +# tarpit is not yet implemented +class RateLimitPolicy(Enum): + """Enum of possible rate limiting policies. 
+ + Attrs: + DENY: deny a client's HTTP request to return a 403 Forbidden error. + REJECT: closes the connection immediately without sending a response. + SILENT: disconnects immediately without notifying the client + that the connection has been closed. + """ + + DENY = "deny" + REJECT = "reject" + SILENT = "silent-drop" + + +class RateLimit(BaseModel): + """Configuration model for connection rate limiting. + + Attributes: + connections_per_minute: Number of connections allowed per minute for a client. + policy: Action to take when the rate limit is exceeded. + """ + + connections_per_minute: int = Field(description="How many connections are allowed per minute.") + policy: RateLimitPolicy = Field( + description="Configure the rate limit policy.", default=RateLimitPolicy.DENY + ) + + +class LoadBalancingAlgorithm(Enum): + """Enum of possible http_route types. + + Attrs: + LEASTCONN: The server with the lowest number of connections receives the connection. + SRCIP: Load balance using the hash of The source IP address. + ROUNDROBIN: Each server is used in turns, according to their weights. + COOKIE: Load balance using hash req.cookie(clientid). + """ + + LEASTCONN = "leastconn" + SRCIP = "source" + ROUNDROBIN = "roundrobin" + COOKIE = "cookie" + + +class LoadBalancingConfiguration(BaseModel): + """Configuration model for load balancing. + + Attributes: + algorithm: Algorithm to use for load balancing. + cookie: Cookie name to use when algorithm is set to cookie. + consistent_hashing: Use consistent hashing to avoid redirection + when servers are added/removed. + """ + + algorithm: LoadBalancingAlgorithm = Field( + description="Configure the load balancing algorithm for the service.", + default=LoadBalancingAlgorithm.LEASTCONN, + ) + cookie: Optional[VALIDSTR] = Field( + description="Only used when algorithm is COOKIE. 
Define the cookie to load balance on.", + default=None, + ) + # Note: Later when the generic LoadBalancingAlgorithm.HASH is implemented this attribute + # will also apply under that mode. + consistent_hashing: bool = Field( + description=( + "Only used when the `algorithm` is SRCIP or COOKIE. " + "Use consistent hashing to avoid redirection when servers are added/removed. " + "Default is False as it usually does not give a balanced distribution." + ), + default=False, + ) + + @model_validator(mode="after") + def validate_attributes(self) -> Self: + """Check that algorithm-specific configs are only set with their respective algorithm. + + Raises: + ValueError: When validation fails in one of these cases: + 1. self.cookie is not None when self.algorithm != COOKIE + 2. self.consistent_hashing is True when algorithm is neither COOKIE nor SRCIP + + Returns: + The validated model. + """ + if self.cookie is not None and self.algorithm != LoadBalancingAlgorithm.COOKIE: + raise ValueError("cookie only applies when algorithm is COOKIE.") + + if self.consistent_hashing and self.algorithm not in [ + LoadBalancingAlgorithm.COOKIE, + LoadBalancingAlgorithm.SRCIP, + ]: + raise ValueError("Consistent hashing only applies when algorithm is COOKIE or SRCIP.") + return self + + +class BandwidthLimit(BaseModel): + """Configuration model for bandwidth rate limiting. + + Attributes: + upload: Limit upload speed (bytes per second). + download: Limit download speed (bytes per second). + """ + + upload: Optional[int] = Field(description="Upload limit (bytes per seconds).", default=None) + download: Optional[int] = Field( + description="Download limit (bytes per seconds).", default=None + ) + + +# retry-on is not yet implemented +class Retry(BaseModel): + """Configuration model for retry. + + Attributes: + count: How many times should a request retry. + redispatch: Whether to redispatch failed requests to another server. 
+ """ + + count: int = Field(description="How many times should a request retry.") + redispatch: bool = Field( + description="Whether to redispatch failed requests to another server.", default=False + ) + + +class TimeoutConfiguration(BaseModel): + """Configuration model for timeout. + + Attributes: + server: Timeout for requests from haproxy to backend servers. + connect: Timeout for client requests to haproxy. + queue: Timeout for requests waiting in the queue after server-maxconn is reached. + """ + + server: int = Field( + description="Timeout (in seconds) for requests from haproxy to backend servers.", + default=60, + ) + connect: int = Field( + description="Timeout (in seconds) for client requests to haproxy.", default=60 + ) + queue: int = Field( + description="Timeout (in seconds) for requests in the queue.", + default=60, + ) + + +class HaproxyRewriteMethod(Enum): + """Enum of possible HTTP rewrite methods. + + Attrs: + SET_PATH: The server with the lowest number of connections receives the connection. + SET_QUERY: Load balance using the hash of The source IP address. + SET_HEADER: Each server is used in turns, according to their weights. + """ + + SET_PATH = "set-path" + SET_QUERY = "set-query" + SET_HEADER = "set-header" + + +class RewriteConfiguration(BaseModel): + """Configuration model for HTTP rewrite. + + Attributes: + method: Which rewrite method to apply.One of set-path, set-query, set-header. + expression: Regular expression to use with the rewrite method. + header: The name of the header to rewrited. + """ + + method: HaproxyRewriteMethod = Field( + description="Which rewrite method to apply.One of set-path, set-query, set-header." + ) + expression: VALIDEXPRSTR = Field( + description="Regular expression to use with the rewrite method." 
+ ) + header: Optional[VALIDSTR] = Field( + description="The name of the header to rewrite.", default=None + ) + + +class RequirerApplicationData(_DatabagModel): + """Configuration model for HAProxy route requirer application data. + + Attributes: + service: Name of the service requesting HAProxy routing. + ports: List of port numbers on which the service is listening. + protocol: The protocol that the service speaks. + hosts: List of backend server addresses. + paths: List of URL paths to route to this service. Defaults to an empty list. + hostname: Optional: The hostname of this service. + additional_hostnames: List of additional hostnames of this service. + Defaults to an empty list. + rewrites: List of RewriteConfiguration objects defining path, query, or header + rewrite rules. + check: ServerHealthCheck configuration for monitoring backend health. + load_balancing: Configuration for the load balancing strategy. + rate_limit: Optional configuration for limiting connection rates. + bandwidth_limit: Optional configuration for limiting upload and download bandwidth. + retry: Optional configuration for request retry behavior. + deny_paths: List of URL paths that should not be routed to the backend. + timeout: Configuration for server, client, and queue timeouts. + server_maxconn: Optional maximum number of connections per server. + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. Defaults to False. + Warning: enabling HTTP is a security risk, make sure you apply the necessary precautions. + external_grpc_port: Optional external gRPC port. 
+ """ + + service: VALIDSTR = Field(description="The name of the service.") + ports: list[int] = Field(description="The list of ports listening for this service.") + protocol: Literal["http", "https"] = Field( + description="The protocol that the service speaks.", + default="http", + ) + hosts: list[IPvAnyAddress] = Field( + description="The list of backend server addresses. Currently only support IP addresses.", + default=[], + ) + paths: list[VALIDSTR] = Field( + description="The list of paths to route to this service.", default=[] + ) + hostname: Optional[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = Field( + description="Hostname of this service.", default=None + ) + additional_hostnames: list[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = ( + Field(description="The list of additional hostnames of this service.", default=[]) + ) + rewrites: list[RewriteConfiguration] = Field( + description="The list of path rewrite rules.", default=[] + ) + check: Optional[ServerHealthCheck] = Field( + description="Configure health check for the service.", + default=None, + ) + load_balancing: LoadBalancingConfiguration = Field( + description="Configure loadbalancing.", default=LoadBalancingConfiguration() + ) + rate_limit: Optional[RateLimit] = Field( + description="Configure rate limit for the service.", default=None + ) + bandwidth_limit: BandwidthLimit = Field( + description="Configure bandwidth limit for the service.", default=BandwidthLimit() + ) + retry: Optional[Retry] = Field( + description="Configure retry for incoming requests.", default=None + ) + deny_paths: list[VALIDSTR] = Field( + description="Configure path that should not be routed to the backend", default=[] + ) + timeout: TimeoutConfiguration = Field( + description="Configure timeout", + default=TimeoutConfiguration(), + ) + server_maxconn: Optional[int] = Field( + description="Configure maximum connection per server", default=None + ) + http_server_close: bool = Field( + 
description="Configure server close after request", default=False + ) + allow_http: bool = Field( + description="Whether to allow HTTP traffic in addition to HTTPS.", default=False + ) + external_grpc_port: int | None = Field( + description="Optional external gRPC port.", default=None, gt=0, le=65535 + ) + + @field_validator("load_balancing") + @classmethod + def validate_load_balancing_configuration( + cls, configuration: LoadBalancingConfiguration + ) -> LoadBalancingConfiguration: + """Validate the parsed load balancing configuration. + + Args: + configuration: The configuration to validate. + + Raises: + ValueError: When cookie is not set under COOKIE load balancing mode. + + Returns: + LoadBalancingConfiguration: The validated configuration. + """ + if configuration.algorithm == LoadBalancingAlgorithm.COOKIE and not configuration.cookie: + raise ValueError("cookie must be set if load balacing algorithm is COOKIE.") + return configuration + + @field_validator("rewrites") + @classmethod + def validate_rewrites(cls, rewrites: list[RewriteConfiguration]) -> list[RewriteConfiguration]: + """Validate the parsed list of rewrite configurations. + + Args: + rewrites: The configurations to validate. + + Raises: + ValueError: When header is not set under SET_HEADER rewrite method. + + Returns: + list[RewriteConfiguration]: The validated configurations. + """ + for rewrite in rewrites: + if rewrite.method == HaproxyRewriteMethod.SET_HEADER and not rewrite.method: + raise ValueError("header must be set if rewrite method is SET_HEADER.") + return rewrites + + +class HaproxyRouteProviderAppData(_DatabagModel): + """haproxy-route provider databag schema. + + Attributes: + endpoints: The list of proxied endpoints that maps to the backend. + """ + + endpoints: list[AnyHttpUrl] + + +class RequirerUnitData(_DatabagModel): + """haproxy-route requirer unit data. + + Attributes: + address: IP address of the unit. 
+ """ + + address: IPvAnyAddress = Field(description="IP address of the unit.") + + +@dataclass +class HaproxyRouteRequirerData: + """haproxy-route requirer data. + + Attributes: + relation_id: Id of the relation. + application_data: Application data. + units_data: Units data + """ + + relation_id: int + application_data: RequirerApplicationData + units_data: list[RequirerUnitData] + + +@dataclass +class HaproxyRouteRequirersData: + """haproxy-route requirers data. + + Attributes: + requirers_data: List of requirer data. + relation_ids_with_invalid_data: Set of relation ids that contains invalid data. + """ + + requirers_data: list[HaproxyRouteRequirerData] + relation_ids_with_invalid_data: set[int] + + @model_validator(mode="after") + def check_services_unique(self) -> Self: + """Check that requirers define unique services. + + Raises: + DataValidationError: When requirers declared duplicate services. + + Returns: + The validated model. + """ + services = [ + requirer_data.application_data.service for requirer_data in self.requirers_data + ] + if len(services) != len(set(services)): + raise DataValidationError("Services declaration by requirers must be unique.") + + return self + + @model_validator(mode="after") + def check_external_grpc_port_unique(self) -> Self: + """Check that external gRPC ports are unique across requirer applications. + If multiple requirer applications declare the same external gRPC port, + their relation ids are added to relation_ids_with_invalid_data. + + Returns: + The validated model. 
+ """ + relation_ids_per_port: dict[int, list[int]] = defaultdict(list[int]) + for requirer_data in self.requirers_data: + if requirer_data.application_data.external_grpc_port: + relation_ids_per_port[requirer_data.application_data.external_grpc_port].append( + requirer_data.relation_id + ) + + self.relation_ids_with_invalid_data.update( + relation_id + for relation_ids in relation_ids_per_port.values() + for relation_id in relation_ids + if len(relation_ids) > 1 + ) + return self + + @model_validator(mode="after") + def check_grpc_requires_https(self) -> Self: + """Check that backends with external_grpc_port use https protocol. + If not, their relation ids are added to relation_ids_with_invalid_data. + + Returns: + Self: The validated model + """ + for requirer_data in self.requirers_data: + if all( + [ + requirer_data.application_data.external_grpc_port is not None, + requirer_data.application_data.protocol != "https", + requirer_data.relation_id, + ] + ): + self.relation_ids_with_invalid_data.add(requirer_data.relation_id) + return self + + +class HaproxyRouteDataAvailableEvent(EventBase): + """HaproxyRouteDataAvailableEvent custom event. + + This event indicates that the requirers data are available. + """ + + +class HaproxyRouteDataRemovedEvent(EventBase): + """HaproxyRouteDataRemovedEvent custom event. + + This event indicates that one of the endpoints was removed. + """ + + +class HaproxyRouteProviderEvents(CharmEvents): + """List of events that the TLS Certificates requirer charm can leverage. + + Attributes: + data_available: This event indicates that + the haproxy-route endpoints are available. + data_removed: This event indicates that one of the endpoints was removed. + """ + + data_available = EventSource(HaproxyRouteDataAvailableEvent) + data_removed = EventSource(HaproxyRouteDataRemovedEvent) + + +class HaproxyRouteProvider(Object): + """Haproxy-route interface provider implementation. + + Attributes: + on: Custom events of the provider. 
+ relations: Related appliations. + """ + + on = HaproxyRouteProviderEvents() + + def __init__( + self, + charm: CharmBase, + relation_name: str = HAPROXY_ROUTE_RELATION_NAME, + raise_on_validation_error: bool = False, + ) -> None: + """Initialize the HaproxyRouteProvider. + + Args: + charm: The charm that is instantiating the library. + relation_name: The name of the relation. + raise_on_validation_error: Whether the library should raise + HaproxyRouteInvalidRelationDataError when requirer data validation fails. + If this is set to True the provider charm needs to also catch and handle the + thrown exception. + """ + super().__init__(charm, relation_name) + + self._relation_name = relation_name + self.charm = charm + self.raise_on_validation_error = raise_on_validation_error + on = self.charm.on + self.framework.observe(on[self._relation_name].relation_created, self._configure) + self.framework.observe(on[self._relation_name].relation_changed, self._configure) + self.framework.observe(on[self._relation_name].relation_broken, self._on_endpoint_removed) + self.framework.observe( + on[self._relation_name].relation_departed, self._on_endpoint_removed + ) + + @property + def relations(self) -> list[Relation]: + """The list of Relation instances associated with this endpoint.""" + return list(self.charm.model.relations[self._relation_name]) + + def _configure(self, _event: EventBase) -> None: + """Handle relation events.""" + if relations := self.relations: + # Only for data validation + _ = self.get_data(relations) + self.on.data_available.emit() + + def _on_endpoint_removed(self, _: EventBase) -> None: + """Handle relation broken/departed events.""" + self.on.data_removed.emit() + + def get_data(self, relations: list[Relation]) -> HaproxyRouteRequirersData: + """Fetch requirer data. + + Args: + relations: A list of Relation instances to fetch data from. + + Raises: + HaproxyRouteInvalidRelationDataError: When requirer data validation fails. 
+ + Returns: + HaproxyRouteRequirersData: Validated data from all haproxy-route requirers. + """ + requirers_data: list[HaproxyRouteRequirerData] = [] + relation_ids_with_invalid_data: set[int] = set() + for relation in relations: + try: + application_data = self._get_requirer_application_data(relation) + units_data = self._get_requirer_units_data(relation) + haproxy_route_requirer_data = HaproxyRouteRequirerData( + application_data=application_data, + units_data=units_data, + relation_id=relation.id, + ) + requirers_data.append(haproxy_route_requirer_data) + except DataValidationError as exc: + if self.raise_on_validation_error: + logger.error( + "haproxy-route data validation failed for relation %s: %s", + relation, + str(exc), + ) + raise HaproxyRouteInvalidRelationDataError( + f"haproxy-route data validation failed for relation: {relation}" + ) from exc + relation_ids_with_invalid_data.add(relation.id) + continue + return HaproxyRouteRequirersData( + requirers_data=requirers_data, + relation_ids_with_invalid_data=relation_ids_with_invalid_data, + ) + + def _get_requirer_units_data(self, relation: Relation) -> list[RequirerUnitData]: + """Fetch and validate the requirer's units data. + + Args: + relation: The relation to fetch unit data from. + + Raises: + DataValidationError: When unit data validation fails. + + Returns: + list[RequirerUnitData]: List of validated unit data from the requirer. + """ + requirer_units_data: list[RequirerUnitData] = [] + + for unit in relation.units: + databag = relation.data.get(unit) + if not databag: + logger.error( + "Requirer unit data does not exist even though the unit is still present." 
+ ) + continue + try: + data = cast(RequirerUnitData, RequirerUnitData.load(databag)) + requirer_units_data.append(data) + except DataValidationError: + logger.error("Invalid requirer application data for %s", unit) + raise + return requirer_units_data + + def _get_requirer_application_data(self, relation: Relation) -> RequirerApplicationData: + """Fetch and validate the requirer's application databag. + + Args: + relation: The relation to fetch application data from. + + Raises: + DataValidationError: When requirer application data validation fails. + + Returns: + RequirerApplicationData: Validated application data from the requirer. + """ + try: + return cast( + RequirerApplicationData, RequirerApplicationData.load(relation.data[relation.app]) + ) + except DataValidationError: + logger.error("Invalid requirer application data for %s", relation.app.name) + raise + + def publish_proxied_endpoints(self, endpoints: list[str], relation: Relation) -> None: + """Publish to the app databag the proxied endpoints. + + Args: + endpoints: The list of proxied endpoints to publish. + relation: The relation with the requirer application. + """ + HaproxyRouteProviderAppData(endpoints=[cast(AnyHttpUrl, e) for e in endpoints]).dump( + relation.data[self.charm.app], clear=True + ) + + +class HaproxyRouteEnpointsReadyEvent(EventBase): + """HaproxyRouteEnpointsReadyEvent custom event.""" + + +class HaproxyRouteEndpointsRemovedEvent(EventBase): + """HaproxyRouteEndpointsRemovedEvent custom event.""" + + +class HaproxyRouteRequirerEvents(CharmEvents): + """List of events that the TLS Certificates requirer charm can leverage. + + Attributes: + ready: when the provider proxied endpoints are ready. + removed: when the provider + """ + + ready = EventSource(HaproxyRouteEnpointsReadyEvent) + removed = EventSource(HaproxyRouteEndpointsRemovedEvent) + + +class HaproxyRouteRequirer(Object): + """haproxy-route interface requirer implementation. 
+ + Attributes: + on: Custom events of the requirer. + """ + + on = HaproxyRouteRequirerEvents() + + # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals + def __init__( + self, + charm: CharmBase, + relation_name: str, + service: Optional[str] = None, + ports: Optional[list[int]] = None, + protocol: Literal["http", "https"] = "http", + hosts: Optional[list[IPvAnyAddress]] = None, + paths: Optional[list[str]] = None, + hostname: Optional[str] = None, + additional_hostnames: Optional[list[str]] = None, + check_interval: Optional[int] = None, + check_rise: Optional[int] = None, + check_fall: Optional[int] = None, + check_path: Optional[str] = None, + check_port: Optional[int] = None, + path_rewrite_expressions: Optional[list[str]] = None, + query_rewrite_expressions: Optional[list[str]] = None, + header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, + load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, + load_balancing_cookie: Optional[str] = None, + load_balancing_consistent_hashing: bool = False, + rate_limit_connections_per_minute: Optional[int] = None, + rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, + upload_limit: Optional[int] = None, + download_limit: Optional[int] = None, + retry_count: Optional[int] = None, + retry_redispatch: bool = False, + deny_paths: Optional[list[str]] = None, + server_timeout: int = 60, + connect_timeout: int = 60, + queue_timeout: int = 60, + server_maxconn: Optional[int] = None, + unit_address: Optional[str] = None, + http_server_close: bool = False, + allow_http: bool = False, + ) -> None: + """Initialize the HaproxyRouteRequirer. + + Args: + charm: The charm that is instantiating the library. + relation_name: The name of the relation to bind to. + service: The name of the service to route traffic to. + ports: List of ports the service is listening on. + protocol: The protocol that the service speaks. + hosts: List of backend server addresses. 
Currently only support IP addresses. + paths: List of URL paths to route to this service. + hostname: Hostname of this service. + additional_hostnames: Additional hostnames of this service. + check_interval: Interval between health checks in seconds. + check_rise: Number of successful health checks before server is considered up. + check_fall: Number of failed health checks before server is considered down. + check_path: The path to use for server health checks. + check_port: The port to use for http-check. + path_rewrite_expressions: List of regex expressions for path rewrites. + query_rewrite_expressions: List of regex expressions for query rewrites. + header_rewrite_expressions: List of tuples containing header name + and rewrite expression. + load_balancing_algorithm: Algorithm to use for load balancing. + load_balancing_cookie: Cookie name to use when algorithm is set to cookie. + load_balancing_consistent_hashing: Whether to use consistent hashing. + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + upload_limit: Maximum upload bandwidth in bytes per second. + download_limit: Maximum download bandwidth in bytes per second. + retry_count: Number of times to retry failed requests. + retry_redispatch: Whether to redispatch failed requests to another server. + deny_paths: List of paths that should not be routed to the backend. + server_timeout: Timeout for requests from haproxy to backend servers in seconds. + connect_timeout: Timeout for client requests to haproxy in seconds. + queue_timeout: Timeout for requests waiting in queue in seconds. + server_maxconn: Maximum connections per server. + unit_address: IP address of the unit (if not provided, will use binding address). + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. 
+ Warning: enabling HTTP is a security risk, + make sure you apply the necessary precautions. + """ + super().__init__(charm, relation_name) + + self._relation_name = relation_name + self.relation = self.model.get_relation(self._relation_name) + self.charm = charm + self.app = self.charm.app + + # build the full application data + self._application_data = self._generate_application_data( + service, + ports, + protocol, + hosts, + paths, + hostname, + additional_hostnames, + check_interval, + check_rise, + check_fall, + check_path, + check_port, + path_rewrite_expressions, + query_rewrite_expressions, + header_rewrite_expressions, + load_balancing_algorithm, + load_balancing_cookie, + load_balancing_consistent_hashing, + rate_limit_connections_per_minute, + rate_limit_policy, + upload_limit, + download_limit, + retry_count, + retry_redispatch, + deny_paths, + server_timeout, + connect_timeout, + queue_timeout, + server_maxconn, + http_server_close, + allow_http, + ) + self._unit_address = unit_address + + on = self.charm.on + self.framework.observe(on[self._relation_name].relation_created, self._configure) + self.framework.observe(on[self._relation_name].relation_changed, self._configure) + self.framework.observe(on[self._relation_name].relation_broken, self._on_relation_broken) + + def _configure(self, _: EventBase) -> None: + """Handle relation events.""" + self.update_relation_data() + if self.relation and self.get_proxied_endpoints(): + # This event is only emitted when the provider databag changes + # which only happens when relevant changes happened + # Additionally this event is purely informational and it's up to the requirer to + # fetch the proxied endpoints in their code using get_proxied_endpoints + self.on.ready.emit() + + def _on_relation_broken(self, _: RelationBrokenEvent) -> None: + """Handle relation broken event.""" + self.on.removed.emit() + + # pylint: disable=too-many-arguments,too-many-positional-arguments + def 
provide_haproxy_route_requirements(
+        self,
+        service: str,
+        ports: list[int],
+        protocol: Literal["http", "https"] = "http",
+        hosts: Optional[list[IPvAnyAddress]] = None,
+        paths: Optional[list[str]] = None,
+        hostname: Optional[str] = None,
+        additional_hostnames: Optional[list[str]] = None,
+        check_interval: Optional[int] = None,
+        check_rise: Optional[int] = None,
+        check_fall: Optional[int] = None,
+        check_path: Optional[str] = None,
+        check_port: Optional[int] = None,
+        path_rewrite_expressions: Optional[list[str]] = None,
+        query_rewrite_expressions: Optional[list[str]] = None,
+        header_rewrite_expressions: Optional[list[tuple[str, str]]] = None,
+        load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN,
+        load_balancing_cookie: Optional[str] = None,
+        load_balancing_consistent_hashing: bool = False,
+        rate_limit_connections_per_minute: Optional[int] = None,
+        rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY,
+        upload_limit: Optional[int] = None,
+        download_limit: Optional[int] = None,
+        retry_count: Optional[int] = None,
+        retry_redispatch: bool = False,
+        deny_paths: Optional[list[str]] = None,
+        server_timeout: int = 60,
+        connect_timeout: int = 60,
+        queue_timeout: int = 60,
+        server_maxconn: Optional[int] = None,
+        unit_address: Optional[str] = None,
+        http_server_close: bool = False,
+        allow_http: bool = False,
+        external_grpc_port: Optional[int] = None,
+    ) -> None:
+        """Update haproxy-route requirements data in the relation.
+
+        Args:
+            service: The name of the service to route traffic to.
+            ports: List of ports the service is listening on.
+            protocol: The protocol that the service speaks, defaults to "http".
+            hosts: List of backend server addresses. Currently only support IP addresses.
+            paths: List of URL paths to route to this service.
+            hostname: Hostname of this service.
+            additional_hostnames: Additional hostnames of this service.
+            check_interval: Interval between health checks in seconds.
+ check_rise: Number of successful health checks before server is considered up. + check_fall: Number of failed health checks before server is considered down. + check_path: The path to use for server health checks. + check_port: The port to use for http-check. + path_rewrite_expressions: List of regex expressions for path rewrites. + query_rewrite_expressions: List of regex expressions for query rewrites. + header_rewrite_expressions: List of tuples containing header name + and rewrite expression. + load_balancing_algorithm: Algorithm to use for load balancing. + load_balancing_cookie: Cookie name to use when algorithm is set to cookie. + load_balancing_consistent_hashing: Whether to use consistent hashing. + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + upload_limit: Maximum upload bandwidth in bytes per second. + download_limit: Maximum download bandwidth in bytes per second. + retry_count: Number of times to retry failed requests. + retry_redispatch: Whether to redispatch failed requests to another server. + deny_paths: List of paths that should not be routed to the backend. + server_timeout: Timeout for requests from haproxy to backend servers in seconds. + connect_timeout: Timeout for client requests to haproxy in seconds. + queue_timeout: Timeout for requests waiting in queue in seconds. + server_maxconn: Maximum connections per server. + unit_address: IP address of the unit (if not provided, will use binding address). + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. + Warning: enabling HTTP is a security risk, + make sure you apply the necessary precautions. + external_grpc_port: Optional external gRPC port. 
+ """ + self._unit_address = unit_address + self._application_data = self._generate_application_data( + service, + ports, + protocol, + hosts, + paths, + hostname, + additional_hostnames, + check_interval, + check_rise, + check_fall, + check_path, + check_port, + path_rewrite_expressions, + query_rewrite_expressions, + header_rewrite_expressions, + load_balancing_algorithm, + load_balancing_cookie, + load_balancing_consistent_hashing, + rate_limit_connections_per_minute, + rate_limit_policy, + upload_limit, + download_limit, + retry_count, + retry_redispatch, + deny_paths, + server_timeout, + connect_timeout, + queue_timeout, + server_maxconn, + http_server_close, + allow_http, + external_grpc_port, + ) + self.update_relation_data() + + # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals + def _generate_application_data( # noqa: C901 + self, + service: Optional[str] = None, + ports: Optional[list[int]] = None, + protocol: Literal["http", "https"] = "http", + hosts: Optional[list[IPvAnyAddress]] = None, + paths: Optional[list[str]] = None, + hostname: Optional[str] = None, + additional_hostnames: Optional[list[str]] = None, + check_interval: Optional[int] = None, + check_rise: Optional[int] = None, + check_fall: Optional[int] = None, + check_path: Optional[str] = None, + check_port: Optional[int] = None, + path_rewrite_expressions: Optional[list[str]] = None, + query_rewrite_expressions: Optional[list[str]] = None, + header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, + load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, + load_balancing_cookie: Optional[str] = None, + load_balancing_consistent_hashing: bool = False, + rate_limit_connections_per_minute: Optional[int] = None, + rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, + upload_limit: Optional[int] = None, + download_limit: Optional[int] = None, + retry_count: Optional[int] = None, + retry_redispatch: bool = False, + 
deny_paths: Optional[list[str]] = None, + server_timeout: int = 60, + connect_timeout: int = 60, + queue_timeout: int = 60, + server_maxconn: Optional[int] = None, + http_server_close: bool = False, + allow_http: bool = False, + external_grpc_port: Optional[int] = None, + ) -> dict[str, Any]: + """Generate the complete application data structure. + + Args: + service: The name of the service to route traffic to. + ports: List of ports the service is listening on. + protocol: The protocol that the service speaks. + hosts: List of backend server addresses. Currently only support IP addresses. + paths: List of URL paths to route to this service. + hostname: Hostname of this service. + additional_hostnames: Additional hostnames of this service. + check_interval: Interval between health checks in seconds. + check_rise: Number of successful health checks before server is considered up. + check_fall: Number of failed health checks before server is considered down. + check_path: The path to use for server health checks. + check_port: The port to use for http-check. + path_rewrite_expressions: List of regex expressions for path rewrites. + query_rewrite_expressions: List of regex expressions for query rewrites. + header_rewrite_expressions: List of tuples containing header name and + rewrite expression. + load_balancing_algorithm: Algorithm to use for load balancing. + load_balancing_cookie: Cookie name to use when algorithm is set to cookie. + load_balancing_consistent_hashing: Whether to use consistent hashing. + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + upload_limit: Maximum upload bandwidth in bytes per second. + download_limit: Maximum download bandwidth in bytes per second. + retry_count: Number of times to retry failed requests. + retry_redispatch: Whether to redispatch failed requests to another server. 
+ deny_paths: List of paths that should not be routed to the backend. + server_timeout: Timeout for requests from haproxy to backend servers in seconds. + connect_timeout: Timeout for client requests to haproxy in seconds. + queue_timeout: Timeout for requests waiting in queue in seconds. + server_maxconn: Maximum connections per server. + http_server_close: Configure server close after request. + allow_http: Whether to allow HTTP traffic in addition to HTTPS. + Warning: enabling HTTP is a security risk, + make sure you apply the necessary precautions. + external_grpc_port: Optional external gRPC port. + + Returns: + dict: A dictionary containing the complete application data structure. + """ + # Apply default value to list parameters to avoid problems with mutable default args. + if not ports: + ports = [] + if not hosts: + hosts = [] + if not paths: + paths = [] + if not additional_hostnames: + additional_hostnames = [] + if not path_rewrite_expressions: + path_rewrite_expressions = [] + if not query_rewrite_expressions: + query_rewrite_expressions = [] + if not header_rewrite_expressions: + header_rewrite_expressions = [] + if not deny_paths: + deny_paths = [] + + application_data: dict[str, Any] = { + "service": service, + "ports": ports, + "protocol": protocol, + "hosts": hosts, + "paths": paths, + "hostname": hostname, + "additional_hostnames": additional_hostnames, + "load_balancing": { + "algorithm": load_balancing_algorithm, + "cookie": load_balancing_cookie, + "consistent_hashing": load_balancing_consistent_hashing, + }, + "timeout": { + "server": server_timeout, + "connect": connect_timeout, + "queue": queue_timeout, + }, + "bandwidth_limit": { + "download": download_limit, + "upload": upload_limit, + }, + "deny_paths": deny_paths, + "server_maxconn": server_maxconn, + "rewrites": self._generate_rewrite_configuration( + path_rewrite_expressions, + query_rewrite_expressions, + header_rewrite_expressions, + ), + "http_server_close": http_server_close, + 
"allow_http": allow_http, + "external_grpc_port": external_grpc_port, + } + + if allow_http: + logger.warning( + "HTTP traffic is allowed alongside HTTPS. " + "This is a security risk, make sure you apply the necessary precautions." + ) + + if check := self._generate_server_healthcheck_configuration( + check_interval, check_rise, check_fall, check_path, check_port + ): + application_data["check"] = check + + if rate_limit := self._generate_rate_limit_configuration( + rate_limit_connections_per_minute, rate_limit_policy + ): + application_data["rate_limit"] = rate_limit + + if retry := self._generate_retry_configuration(retry_count, retry_redispatch): + application_data["retry"] = retry + return application_data + + def _generate_server_healthcheck_configuration( + self, + interval: Optional[int], + rise: Optional[int], + fall: Optional[int], + path: Optional[str], + port: Optional[int], + ) -> dict[str, int | Optional[str]]: + """Generate configuration for server health checks. + + Args: + interval: Time between health checks in seconds. + rise: Number of successful checks before marking server as up. + fall: Number of failed checks before marking server as down. + path: The path to use for health checks. + port: The port to use for http-check. + + Returns: + dict[str, int | Optional[str]]: Health check configuration dictionary. + """ + server_healthcheck_configuration: dict[str, int | Optional[str]] = {} + if interval and rise and fall: + server_healthcheck_configuration = { + "interval": interval, + "rise": rise, + "fall": fall, + "path": path, + "port": port, + } + return server_healthcheck_configuration + + def _generate_rewrite_configuration( + self, + path_rewrite_expressions: list[str], + query_rewrite_expressions: list[str], + header_rewrite_expressions: list[tuple[str, str]], + ) -> list[dict[str, str | HaproxyRewriteMethod]]: + """Generate rewrite configuration from provided expressions. 
+ + Args: + path_rewrite_expressions: List of path rewrite expressions. + query_rewrite_expressions: List of query rewrite expressions. + header_rewrite_expressions: List of header name and expression tuples. + + Returns: + list[dict[str, str]]: List of generated rewrite configurations. + """ + # rewrite configuration + rewrite_configurations: list[dict[str, str | HaproxyRewriteMethod]] = [] + for expression in path_rewrite_expressions: + rewrite_configurations.append( + {"method": HaproxyRewriteMethod.SET_PATH, "expression": expression} + ) + for expression in query_rewrite_expressions: + rewrite_configurations.append( + {"method": HaproxyRewriteMethod.SET_QUERY, "expression": expression} + ) + for header, expression in header_rewrite_expressions: + rewrite_configurations.append( + { + "method": HaproxyRewriteMethod.SET_HEADER, + "expression": expression, + "header": header, + } + ) + return rewrite_configurations + + def _generate_rate_limit_configuration( + self, rate_limit_connections_per_minute: Optional[int], rate_limit_policy: RateLimitPolicy + ) -> dict[str, Any]: + """Generate rate limit configuration. + + Args: + rate_limit_connections_per_minute: Maximum connections allowed per minute. + rate_limit_policy: Policy to apply when rate limit is reached. + + Returns: + dict[str, Any]: Rate limit configuration, or empty dict if no limits are set. + """ + rate_limit_configuration = {} + if rate_limit_connections_per_minute: + rate_limit_configuration = { + "connections_per_minute": rate_limit_connections_per_minute, + "policy": rate_limit_policy, + } + return rate_limit_configuration + + def _generate_retry_configuration( + self, count: Optional[int], redispatch: bool + ) -> dict[str, Any]: + """Generate retry configuration. + + Args: + count: Number of times to retry failed requests. + redispatch: Whether to redispatch failed requests to another server. + + Returns: + dict[str, Any]: Retry configuration dictionary, or empty dict if retry not configured. 
+ """ + retry_configuration = {} + if count: + retry_configuration = { + "count": count, + "redispatch": redispatch, + } + return retry_configuration + + def update_relation_data(self) -> None: + """Update both application and unit data in the relation.""" + if not self._application_data.get("service") and not self._application_data.get("ports"): + logger.warning("Required field(s) are missing, skipping update of the relation data.") + return + + if relation := self.relation: + self._update_application_data(relation) + self._update_unit_data(relation) + + def _update_application_data(self, relation: Relation) -> None: + """Update application data in the relation databag. + + Args: + relation: The relation instance. + """ + if self.charm.unit.is_leader(): + application_data = self._prepare_application_data() + application_data.dump(relation.data[self.app], clear=True) + + def _update_unit_data(self, relation: Relation) -> None: + """Prepare and update the unit data in the relation databag. + + Args: + relation: The relation instance. + """ + unit_data = self._prepare_unit_data() + unit_data.dump(relation.data[self.charm.unit], clear=True) + + def _prepare_application_data(self) -> RequirerApplicationData: + """Prepare and validate the application data. + + Raises: + DataValidationError: When validation of application data fails. + + Returns: + RequirerApplicationData: The validated application data model. + """ + try: + return cast( + RequirerApplicationData, RequirerApplicationData.from_dict(self._application_data) + ) + except ValidationError as exc: + logger.error("Validation error when preparing requirer application data.") + raise DataValidationError( + "Validation error when preparing requirer application data." + ) from exc + + def _prepare_unit_data(self) -> RequirerUnitData: + """Prepare and validate unit data. + + Raises: + DataValidationError: When no address or unit IP is available. + + Returns: + RequirerUnitData: The validated unit data model. 
+ """ + address = self._unit_address + if not address: + network_binding = self.charm.model.get_binding(self._relation_name) + if ( + network_binding is not None + and (bind_address := network_binding.network.bind_address) is not None + ): + address = str(bind_address) + else: + logger.error("No unit IP available.") + raise DataValidationError("No unit IP available.") + return RequirerUnitData(address=cast(IPvAnyAddress, address)) + + def get_proxied_endpoints(self) -> list[AnyHttpUrl]: + """The full ingress URL to reach the current unit. + + Returns: + The provider URL or None if the URL isn't available yet or is not valid. + """ + relation = self.relation + if not relation or not relation.app: + return [] + + # Fetch the provider's app databag + try: + databag = relation.data[relation.app] + except ModelError: + logger.exception("Error reading remote app data.") + return [] + + if not databag: # not ready yet + return [] + + try: + provider_data = cast( + HaproxyRouteProviderAppData, HaproxyRouteProviderAppData.load(databag) + ) + return provider_data.endpoints + except DataValidationError: + logger.exception("Invalid provider url.") + return [] diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py new file mode 100644 index 000000000..167eff41b --- /dev/null +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -0,0 +1,61 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""TODO: Add a proper docstring here. + +This is a placeholder docstring for this charm library. Docstrings are +presented on Charmhub and updated whenever you push a new version of the +library. + +Complete documentation about creating and documenting libraries can be found +in the SDK docs at https://juju.is/docs/sdk/libraries. 
+ +See `charmcraft publish-lib` and `charmcraft fetch-lib` for details of how to +share and consume charm libraries. They serve to enhance collaboration +between charmers. Use a charmer's libraries for classes that handle +integration with their charm. + +Bear in mind that new revisions of the different major API versions (v0, v1, +v2 etc) are maintained independently. You can continue to update v0 and v1 +after you have pushed v3. + +Markdown is supported, following the CommonMark specification. +""" + +from charms.haproxy.v2.haproxy_route import RequirerApplicationData +from pydantic import Field, IPvAnyAddress +from pydantic.dataclasses import dataclass + +# The unique Charmhub library identifier, never change it +LIBID = "24c99d77895e481d8661288f95884ee4" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 1 + + +@dataclass +class HaproxyRoutePolicyData: + """Dataclass to store the data for the haproxy-route-policy interface.""" + + requests: list[RequirerApplicationData] + + +class RequirerUnitData(_DatabagModel): + """haproxy-route requirer unit data. + + Attributes: + address: IP address of the unit. 
+ """ + + address: IPvAnyAddress = Field(description="IP address of the unit.") + + +@dataclass +class HaproxyRoutePolicyProviderData(HaproxyRoutePolicyData): + """Dataclass to store the data for the haproxy-route-policy provider interface.""" + + units_data: list[RequirerUnitData] From e80bcb79e9455d7216aa77cee19fd494224f67a6 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 3 Apr 2026 22:41:13 +0200 Subject: [PATCH 142/201] bootstrap haproxy-route-policy lib --- haproxy-route-policy-operator/charmcraft.yaml | 2 - .../lib/charms/haproxy/v2/haproxy_route.py | 1609 ----------------- .../v0/haproxy_route_policy.py | 354 +++- .../unit/test_haproxy_route_policy_lib.py | 168 ++ 4 files changed, 489 insertions(+), 1644 deletions(-) delete mode 100644 haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py create mode 100644 haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 9d6035fb1..97c4cc1cc 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -54,8 +54,6 @@ actions: charm-libs: - lib: data_platform_libs.data_interfaces version: "0" - - lib: haproxy.haproxy_route - version: "2" peers: haproxy-route-policy-peer: diff --git a/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py b/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py deleted file mode 100644 index 795c815ad..000000000 --- a/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py +++ /dev/null @@ -1,1609 +0,0 @@ -# pylint: disable=too-many-lines -"""Haproxy-route interface library. - -## Getting Started - -To get started using the library, you just need to fetch the library using `charmcraft`. 
- -```shell -cd some-charm -charmcraft fetch-lib charms.haproxy.v2.haproxy_route -``` - -In the `metadata.yaml` of the charm, add the following: - -```yaml -requires: - backend: - interface: haproxy-route - limit: 1 -``` - -Then, to initialise the library: - -```python -from charms.haproxy.v2.haproxy_route import HaproxyRouteRequirer - -class SomeCharm(CharmBase): - def __init__(self, *args): - # ... - - # There are 2 ways you can use the requirer implementation: - # 1. To initialize the requirer with parameters: - self.haproxy_route_requirer = HaproxyRouteRequirer(self, - relation_name=, - service=, - ports=, - protocol=, - hosts=, - paths=, - hostname=, - additional_hostnames=, - check_interval=, - check_rise=, - check_fall=, - check_path=, - check_port=, - path_rewrite_expressions=, list of path rewrite expressions, - query_rewrite_expressions=, list of query rewrite expressions, - header_rewrite_expressions=, list of (header_name, rewrite_expression), - load_balancing_algorithm=, defaults to "leastconn", - load_balancing_cookie=, only used when load_balancing_algorithm is cookie - load_balancing_consistent_hashing=, to enable consistent hashing, - defaults to False, - rate_limit_connections_per_minute=, - rate_limit_policy=, - upload_limit=, - download_limit=, - retry_count=, - retry_redispatch=, - deny_paths=, - server_timeout=, - connect_timeout=, - queue_timeout=, - server_maxconn=, - unit_address=, - http_server_close=, - ) - - # 2.To initialize the requirer with no parameters, i.e - # self.haproxy_route_requirer = HaproxyRouteRequirer(self) - # This will simply initialize the requirer class and it won't perfom any action. - - # Afterwards regardless of how you initialized the requirer you can call the - # provide_haproxy_route_requirements method anywhere in your charm to update the requirer data. - # The method takes the same number of parameters as the requirer class. - # provide_haproxy_route_requirements(address=, port=, ...) 
- - self.framework.observe( - self.framework.on.config_changed, self._on_config_changed - ) - self.framework.observe( - self.haproxy_route_requirer.on.ready, self._on_endpoints_ready - ) - self.framework.observe( - self.haproxy_route_requirer.on.removed, self._on_endpoints_removed - ) - - def _on_config_changed(self, event: ConfigChangedEvent) -> None: - self.haproxy_route_requirer.provide_haproxy_route_requirements(...) - - def _on_endpoints_ready(self, _: EventBase) -> None: - # Handle endpoints ready event - ... - - def _on_endpoints_removed(self, _: EventBase) -> None: - # Handle endpoints removed event - ... - -## Using the library as the provider -The provider charm should expose the interface as shown below: -```yaml -provides: - haproxy-route: - interface: haproxy-route -``` -Note that this interface supports relating to multiple endpoints. - -Then, to initialise the library: -```python -from charms.haproxy.v2.haproxy_route import HaproxyRouteProvider - -class SomeCharm(CharmBase): - self.haproxy_route_provider = HaproxyRouteProvider(self) - self.framework.observe( - self.haproxy_route_provider.on.data_available, self._on_haproxy_route_data_available - ) - - def _on_haproxy_route_data_available(self, event: EventBase) -> None: - data = self.haproxy_route_provider.get_data(self.haproxy_route_provider.relations) - ... 
-""" - -import json -import logging -from collections import defaultdict -from enum import Enum -from functools import partial -from typing import Annotated, Any, Literal, MutableMapping, Optional, cast - -from ops import CharmBase, ModelError, RelationBrokenEvent -from ops.charm import CharmEvents -from ops.framework import EventBase, EventSource, Object -from ops.model import Relation -from pydantic import ( - AnyHttpUrl, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - IPvAnyAddress, - ValidationError, - field_validator, - model_validator, -) -from pydantic.dataclasses import dataclass -from typing_extensions import Self -from validators import domain - -# The unique Charmhub library identifier, never change it -LIBID = "08b6347482f6455486b5f5bb4dc4e6cf" - -# Increment this major API version when introducing breaking changes -LIBAPI = 2 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 1 - -logger = logging.getLogger(__name__) -HAPROXY_ROUTE_RELATION_NAME = "haproxy-route" -HAPROXY_CONFIG_INVALID_CHARACTERS = "\n\t#\\'\"\r$ " -HAPROXY_EXPR_INVALID_CHARACTERS = "\n" - - -def value_contains_invalid_characters( - invalid_characters: str, value: Optional[str] -) -> Optional[str]: - """Validate if value contains invalid config characters. - - Args: - invalid_characters: String with the list of invalid characters. - value: The value to validate. - - Raises: - ValueError: When value contains invalid characters. - - Returns: - The validated value. - """ - if value is None: - return value - - if [char for char in value if char in invalid_characters]: - raise ValueError(f"Relation data contains invalid character(s) {value}") - return value - - -def valid_domain_with_wildcard(value: str) -> str: - """Validate if value is a valid domain that can include a wildcard. - - The wildcard character (*) can't be at the TLD level, for example *.com is not valid. 
- This is supported natively by the library ( e.g domain("com") will raise a ValidationError ). - - Raises: - ValueError: When value is not a valid domain. - - Args: - value: The value to validate. - """ - fqdn = value[2:] if value.startswith("*.") else value - if not bool(domain(fqdn)): - raise ValueError(f"Invalid domain: {value}") - return value - - -VALIDSTR = Annotated[ - str, - BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_CONFIG_INVALID_CHARACTERS)), -] -VALIDEXPRSTR = Annotated[ - str, - BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_EXPR_INVALID_CHARACTERS)), -] - - -class DataValidationError(Exception): - """Raised when data validation fails.""" - - -class HaproxyRouteInvalidRelationDataError(Exception): - """Rasied when data validation of the haproxy-route relation fails.""" - - -class _DatabagModel(BaseModel): - """Base databag model. - - Attrs: - model_config: pydantic model configuration. - """ - - model_config = ConfigDict( - # tolerate additional keys in databag - extra="ignore", - # Allow instantiating this class by field name (instead of forcing alias). - populate_by_name=True, - # Custom config key: whether to nest the whole datastructure (as json) - # under a field or spread it out at the toplevel. - _NEST_UNDER=None, - ) # type: ignore - """Pydantic config.""" - - @classmethod - def load(cls, databag: MutableMapping) -> "_DatabagModel": - """Load this model from a Juju json databag. - - Args: - databag: Databag content. - - Raises: - DataValidationError: When model validation failed. - - Returns: - _DatabagModel: The validated model. 
- """ - nest_under = cls.model_config.get("_NEST_UNDER") - if nest_under: - return cls.model_validate(json.loads(databag[nest_under])) - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.model_fields.items()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. {databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.model_validate_json(json.dumps(data)) - except ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.error(str(e), exc_info=True) - raise DataValidationError(msg) from e - - @classmethod - def from_dict(cls, values: dict) -> "_DatabagModel": - """Load this model from a dict. - - Args: - values: Dict values. - - Raises: - DataValidationError: When model validation failed. - - Returns: - _DatabagModel: The validated model. - """ - try: - logger.info("Loading values from dictionary: %s", values) - return cls.model_validate(values) - except ValidationError as e: - msg = f"failed to validate: {values}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump( - self, databag: Optional[MutableMapping] = None, clear: bool = True - ) -> Optional[MutableMapping]: - """Write the contents of this model to Juju databag. - - Args: - databag: The databag to write to. - clear: Whether to clear the databag before writing. - - Returns: - MutableMapping: The databag. 
- """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - nest_under = self.model_config.get("_NEST_UNDER") - if nest_under: - databag[nest_under] = self.model_dump_json( - by_alias=True, - # skip keys whose values are default - exclude_defaults=True, - ) - return databag - - dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True) - databag.update({k: json.dumps(v) for k, v in dct.items()}) - return databag - - -class ServerHealthCheck(BaseModel): - """Configuration model for backend server health checks. - - Attributes: - interval: Number of seconds between consecutive health check attempts. - rise: Number of consecutive successful health checks required for up. - fall: Number of consecutive failed health checks required for DOWN. - path: List of URL paths to use for HTTP health checks. - port: Customize port value for http-check. - """ - - interval: Optional[int] = Field( - description="The interval (in seconds) between health checks.", default=None - ) - rise: Optional[int] = Field( - description="How many successful health checks before server is considered up.", - default=None, - ) - fall: Optional[int] = Field( - description="How many failed health checks before server is considered down.", default=None - ) - path: Optional[VALIDSTR] = Field(description="The health check path.", default=None) - port: Optional[int] = Field(description="The health check port.", default=None) - - @model_validator(mode="after") - def check_all_required_fields_set(self) -> Self: - """Check that all required fields for health check are set. - - Raises: - ValueError: When validation fails. - - Returns: - The validated model. - """ - if not bool(self.interval) == bool(self.rise) == bool(self.fall): - raise ValueError("All three of interval, rise and fall must be set.") - return self - - -# tarpit is not yet implemented -class RateLimitPolicy(Enum): - """Enum of possible rate limiting policies. 
- - Attrs: - DENY: deny a client's HTTP request to return a 403 Forbidden error. - REJECT: closes the connection immediately without sending a response. - SILENT: disconnects immediately without notifying the client - that the connection has been closed. - """ - - DENY = "deny" - REJECT = "reject" - SILENT = "silent-drop" - - -class RateLimit(BaseModel): - """Configuration model for connection rate limiting. - - Attributes: - connections_per_minute: Number of connections allowed per minute for a client. - policy: Action to take when the rate limit is exceeded. - """ - - connections_per_minute: int = Field(description="How many connections are allowed per minute.") - policy: RateLimitPolicy = Field( - description="Configure the rate limit policy.", default=RateLimitPolicy.DENY - ) - - -class LoadBalancingAlgorithm(Enum): - """Enum of possible http_route types. - - Attrs: - LEASTCONN: The server with the lowest number of connections receives the connection. - SRCIP: Load balance using the hash of The source IP address. - ROUNDROBIN: Each server is used in turns, according to their weights. - COOKIE: Load balance using hash req.cookie(clientid). - """ - - LEASTCONN = "leastconn" - SRCIP = "source" - ROUNDROBIN = "roundrobin" - COOKIE = "cookie" - - -class LoadBalancingConfiguration(BaseModel): - """Configuration model for load balancing. - - Attributes: - algorithm: Algorithm to use for load balancing. - cookie: Cookie name to use when algorithm is set to cookie. - consistent_hashing: Use consistent hashing to avoid redirection - when servers are added/removed. - """ - - algorithm: LoadBalancingAlgorithm = Field( - description="Configure the load balancing algorithm for the service.", - default=LoadBalancingAlgorithm.LEASTCONN, - ) - cookie: Optional[VALIDSTR] = Field( - description="Only used when algorithm is COOKIE. 
Define the cookie to load balance on.", - default=None, - ) - # Note: Later when the generic LoadBalancingAlgorithm.HASH is implemented this attribute - # will also apply under that mode. - consistent_hashing: bool = Field( - description=( - "Only used when the `algorithm` is SRCIP or COOKIE. " - "Use consistent hashing to avoid redirection when servers are added/removed. " - "Default is False as it usually does not give a balanced distribution." - ), - default=False, - ) - - @model_validator(mode="after") - def validate_attributes(self) -> Self: - """Check that algorithm-specific configs are only set with their respective algorithm. - - Raises: - ValueError: When validation fails in one of these cases: - 1. self.cookie is not None when self.algorithm != COOKIE - 2. self.consistent_hashing is True when algorithm is neither COOKIE nor SRCIP - - Returns: - The validated model. - """ - if self.cookie is not None and self.algorithm != LoadBalancingAlgorithm.COOKIE: - raise ValueError("cookie only applies when algorithm is COOKIE.") - - if self.consistent_hashing and self.algorithm not in [ - LoadBalancingAlgorithm.COOKIE, - LoadBalancingAlgorithm.SRCIP, - ]: - raise ValueError("Consistent hashing only applies when algorithm is COOKIE or SRCIP.") - return self - - -class BandwidthLimit(BaseModel): - """Configuration model for bandwidth rate limiting. - - Attributes: - upload: Limit upload speed (bytes per second). - download: Limit download speed (bytes per second). - """ - - upload: Optional[int] = Field(description="Upload limit (bytes per seconds).", default=None) - download: Optional[int] = Field( - description="Download limit (bytes per seconds).", default=None - ) - - -# retry-on is not yet implemented -class Retry(BaseModel): - """Configuration model for retry. - - Attributes: - count: How many times should a request retry. - redispatch: Whether to redispatch failed requests to another server. 
- """ - - count: int = Field(description="How many times should a request retry.") - redispatch: bool = Field( - description="Whether to redispatch failed requests to another server.", default=False - ) - - -class TimeoutConfiguration(BaseModel): - """Configuration model for timeout. - - Attributes: - server: Timeout for requests from haproxy to backend servers. - connect: Timeout for client requests to haproxy. - queue: Timeout for requests waiting in the queue after server-maxconn is reached. - """ - - server: int = Field( - description="Timeout (in seconds) for requests from haproxy to backend servers.", - default=60, - ) - connect: int = Field( - description="Timeout (in seconds) for client requests to haproxy.", default=60 - ) - queue: int = Field( - description="Timeout (in seconds) for requests in the queue.", - default=60, - ) - - -class HaproxyRewriteMethod(Enum): - """Enum of possible HTTP rewrite methods. - - Attrs: - SET_PATH: The server with the lowest number of connections receives the connection. - SET_QUERY: Load balance using the hash of The source IP address. - SET_HEADER: Each server is used in turns, according to their weights. - """ - - SET_PATH = "set-path" - SET_QUERY = "set-query" - SET_HEADER = "set-header" - - -class RewriteConfiguration(BaseModel): - """Configuration model for HTTP rewrite. - - Attributes: - method: Which rewrite method to apply.One of set-path, set-query, set-header. - expression: Regular expression to use with the rewrite method. - header: The name of the header to rewrited. - """ - - method: HaproxyRewriteMethod = Field( - description="Which rewrite method to apply.One of set-path, set-query, set-header." - ) - expression: VALIDEXPRSTR = Field( - description="Regular expression to use with the rewrite method." 
- ) - header: Optional[VALIDSTR] = Field( - description="The name of the header to rewrite.", default=None - ) - - -class RequirerApplicationData(_DatabagModel): - """Configuration model for HAProxy route requirer application data. - - Attributes: - service: Name of the service requesting HAProxy routing. - ports: List of port numbers on which the service is listening. - protocol: The protocol that the service speaks. - hosts: List of backend server addresses. - paths: List of URL paths to route to this service. Defaults to an empty list. - hostname: Optional: The hostname of this service. - additional_hostnames: List of additional hostnames of this service. - Defaults to an empty list. - rewrites: List of RewriteConfiguration objects defining path, query, or header - rewrite rules. - check: ServerHealthCheck configuration for monitoring backend health. - load_balancing: Configuration for the load balancing strategy. - rate_limit: Optional configuration for limiting connection rates. - bandwidth_limit: Optional configuration for limiting upload and download bandwidth. - retry: Optional configuration for request retry behavior. - deny_paths: List of URL paths that should not be routed to the backend. - timeout: Configuration for server, client, and queue timeouts. - server_maxconn: Optional maximum number of connections per server. - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. Defaults to False. - Warning: enabling HTTP is a security risk, make sure you apply the necessary precautions. - external_grpc_port: Optional external gRPC port. 
- """ - - service: VALIDSTR = Field(description="The name of the service.") - ports: list[int] = Field(description="The list of ports listening for this service.") - protocol: Literal["http", "https"] = Field( - description="The protocol that the service speaks.", - default="http", - ) - hosts: list[IPvAnyAddress] = Field( - description="The list of backend server addresses. Currently only support IP addresses.", - default=[], - ) - paths: list[VALIDSTR] = Field( - description="The list of paths to route to this service.", default=[] - ) - hostname: Optional[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = Field( - description="Hostname of this service.", default=None - ) - additional_hostnames: list[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = ( - Field(description="The list of additional hostnames of this service.", default=[]) - ) - rewrites: list[RewriteConfiguration] = Field( - description="The list of path rewrite rules.", default=[] - ) - check: Optional[ServerHealthCheck] = Field( - description="Configure health check for the service.", - default=None, - ) - load_balancing: LoadBalancingConfiguration = Field( - description="Configure loadbalancing.", default=LoadBalancingConfiguration() - ) - rate_limit: Optional[RateLimit] = Field( - description="Configure rate limit for the service.", default=None - ) - bandwidth_limit: BandwidthLimit = Field( - description="Configure bandwidth limit for the service.", default=BandwidthLimit() - ) - retry: Optional[Retry] = Field( - description="Configure retry for incoming requests.", default=None - ) - deny_paths: list[VALIDSTR] = Field( - description="Configure path that should not be routed to the backend", default=[] - ) - timeout: TimeoutConfiguration = Field( - description="Configure timeout", - default=TimeoutConfiguration(), - ) - server_maxconn: Optional[int] = Field( - description="Configure maximum connection per server", default=None - ) - http_server_close: bool = Field( - 
description="Configure server close after request", default=False - ) - allow_http: bool = Field( - description="Whether to allow HTTP traffic in addition to HTTPS.", default=False - ) - external_grpc_port: int | None = Field( - description="Optional external gRPC port.", default=None, gt=0, le=65535 - ) - - @field_validator("load_balancing") - @classmethod - def validate_load_balancing_configuration( - cls, configuration: LoadBalancingConfiguration - ) -> LoadBalancingConfiguration: - """Validate the parsed load balancing configuration. - - Args: - configuration: The configuration to validate. - - Raises: - ValueError: When cookie is not set under COOKIE load balancing mode. - - Returns: - LoadBalancingConfiguration: The validated configuration. - """ - if configuration.algorithm == LoadBalancingAlgorithm.COOKIE and not configuration.cookie: - raise ValueError("cookie must be set if load balacing algorithm is COOKIE.") - return configuration - - @field_validator("rewrites") - @classmethod - def validate_rewrites(cls, rewrites: list[RewriteConfiguration]) -> list[RewriteConfiguration]: - """Validate the parsed list of rewrite configurations. - - Args: - rewrites: The configurations to validate. - - Raises: - ValueError: When header is not set under SET_HEADER rewrite method. - - Returns: - list[RewriteConfiguration]: The validated configurations. - """ - for rewrite in rewrites: - if rewrite.method == HaproxyRewriteMethod.SET_HEADER and not rewrite.method: - raise ValueError("header must be set if rewrite method is SET_HEADER.") - return rewrites - - -class HaproxyRouteProviderAppData(_DatabagModel): - """haproxy-route provider databag schema. - - Attributes: - endpoints: The list of proxied endpoints that maps to the backend. - """ - - endpoints: list[AnyHttpUrl] - - -class RequirerUnitData(_DatabagModel): - """haproxy-route requirer unit data. - - Attributes: - address: IP address of the unit. 
- """ - - address: IPvAnyAddress = Field(description="IP address of the unit.") - - -@dataclass -class HaproxyRouteRequirerData: - """haproxy-route requirer data. - - Attributes: - relation_id: Id of the relation. - application_data: Application data. - units_data: Units data - """ - - relation_id: int - application_data: RequirerApplicationData - units_data: list[RequirerUnitData] - - -@dataclass -class HaproxyRouteRequirersData: - """haproxy-route requirers data. - - Attributes: - requirers_data: List of requirer data. - relation_ids_with_invalid_data: Set of relation ids that contains invalid data. - """ - - requirers_data: list[HaproxyRouteRequirerData] - relation_ids_with_invalid_data: set[int] - - @model_validator(mode="after") - def check_services_unique(self) -> Self: - """Check that requirers define unique services. - - Raises: - DataValidationError: When requirers declared duplicate services. - - Returns: - The validated model. - """ - services = [ - requirer_data.application_data.service for requirer_data in self.requirers_data - ] - if len(services) != len(set(services)): - raise DataValidationError("Services declaration by requirers must be unique.") - - return self - - @model_validator(mode="after") - def check_external_grpc_port_unique(self) -> Self: - """Check that external gRPC ports are unique across requirer applications. - If multiple requirer applications declare the same external gRPC port, - their relation ids are added to relation_ids_with_invalid_data. - - Returns: - The validated model. 
- """ - relation_ids_per_port: dict[int, list[int]] = defaultdict(list[int]) - for requirer_data in self.requirers_data: - if requirer_data.application_data.external_grpc_port: - relation_ids_per_port[requirer_data.application_data.external_grpc_port].append( - requirer_data.relation_id - ) - - self.relation_ids_with_invalid_data.update( - relation_id - for relation_ids in relation_ids_per_port.values() - for relation_id in relation_ids - if len(relation_ids) > 1 - ) - return self - - @model_validator(mode="after") - def check_grpc_requires_https(self) -> Self: - """Check that backends with external_grpc_port use https protocol. - If not, their relation ids are added to relation_ids_with_invalid_data. - - Returns: - Self: The validated model - """ - for requirer_data in self.requirers_data: - if all( - [ - requirer_data.application_data.external_grpc_port is not None, - requirer_data.application_data.protocol != "https", - requirer_data.relation_id, - ] - ): - self.relation_ids_with_invalid_data.add(requirer_data.relation_id) - return self - - -class HaproxyRouteDataAvailableEvent(EventBase): - """HaproxyRouteDataAvailableEvent custom event. - - This event indicates that the requirers data are available. - """ - - -class HaproxyRouteDataRemovedEvent(EventBase): - """HaproxyRouteDataRemovedEvent custom event. - - This event indicates that one of the endpoints was removed. - """ - - -class HaproxyRouteProviderEvents(CharmEvents): - """List of events that the TLS Certificates requirer charm can leverage. - - Attributes: - data_available: This event indicates that - the haproxy-route endpoints are available. - data_removed: This event indicates that one of the endpoints was removed. - """ - - data_available = EventSource(HaproxyRouteDataAvailableEvent) - data_removed = EventSource(HaproxyRouteDataRemovedEvent) - - -class HaproxyRouteProvider(Object): - """Haproxy-route interface provider implementation. - - Attributes: - on: Custom events of the provider. 
- relations: Related appliations. - """ - - on = HaproxyRouteProviderEvents() - - def __init__( - self, - charm: CharmBase, - relation_name: str = HAPROXY_ROUTE_RELATION_NAME, - raise_on_validation_error: bool = False, - ) -> None: - """Initialize the HaproxyRouteProvider. - - Args: - charm: The charm that is instantiating the library. - relation_name: The name of the relation. - raise_on_validation_error: Whether the library should raise - HaproxyRouteInvalidRelationDataError when requirer data validation fails. - If this is set to True the provider charm needs to also catch and handle the - thrown exception. - """ - super().__init__(charm, relation_name) - - self._relation_name = relation_name - self.charm = charm - self.raise_on_validation_error = raise_on_validation_error - on = self.charm.on - self.framework.observe(on[self._relation_name].relation_created, self._configure) - self.framework.observe(on[self._relation_name].relation_changed, self._configure) - self.framework.observe(on[self._relation_name].relation_broken, self._on_endpoint_removed) - self.framework.observe( - on[self._relation_name].relation_departed, self._on_endpoint_removed - ) - - @property - def relations(self) -> list[Relation]: - """The list of Relation instances associated with this endpoint.""" - return list(self.charm.model.relations[self._relation_name]) - - def _configure(self, _event: EventBase) -> None: - """Handle relation events.""" - if relations := self.relations: - # Only for data validation - _ = self.get_data(relations) - self.on.data_available.emit() - - def _on_endpoint_removed(self, _: EventBase) -> None: - """Handle relation broken/departed events.""" - self.on.data_removed.emit() - - def get_data(self, relations: list[Relation]) -> HaproxyRouteRequirersData: - """Fetch requirer data. - - Args: - relations: A list of Relation instances to fetch data from. - - Raises: - HaproxyRouteInvalidRelationDataError: When requirer data validation fails. 
- - Returns: - HaproxyRouteRequirersData: Validated data from all haproxy-route requirers. - """ - requirers_data: list[HaproxyRouteRequirerData] = [] - relation_ids_with_invalid_data: set[int] = set() - for relation in relations: - try: - application_data = self._get_requirer_application_data(relation) - units_data = self._get_requirer_units_data(relation) - haproxy_route_requirer_data = HaproxyRouteRequirerData( - application_data=application_data, - units_data=units_data, - relation_id=relation.id, - ) - requirers_data.append(haproxy_route_requirer_data) - except DataValidationError as exc: - if self.raise_on_validation_error: - logger.error( - "haproxy-route data validation failed for relation %s: %s", - relation, - str(exc), - ) - raise HaproxyRouteInvalidRelationDataError( - f"haproxy-route data validation failed for relation: {relation}" - ) from exc - relation_ids_with_invalid_data.add(relation.id) - continue - return HaproxyRouteRequirersData( - requirers_data=requirers_data, - relation_ids_with_invalid_data=relation_ids_with_invalid_data, - ) - - def _get_requirer_units_data(self, relation: Relation) -> list[RequirerUnitData]: - """Fetch and validate the requirer's units data. - - Args: - relation: The relation to fetch unit data from. - - Raises: - DataValidationError: When unit data validation fails. - - Returns: - list[RequirerUnitData]: List of validated unit data from the requirer. - """ - requirer_units_data: list[RequirerUnitData] = [] - - for unit in relation.units: - databag = relation.data.get(unit) - if not databag: - logger.error( - "Requirer unit data does not exist even though the unit is still present." 
- ) - continue - try: - data = cast(RequirerUnitData, RequirerUnitData.load(databag)) - requirer_units_data.append(data) - except DataValidationError: - logger.error("Invalid requirer application data for %s", unit) - raise - return requirer_units_data - - def _get_requirer_application_data(self, relation: Relation) -> RequirerApplicationData: - """Fetch and validate the requirer's application databag. - - Args: - relation: The relation to fetch application data from. - - Raises: - DataValidationError: When requirer application data validation fails. - - Returns: - RequirerApplicationData: Validated application data from the requirer. - """ - try: - return cast( - RequirerApplicationData, RequirerApplicationData.load(relation.data[relation.app]) - ) - except DataValidationError: - logger.error("Invalid requirer application data for %s", relation.app.name) - raise - - def publish_proxied_endpoints(self, endpoints: list[str], relation: Relation) -> None: - """Publish to the app databag the proxied endpoints. - - Args: - endpoints: The list of proxied endpoints to publish. - relation: The relation with the requirer application. - """ - HaproxyRouteProviderAppData(endpoints=[cast(AnyHttpUrl, e) for e in endpoints]).dump( - relation.data[self.charm.app], clear=True - ) - - -class HaproxyRouteEnpointsReadyEvent(EventBase): - """HaproxyRouteEnpointsReadyEvent custom event.""" - - -class HaproxyRouteEndpointsRemovedEvent(EventBase): - """HaproxyRouteEndpointsRemovedEvent custom event.""" - - -class HaproxyRouteRequirerEvents(CharmEvents): - """List of events that the TLS Certificates requirer charm can leverage. - - Attributes: - ready: when the provider proxied endpoints are ready. - removed: when the provider - """ - - ready = EventSource(HaproxyRouteEnpointsReadyEvent) - removed = EventSource(HaproxyRouteEndpointsRemovedEvent) - - -class HaproxyRouteRequirer(Object): - """haproxy-route interface requirer implementation. 
- - Attributes: - on: Custom events of the requirer. - """ - - on = HaproxyRouteRequirerEvents() - - # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals - def __init__( - self, - charm: CharmBase, - relation_name: str, - service: Optional[str] = None, - ports: Optional[list[int]] = None, - protocol: Literal["http", "https"] = "http", - hosts: Optional[list[IPvAnyAddress]] = None, - paths: Optional[list[str]] = None, - hostname: Optional[str] = None, - additional_hostnames: Optional[list[str]] = None, - check_interval: Optional[int] = None, - check_rise: Optional[int] = None, - check_fall: Optional[int] = None, - check_path: Optional[str] = None, - check_port: Optional[int] = None, - path_rewrite_expressions: Optional[list[str]] = None, - query_rewrite_expressions: Optional[list[str]] = None, - header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, - load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, - load_balancing_cookie: Optional[str] = None, - load_balancing_consistent_hashing: bool = False, - rate_limit_connections_per_minute: Optional[int] = None, - rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, - upload_limit: Optional[int] = None, - download_limit: Optional[int] = None, - retry_count: Optional[int] = None, - retry_redispatch: bool = False, - deny_paths: Optional[list[str]] = None, - server_timeout: int = 60, - connect_timeout: int = 60, - queue_timeout: int = 60, - server_maxconn: Optional[int] = None, - unit_address: Optional[str] = None, - http_server_close: bool = False, - allow_http: bool = False, - ) -> None: - """Initialize the HaproxyRouteRequirer. - - Args: - charm: The charm that is instantiating the library. - relation_name: The name of the relation to bind to. - service: The name of the service to route traffic to. - ports: List of ports the service is listening on. - protocol: The protocol that the service speaks. - hosts: List of backend server addresses. 
Currently only support IP addresses. - paths: List of URL paths to route to this service. - hostname: Hostname of this service. - additional_hostnames: Additional hostnames of this service. - check_interval: Interval between health checks in seconds. - check_rise: Number of successful health checks before server is considered up. - check_fall: Number of failed health checks before server is considered down. - check_path: The path to use for server health checks. - check_port: The port to use for http-check. - path_rewrite_expressions: List of regex expressions for path rewrites. - query_rewrite_expressions: List of regex expressions for query rewrites. - header_rewrite_expressions: List of tuples containing header name - and rewrite expression. - load_balancing_algorithm: Algorithm to use for load balancing. - load_balancing_cookie: Cookie name to use when algorithm is set to cookie. - load_balancing_consistent_hashing: Whether to use consistent hashing. - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - upload_limit: Maximum upload bandwidth in bytes per second. - download_limit: Maximum download bandwidth in bytes per second. - retry_count: Number of times to retry failed requests. - retry_redispatch: Whether to redispatch failed requests to another server. - deny_paths: List of paths that should not be routed to the backend. - server_timeout: Timeout for requests from haproxy to backend servers in seconds. - connect_timeout: Timeout for client requests to haproxy in seconds. - queue_timeout: Timeout for requests waiting in queue in seconds. - server_maxconn: Maximum connections per server. - unit_address: IP address of the unit (if not provided, will use binding address). - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. 
- Warning: enabling HTTP is a security risk, - make sure you apply the necessary precautions. - """ - super().__init__(charm, relation_name) - - self._relation_name = relation_name - self.relation = self.model.get_relation(self._relation_name) - self.charm = charm - self.app = self.charm.app - - # build the full application data - self._application_data = self._generate_application_data( - service, - ports, - protocol, - hosts, - paths, - hostname, - additional_hostnames, - check_interval, - check_rise, - check_fall, - check_path, - check_port, - path_rewrite_expressions, - query_rewrite_expressions, - header_rewrite_expressions, - load_balancing_algorithm, - load_balancing_cookie, - load_balancing_consistent_hashing, - rate_limit_connections_per_minute, - rate_limit_policy, - upload_limit, - download_limit, - retry_count, - retry_redispatch, - deny_paths, - server_timeout, - connect_timeout, - queue_timeout, - server_maxconn, - http_server_close, - allow_http, - ) - self._unit_address = unit_address - - on = self.charm.on - self.framework.observe(on[self._relation_name].relation_created, self._configure) - self.framework.observe(on[self._relation_name].relation_changed, self._configure) - self.framework.observe(on[self._relation_name].relation_broken, self._on_relation_broken) - - def _configure(self, _: EventBase) -> None: - """Handle relation events.""" - self.update_relation_data() - if self.relation and self.get_proxied_endpoints(): - # This event is only emitted when the provider databag changes - # which only happens when relevant changes happened - # Additionally this event is purely informational and it's up to the requirer to - # fetch the proxied endpoints in their code using get_proxied_endpoints - self.on.ready.emit() - - def _on_relation_broken(self, _: RelationBrokenEvent) -> None: - """Handle relation broken event.""" - self.on.removed.emit() - - # pylint: disable=too-many-arguments,too-many-positional-arguments - def 
provide_haproxy_route_requirements( - self, - service: str, - ports: list[int], - protocol: Literal["http", "https"] = "http", - hosts: Optional[list[IPvAnyAddress]] = None, - paths: Optional[list[str]] = None, - hostname: Optional[str] = None, - additional_hostnames: Optional[list[str]] = None, - check_interval: Optional[int] = None, - check_rise: Optional[int] = None, - check_fall: Optional[int] = None, - check_path: Optional[str] = None, - check_port: Optional[int] = None, - path_rewrite_expressions: Optional[list[str]] = None, - query_rewrite_expressions: Optional[list[str]] = None, - header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, - load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, - load_balancing_cookie: Optional[str] = None, - load_balancing_consistent_hashing: bool = False, - rate_limit_connections_per_minute: Optional[int] = None, - rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, - upload_limit: Optional[int] = None, - download_limit: Optional[int] = None, - retry_count: Optional[int] = None, - retry_redispatch: bool = False, - deny_paths: Optional[list[str]] = None, - server_timeout: int = 60, - connect_timeout: int = 60, - queue_timeout: int = 60, - server_maxconn: Optional[int] = None, - unit_address: Optional[str] = None, - http_server_close: bool = False, - allow_http: bool = False, - external_grpc_port: Optional[int] = None, - ) -> None: - """Update haproxy-route requirements data in the relation. - - Args: - service: The name of the service to route traffic to. - ports: List of ports the service is listening on. - protocol: The protocol that the serive speaks, deafults to "http". - hosts: List of backend server addresses. Currently only support IP addresses. - paths: List of URL paths to route to this service. - hostname: Hostname of this service. - additional_hostnames: Additional hostnames of this service. - check_interval: Interval between health checks in seconds. 
- check_rise: Number of successful health checks before server is considered up. - check_fall: Number of failed health checks before server is considered down. - check_path: The path to use for server health checks. - check_port: The port to use for http-check. - path_rewrite_expressions: List of regex expressions for path rewrites. - query_rewrite_expressions: List of regex expressions for query rewrites. - header_rewrite_expressions: List of tuples containing header name - and rewrite expression. - load_balancing_algorithm: Algorithm to use for load balancing. - load_balancing_cookie: Cookie name to use when algorithm is set to cookie. - load_balancing_consistent_hashing: Whether to use consistent hashing. - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - upload_limit: Maximum upload bandwidth in bytes per second. - download_limit: Maximum download bandwidth in bytes per second. - retry_count: Number of times to retry failed requests. - retry_redispatch: Whether to redispatch failed requests to another server. - deny_paths: List of paths that should not be routed to the backend. - server_timeout: Timeout for requests from haproxy to backend servers in seconds. - connect_timeout: Timeout for client requests to haproxy in seconds. - queue_timeout: Timeout for requests waiting in queue in seconds. - server_maxconn: Maximum connections per server. - unit_address: IP address of the unit (if not provided, will use binding address). - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. - Warning: enabling HTTP is a security risk, - make sure you apply the necessary precautions. - external_grpc_port: Optional external gRPC port. 
- """ - self._unit_address = unit_address - self._application_data = self._generate_application_data( - service, - ports, - protocol, - hosts, - paths, - hostname, - additional_hostnames, - check_interval, - check_rise, - check_fall, - check_path, - check_port, - path_rewrite_expressions, - query_rewrite_expressions, - header_rewrite_expressions, - load_balancing_algorithm, - load_balancing_cookie, - load_balancing_consistent_hashing, - rate_limit_connections_per_minute, - rate_limit_policy, - upload_limit, - download_limit, - retry_count, - retry_redispatch, - deny_paths, - server_timeout, - connect_timeout, - queue_timeout, - server_maxconn, - http_server_close, - allow_http, - external_grpc_port, - ) - self.update_relation_data() - - # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals - def _generate_application_data( # noqa: C901 - self, - service: Optional[str] = None, - ports: Optional[list[int]] = None, - protocol: Literal["http", "https"] = "http", - hosts: Optional[list[IPvAnyAddress]] = None, - paths: Optional[list[str]] = None, - hostname: Optional[str] = None, - additional_hostnames: Optional[list[str]] = None, - check_interval: Optional[int] = None, - check_rise: Optional[int] = None, - check_fall: Optional[int] = None, - check_path: Optional[str] = None, - check_port: Optional[int] = None, - path_rewrite_expressions: Optional[list[str]] = None, - query_rewrite_expressions: Optional[list[str]] = None, - header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, - load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, - load_balancing_cookie: Optional[str] = None, - load_balancing_consistent_hashing: bool = False, - rate_limit_connections_per_minute: Optional[int] = None, - rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, - upload_limit: Optional[int] = None, - download_limit: Optional[int] = None, - retry_count: Optional[int] = None, - retry_redispatch: bool = False, - 
deny_paths: Optional[list[str]] = None, - server_timeout: int = 60, - connect_timeout: int = 60, - queue_timeout: int = 60, - server_maxconn: Optional[int] = None, - http_server_close: bool = False, - allow_http: bool = False, - external_grpc_port: Optional[int] = None, - ) -> dict[str, Any]: - """Generate the complete application data structure. - - Args: - service: The name of the service to route traffic to. - ports: List of ports the service is listening on. - protocol: The protocol that the service speaks. - hosts: List of backend server addresses. Currently only support IP addresses. - paths: List of URL paths to route to this service. - hostname: Hostname of this service. - additional_hostnames: Additional hostnames of this service. - check_interval: Interval between health checks in seconds. - check_rise: Number of successful health checks before server is considered up. - check_fall: Number of failed health checks before server is considered down. - check_path: The path to use for server health checks. - check_port: The port to use for http-check. - path_rewrite_expressions: List of regex expressions for path rewrites. - query_rewrite_expressions: List of regex expressions for query rewrites. - header_rewrite_expressions: List of tuples containing header name and - rewrite expression. - load_balancing_algorithm: Algorithm to use for load balancing. - load_balancing_cookie: Cookie name to use when algorithm is set to cookie. - load_balancing_consistent_hashing: Whether to use consistent hashing. - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - upload_limit: Maximum upload bandwidth in bytes per second. - download_limit: Maximum download bandwidth in bytes per second. - retry_count: Number of times to retry failed requests. - retry_redispatch: Whether to redispatch failed requests to another server. 
- deny_paths: List of paths that should not be routed to the backend. - server_timeout: Timeout for requests from haproxy to backend servers in seconds. - connect_timeout: Timeout for client requests to haproxy in seconds. - queue_timeout: Timeout for requests waiting in queue in seconds. - server_maxconn: Maximum connections per server. - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. - Warning: enabling HTTP is a security risk, - make sure you apply the necessary precautions. - external_grpc_port: Optional external gRPC port. - - Returns: - dict: A dictionary containing the complete application data structure. - """ - # Apply default value to list parameters to avoid problems with mutable default args. - if not ports: - ports = [] - if not hosts: - hosts = [] - if not paths: - paths = [] - if not additional_hostnames: - additional_hostnames = [] - if not path_rewrite_expressions: - path_rewrite_expressions = [] - if not query_rewrite_expressions: - query_rewrite_expressions = [] - if not header_rewrite_expressions: - header_rewrite_expressions = [] - if not deny_paths: - deny_paths = [] - - application_data: dict[str, Any] = { - "service": service, - "ports": ports, - "protocol": protocol, - "hosts": hosts, - "paths": paths, - "hostname": hostname, - "additional_hostnames": additional_hostnames, - "load_balancing": { - "algorithm": load_balancing_algorithm, - "cookie": load_balancing_cookie, - "consistent_hashing": load_balancing_consistent_hashing, - }, - "timeout": { - "server": server_timeout, - "connect": connect_timeout, - "queue": queue_timeout, - }, - "bandwidth_limit": { - "download": download_limit, - "upload": upload_limit, - }, - "deny_paths": deny_paths, - "server_maxconn": server_maxconn, - "rewrites": self._generate_rewrite_configuration( - path_rewrite_expressions, - query_rewrite_expressions, - header_rewrite_expressions, - ), - "http_server_close": http_server_close, - 
"allow_http": allow_http, - "external_grpc_port": external_grpc_port, - } - - if allow_http: - logger.warning( - "HTTP traffic is allowed alongside HTTPS. " - "This is a security risk, make sure you apply the necessary precautions." - ) - - if check := self._generate_server_healthcheck_configuration( - check_interval, check_rise, check_fall, check_path, check_port - ): - application_data["check"] = check - - if rate_limit := self._generate_rate_limit_configuration( - rate_limit_connections_per_minute, rate_limit_policy - ): - application_data["rate_limit"] = rate_limit - - if retry := self._generate_retry_configuration(retry_count, retry_redispatch): - application_data["retry"] = retry - return application_data - - def _generate_server_healthcheck_configuration( - self, - interval: Optional[int], - rise: Optional[int], - fall: Optional[int], - path: Optional[str], - port: Optional[int], - ) -> dict[str, int | Optional[str]]: - """Generate configuration for server health checks. - - Args: - interval: Time between health checks in seconds. - rise: Number of successful checks before marking server as up. - fall: Number of failed checks before marking server as down. - path: The path to use for health checks. - port: The port to use for http-check. - - Returns: - dict[str, int | Optional[str]]: Health check configuration dictionary. - """ - server_healthcheck_configuration: dict[str, int | Optional[str]] = {} - if interval and rise and fall: - server_healthcheck_configuration = { - "interval": interval, - "rise": rise, - "fall": fall, - "path": path, - "port": port, - } - return server_healthcheck_configuration - - def _generate_rewrite_configuration( - self, - path_rewrite_expressions: list[str], - query_rewrite_expressions: list[str], - header_rewrite_expressions: list[tuple[str, str]], - ) -> list[dict[str, str | HaproxyRewriteMethod]]: - """Generate rewrite configuration from provided expressions. 
- - Args: - path_rewrite_expressions: List of path rewrite expressions. - query_rewrite_expressions: List of query rewrite expressions. - header_rewrite_expressions: List of header name and expression tuples. - - Returns: - list[dict[str, str]]: List of generated rewrite configurations. - """ - # rewrite configuration - rewrite_configurations: list[dict[str, str | HaproxyRewriteMethod]] = [] - for expression in path_rewrite_expressions: - rewrite_configurations.append( - {"method": HaproxyRewriteMethod.SET_PATH, "expression": expression} - ) - for expression in query_rewrite_expressions: - rewrite_configurations.append( - {"method": HaproxyRewriteMethod.SET_QUERY, "expression": expression} - ) - for header, expression in header_rewrite_expressions: - rewrite_configurations.append( - { - "method": HaproxyRewriteMethod.SET_HEADER, - "expression": expression, - "header": header, - } - ) - return rewrite_configurations - - def _generate_rate_limit_configuration( - self, rate_limit_connections_per_minute: Optional[int], rate_limit_policy: RateLimitPolicy - ) -> dict[str, Any]: - """Generate rate limit configuration. - - Args: - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - - Returns: - dict[str, Any]: Rate limit configuration, or empty dict if no limits are set. - """ - rate_limit_configuration = {} - if rate_limit_connections_per_minute: - rate_limit_configuration = { - "connections_per_minute": rate_limit_connections_per_minute, - "policy": rate_limit_policy, - } - return rate_limit_configuration - - def _generate_retry_configuration( - self, count: Optional[int], redispatch: bool - ) -> dict[str, Any]: - """Generate retry configuration. - - Args: - count: Number of times to retry failed requests. - redispatch: Whether to redispatch failed requests to another server. - - Returns: - dict[str, Any]: Retry configuration dictionary, or empty dict if retry not configured. 
- """ - retry_configuration = {} - if count: - retry_configuration = { - "count": count, - "redispatch": redispatch, - } - return retry_configuration - - def update_relation_data(self) -> None: - """Update both application and unit data in the relation.""" - if not self._application_data.get("service") and not self._application_data.get("ports"): - logger.warning("Required field(s) are missing, skipping update of the relation data.") - return - - if relation := self.relation: - self._update_application_data(relation) - self._update_unit_data(relation) - - def _update_application_data(self, relation: Relation) -> None: - """Update application data in the relation databag. - - Args: - relation: The relation instance. - """ - if self.charm.unit.is_leader(): - application_data = self._prepare_application_data() - application_data.dump(relation.data[self.app], clear=True) - - def _update_unit_data(self, relation: Relation) -> None: - """Prepare and update the unit data in the relation databag. - - Args: - relation: The relation instance. - """ - unit_data = self._prepare_unit_data() - unit_data.dump(relation.data[self.charm.unit], clear=True) - - def _prepare_application_data(self) -> RequirerApplicationData: - """Prepare and validate the application data. - - Raises: - DataValidationError: When validation of application data fails. - - Returns: - RequirerApplicationData: The validated application data model. - """ - try: - return cast( - RequirerApplicationData, RequirerApplicationData.from_dict(self._application_data) - ) - except ValidationError as exc: - logger.error("Validation error when preparing requirer application data.") - raise DataValidationError( - "Validation error when preparing requirer application data." - ) from exc - - def _prepare_unit_data(self) -> RequirerUnitData: - """Prepare and validate unit data. - - Raises: - DataValidationError: When no address or unit IP is available. - - Returns: - RequirerUnitData: The validated unit data model. 
- """ - address = self._unit_address - if not address: - network_binding = self.charm.model.get_binding(self._relation_name) - if ( - network_binding is not None - and (bind_address := network_binding.network.bind_address) is not None - ): - address = str(bind_address) - else: - logger.error("No unit IP available.") - raise DataValidationError("No unit IP available.") - return RequirerUnitData(address=cast(IPvAnyAddress, address)) - - def get_proxied_endpoints(self) -> list[AnyHttpUrl]: - """The full ingress URL to reach the current unit. - - Returns: - The provider URL or None if the URL isn't available yet or is not valid. - """ - relation = self.relation - if not relation or not relation.app: - return [] - - # Fetch the provider's app databag - try: - databag = relation.data[relation.app] - except ModelError: - logger.exception("Error reading remote app data.") - return [] - - if not databag: # not ready yet - return [] - - try: - provider_data = cast( - HaproxyRouteProviderAppData, HaproxyRouteProviderAppData.load(databag) - ) - return provider_data.endpoints - except DataValidationError: - logger.exception("Invalid provider url.") - return [] diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 167eff41b..c26957603 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -1,30 +1,34 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. -"""TODO: Add a proper docstring here. +"""haproxy-route-policy interface library. -This is a placeholder docstring for this charm library. Docstrings are -presented on Charmhub and updated whenever you push a new version of the -library. 
+This interface is used between the HAProxy charm (requirer) and the +haproxy-route-policy charm (provider). -Complete documentation about creating and documenting libraries can be found -in the SDK docs at https://juju.is/docs/sdk/libraries. - -See `charmcraft publish-lib` and `charmcraft fetch-lib` for details of how to -share and consume charm libraries. They serve to enhance collaboration -between charmers. Use a charmer's libraries for classes that handle -integration with their charm. - -Bear in mind that new revisions of the different major API versions (v0, v1, -v2 etc) are maintained independently. You can continue to update v0 and v1 -after you have pushed v3. - -Markdown is supported, following the CommonMark specification. +The requirer publishes route policy requests under ``requests`` as a list of +HAProxy backend objects. The provider publishes approved entries under +``approved_backends`` and additionally exposes ``policy_backend_port`` and +provider unit addresses for policy web UI routing. """ -from charms.haproxy.v2.haproxy_route import RequirerApplicationData -from pydantic import Field, IPvAnyAddress -from pydantic.dataclasses import dataclass +import json +import logging +from typing import Annotated, MutableMapping, Optional, cast + +from ops import CharmBase +from ops.charm import CharmEvents +from ops.framework import EventBase, EventSource, Object +from ops.model import Relation +from pydantic import ( + BaseModel, + BeforeValidator, + ConfigDict, + Field, + ValidationError, + model_validator, +) +from validators import domain # The unique Charmhub library identifier, never change it LIBID = "24c99d77895e481d8661288f95884ee4" @@ -34,28 +38,312 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 1 +LIBPATCH = 2 + + +def valid_domain_with_wildcard(value: str) -> str: + """Validate if value is a valid domain that can include a wildcard. 
+ + The wildcard character (*) can't be at the TLD level, for example *.com is not valid. + This is supported natively by the library ( e.g domain("com") will raise a ValidationError ). + + Raises: + ValueError: When value is not a valid domain. + + Args: + value: The value to validate. + """ + fqdn = value[2:] if value.startswith("*.") else value + if not bool(domain(fqdn)): + raise ValueError(f"Invalid domain: {value}") + return value + + +logger = logging.getLogger(__name__) +HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" + + +class DataValidationError(Exception): + """Raised when data validation fails.""" + + +class _DatabagModel(BaseModel): + """Base databag model. + + Attrs: + model_config: pydantic model configuration. + """ + model_config = ConfigDict( + # tolerate additional keys in databag + extra="ignore", + # Allow instantiating this class by field name (instead of forcing alias). + populate_by_name=True, + # Custom config key: whether to nest the whole datastructure (as json) + # under a field or spread it out at the toplevel. + _NEST_UNDER=None, + ) # type: ignore + """Pydantic config.""" -@dataclass -class HaproxyRoutePolicyData: - """Dataclass to store the data for the haproxy-route-policy interface.""" + @classmethod + def load(cls, databag: MutableMapping) -> "_DatabagModel": + """Load this model from a Juju json databag. - requests: list[RequirerApplicationData] + Args: + databag: Databag content. + Raises: + DataValidationError: When model validation failed. -class RequirerUnitData(_DatabagModel): - """haproxy-route requirer unit data. + Returns: + _DatabagModel: The validated model. 
+ """ + nest_under = cls.model_config.get("_NEST_UNDER") + if nest_under: + return cls.model_validate(json.loads(databag[nest_under])) + + try: + data = { + k: json.loads(v) + for k, v in databag.items() + # Don't attempt to parse model-external values + if k in {(f.alias or n) for n, f in cls.model_fields.items()} + } + except json.JSONDecodeError as e: + msg = f"invalid databag contents: expecting json. {databag}" + logger.error(msg) + raise DataValidationError(msg) from e + + try: + return cls.model_validate_json(json.dumps(data)) + except ValidationError as e: + msg = f"failed to validate databag: {databag}" + logger.error(str(e), exc_info=True) + raise DataValidationError(msg) from e + + def dump( + self, databag: Optional[MutableMapping] = None, clear: bool = True + ) -> Optional[MutableMapping]: + """Write the contents of this model to Juju databag. + + Args: + databag: The databag to write to. + clear: Whether to clear the databag before writing. + + Returns: + MutableMapping: The databag. + """ + if clear and databag: + databag.clear() + + if databag is None: + databag = {} + nest_under = self.model_config.get("_NEST_UNDER") + if nest_under: + databag[nest_under] = self.model_dump_json( + by_alias=True, + # skip keys whose values are default + exclude_defaults=True, + ) + return databag + + dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True) + databag.update({k: json.dumps(v) for k, v in dct.items()}) + return databag + + +class HaproxyRoutePolicyInvalidRelationDataError(Exception): + """Raised when relation data validation for haproxy-route-policy fails.""" + + +class HaproxyRoutePolicyBackendRequest(_DatabagModel): + """Data model representing a single backend request from the requirer. Attributes: - address: IP address of the unit. + relation_id: The relation ID of the request. + backend_name: The name of the HAProxy backend. + hostname_acls: List of hostname ACLs for the backend. + paths: List of paths for the backend. 
+ port: Port number for the backend. """ - address: IPvAnyAddress = Field(description="IP address of the unit.") + relation_id: int = Field(description="Relation ID of the backend request.") + backend_name: str = Field(description="Name of the HAProxy backend.") + hostname_acls: list[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = Field( + description="List of hostname ACLs for the backend." + ) + paths: list[str] = Field(description="List of paths for the backend.") + port: int = Field(gt=0, le=65535, description="Port number for the backend.") + + +class HaproxyRoutePolicyRequirerAppData(_DatabagModel): + """Data model representing the requirer application data for haproxy-route-policy. + + Attributes: + backend_requests: List of backend requests to be evaluated by the policy service. + """ + + backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( + description="List of backends to be evaluated by the policy service." + ) + + @model_validator(mode="after") + def validate_unique_backend_names(self): + """Ensure that backend names are unique across all requests.""" + backend_names = [request.backend_name for request in self.backend_requests] + if len(backend_names) != len(set(backend_names)): + raise ValueError("Backend names must be unique across all requests.") + return self + + +class HaproxyRoutePolicyProviderAppData(_DatabagModel): + """haproxy-route-policy provider app databag schema.""" + + approved_requests: list[HaproxyRoutePolicyBackendRequest] = Field( + description="List of approved backend requests." 
+ ) + + +class HaproxyRoutePolicyDataAvailableEvent(EventBase): + """Emitted when requirer policy request data becomes available.""" + + +class HaproxyRoutePolicyDataRemovedEvent(EventBase): + """Emitted when one of the relations is removed.""" + + +class HaproxyRoutePolicyProviderEvents(CharmEvents): + """Events emitted by the policy provider helper.""" + + data_available = EventSource(HaproxyRoutePolicyDataAvailableEvent) + data_removed = EventSource(HaproxyRoutePolicyDataRemovedEvent) + + +class HaproxyRoutePolicyProvider(Object): + """haproxy-route-policy provider implementation.""" + + on = HaproxyRoutePolicyProviderEvents() # pyright: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = HAPROXY_ROUTE_POLICY_RELATION_NAME, + ) -> None: + """Initialize provider helper. + + Args: + charm: The charm instance using this helper. + relation_name: Name of the relation endpoint. + raise_on_validation_error: Raise on invalid remote data when True. + """ + super().__init__(charm, relation_name) + self.charm = charm + self._relation_name = relation_name + on = self.charm.on + self.framework.observe(on[self._relation_name].relation_changed, self._configure) + self.framework.observe(on[self._relation_name].relation_created, self._configure) + self.framework.observe(on[self._relation_name].relation_broken, self._on_data_removed) + self.framework.observe(on[self._relation_name].relation_departed, self._on_data_removed) + + @property + def relation(self) -> Relation | None: + """Return the first relation for this endpoint, if any.""" + return self.charm.model.get_relation(self._relation_name) + + def _configure(self, _event: EventBase) -> None: + """Handle relation lifecycle and emit data availability events.""" + if self.relation is not None: + _ = self.get_data(self.relation) + self.on.data_available.emit() + + def _on_data_removed(self, _event: EventBase) -> None: + """Handle relation removal events.""" + self.on.data_removed.emit() + + def 
get_data(self, relation: Relation) -> HaproxyRoutePolicyRequirerAppData: + """Fetch and validate requirer data. + + Args: + relation: Relation to parse. + + Raises: + HaproxyRoutePolicyInvalidRelationDataError: When validation fails and + ``raise_on_validation_error`` is set. + + Returns: + Parsed relation payloads and relation IDs that failed validation. + """ + try: + return cast( + HaproxyRoutePolicyRequirerAppData, + HaproxyRoutePolicyRequirerAppData.load(relation.data[relation.app]), + ) + except DataValidationError as exc: + logger.error( + "haproxy-route-policy data validation failed for relation %s: %s", + relation, + str(exc), + ) + raise HaproxyRoutePolicyInvalidRelationDataError( + f"haproxy-route-policy data validation failed for relation: {relation}" + ) from exc + + +class HaproxyRoutePolicyReadyEvent(EventBase): + """Emitted when provider data is available to the requirer.""" + + +class HaproxyRoutePolicyRemovedEvent(EventBase): + """Emitted when the relation is removed from the requirer side.""" + + +class HaproxyRoutePolicyRequirerEvents(CharmEvents): + """Events emitted by the policy requirer helper.""" + + ready = EventSource(HaproxyRoutePolicyReadyEvent) + removed = EventSource(HaproxyRoutePolicyRemovedEvent) + + +class HaproxyRoutePolicyRequirer(Object): + """haproxy-route-policy requirer implementation.""" + + on = HaproxyRoutePolicyRequirerEvents() # pyright: ignore + + def __init__( + self, + charm: CharmBase, + relation_name: str = HAPROXY_ROUTE_POLICY_RELATION_NAME, + ) -> None: + """Initialize requirer helper. + + Args: + charm: The charm instance using this helper. + relation_name: Name of the relation endpoint. + requests: Optional initial request backend list to publish. 
+ """ + super().__init__(charm, relation_name) + self.charm = charm + self._relation_name = relation_name + + @property + def relation(self) -> Relation | None: + """Return the first relation for this endpoint, if any.""" + return self.charm.model.get_relation(self._relation_name) + def provide_haproxy_route_policy_requests( + self, backend_requests: list[HaproxyRoutePolicyBackendRequest] + ) -> None: + """Set and publish route policy requests.""" + relation = self.relation + if not relation or not self.charm.unit.is_leader(): + return -@dataclass -class HaproxyRoutePolicyProviderData(HaproxyRoutePolicyData): - """Dataclass to store the data for the haproxy-route-policy provider interface.""" + try: + app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=backend_requests) + except ValidationError as exc: + logger.error("Validation error when preparing requirer relation data.") + raise DataValidationError( + "Validation error when preparing requirer relation data." + ) from exc - units_data: list[RequirerUnitData] + app_data.dump(relation.data[self.charm.app], clear=True) diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py new file mode 100644 index 000000000..278847730 --- /dev/null +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py @@ -0,0 +1,168 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Unit tests for haproxy-route-policy interface library models.""" + +import json +from typing import cast + +import pytest +from charms.haproxy_route_policy.v0.haproxy_route_policy import ( + DataValidationError, + HaproxyRoutePolicyBackendRequest, + HaproxyRoutePolicyProviderAppData, + HaproxyRoutePolicyRequirerAppData, + valid_domain_with_wildcard, +) +from pydantic import ValidationError + +VALID_BACKEND_REQUEST = { + "relation_id": 10, + "backend_name": "backend-a", + "hostname_acls": ["example.com"], + "paths": ["/"], + "port": 8080, +} + + +@pytest.mark.parametrize( + "domain", + [ + pytest.param("example.com", id="fqdn"), + pytest.param("api.example.com", id="subdomain"), + pytest.param("*.example.com", id="wildcard"), + ], +) +def test_valid_domain_with_wildcard_accepts_valid_domain(domain: str): + """ + arrange: provide a valid domain. + act: call valid_domain_with_wildcard. + assert: returns the same domain. + """ + assert valid_domain_with_wildcard(domain) == domain + + +@pytest.mark.parametrize( + "domain", + [ + pytest.param("", id="empty"), + pytest.param("example", id="missing-tld"), + pytest.param("*.com", id="wildcard-tld"), + pytest.param("invalid host", id="space-in-host"), + ], +) +def test_valid_domain_with_wildcard_rejects_invalid_domain(domain: str): + """ + arrange: provide an invalid domain. + act: call valid_domain_with_wildcard. + assert: raises ValueError. + """ + with pytest.raises(ValueError): + valid_domain_with_wildcard(domain) + + +def test_backend_request_model_validation_accepts_valid_payload(): + """ + arrange: build a valid backend request payload. + act: initialize HaproxyRoutePolicyBackendRequest. + assert: fields are parsed correctly. 
+ """ + request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) + + assert request.relation_id == 10 + assert request.backend_name == "backend-a" + assert request.hostname_acls == ["example.com"] + assert request.paths == ["/"] + assert request.port == 8080 + + +@pytest.mark.parametrize( + "field,value", + [ + pytest.param("port", 0, id="port-too-low"), + pytest.param("port", 65536, id="port-too-high"), + pytest.param("hostname_acls", ["invalid host"], id="invalid-hostname"), + ], +) +def test_backend_request_model_validation_rejects_invalid_payload(field: str, value): + """ + arrange: build an invalid backend request payload. + act: initialize HaproxyRoutePolicyBackendRequest. + assert: raises ValidationError. + """ + payload = VALID_BACKEND_REQUEST.copy() + payload[field] = value + + with pytest.raises(ValidationError): + HaproxyRoutePolicyBackendRequest(**payload) + + +def test_requirer_app_data_dump_and_load_roundtrip(): + """ + arrange: build valid requirer app data. + act: dump to databag and load back. + assert: loaded payload matches the original values. + """ + request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) + original = HaproxyRoutePolicyRequirerAppData(backend_requests=[request]) + + databag = cast(dict[str, str], original.dump()) + loaded = cast( + HaproxyRoutePolicyRequirerAppData, HaproxyRoutePolicyRequirerAppData.load(databag) + ) + + assert len(loaded.backend_requests) == 1 + assert loaded.backend_requests[0].backend_name == "backend-a" + assert loaded.backend_requests[0].port == 8080 + + +def test_provider_app_data_dump_and_load_roundtrip(): + """ + arrange: build valid provider app data. + act: dump to databag and load back. + assert: loaded payload matches the original values. 
+ """ + request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) + original = HaproxyRoutePolicyProviderAppData(approved_requests=[request]) + + databag = cast(dict[str, str], original.dump()) + loaded = cast( + HaproxyRoutePolicyProviderAppData, HaproxyRoutePolicyProviderAppData.load(databag) + ) + + assert len(loaded.approved_requests) == 1 + assert loaded.approved_requests[0].backend_name == "backend-a" + assert loaded.approved_requests[0].relation_id == 10 + + +def test_requirer_app_data_load_rejects_duplicate_backend_names(): + """ + arrange: build databag payload with duplicate backend names. + act: load HaproxyRoutePolicyRequirerAppData. + assert: raises DataValidationError. + """ + duplicated_requests = [ + VALID_BACKEND_REQUEST, + { + **VALID_BACKEND_REQUEST, + "relation_id": 11, + "port": 9090, + "hostname_acls": ["api.example.com"], + }, + ] + databag = {"backend_requests": json.dumps(duplicated_requests)} + + with pytest.raises(DataValidationError): + HaproxyRoutePolicyRequirerAppData.load(databag) + + +def test_requirer_app_data_load_rejects_invalid_json(): + """ + arrange: build databag payload with non-json value. + act: load HaproxyRoutePolicyRequirerAppData. + assert: raises DataValidationError. 
+ """ + databag = {"backend_requests": "not-json"} + + with pytest.raises(DataValidationError): + HaproxyRoutePolicyRequirerAppData.load(databag) From 60f3c1d09dec65f6e4a0cebf46b49ffc373256f8 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 15:45:42 +0200 Subject: [PATCH 143/201] update lib, fix test issues --- haproxy-route-policy-operator/charmcraft.yaml | 5 + .../v0/haproxy_route_policy.py | 222 ++++-------------- haproxy-route-policy-operator/src/charm.py | 29 +++ .../tests/unit/test_charm.py | 2 +- .../unit/test_haproxy_route_policy_lib.py | 83 +++---- haproxy-route-policy-operator/uv.lock | 1 + 6 files changed, 108 insertions(+), 234 deletions(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 97c4cc1cc..777c09c5a 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -40,6 +40,11 @@ requires: limit: 1 optional: false +provides: + haproxy-route-policy: + interface: haproxy_route_policy + description: Interface between haproxy and the policy charm to approve/deny backend requests. + actions: get-admin-credentials: description: Retrieve the admin credentials to call the HAProxy Route Policy API. diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index c26957603..9747e916d 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -12,22 +12,24 @@ provider unit addresses for policy web UI routing. 
""" -import json import logging -from typing import Annotated, MutableMapping, Optional, cast +from typing import Annotated from ops import CharmBase -from ops.charm import CharmEvents -from ops.framework import EventBase, EventSource, Object -from ops.model import Relation +from ops.framework import Object +from ops.model import ( + Relation, + RelationDataAccessError, + RelationDataTypeError, + RelationNotFoundError, +) from pydantic import ( - BaseModel, BeforeValidator, - ConfigDict, Field, ValidationError, model_validator, ) +from pydantic.dataclasses import dataclass from validators import domain # The unique Charmhub library identifier, never change it @@ -38,7 +40,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 2 +LIBPATCH = 1 def valid_domain_with_wildcard(value: str) -> str: @@ -63,100 +65,12 @@ def valid_domain_with_wildcard(value: str) -> str: HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" -class DataValidationError(Exception): - """Raised when data validation fails.""" - - -class _DatabagModel(BaseModel): - """Base databag model. - - Attrs: - model_config: pydantic model configuration. - """ - - model_config = ConfigDict( - # tolerate additional keys in databag - extra="ignore", - # Allow instantiating this class by field name (instead of forcing alias). - populate_by_name=True, - # Custom config key: whether to nest the whole datastructure (as json) - # under a field or spread it out at the toplevel. - _NEST_UNDER=None, - ) # type: ignore - """Pydantic config.""" - - @classmethod - def load(cls, databag: MutableMapping) -> "_DatabagModel": - """Load this model from a Juju json databag. - - Args: - databag: Databag content. - - Raises: - DataValidationError: When model validation failed. - - Returns: - _DatabagModel: The validated model. 
- """ - nest_under = cls.model_config.get("_NEST_UNDER") - if nest_under: - return cls.model_validate(json.loads(databag[nest_under])) - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.model_fields.items()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. {databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.model_validate_json(json.dumps(data)) - except ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.error(str(e), exc_info=True) - raise DataValidationError(msg) from e - - def dump( - self, databag: Optional[MutableMapping] = None, clear: bool = True - ) -> Optional[MutableMapping]: - """Write the contents of this model to Juju databag. - - Args: - databag: The databag to write to. - clear: Whether to clear the databag before writing. - - Returns: - MutableMapping: The databag. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - nest_under = self.model_config.get("_NEST_UNDER") - if nest_under: - databag[nest_under] = self.model_dump_json( - by_alias=True, - # skip keys whose values are default - exclude_defaults=True, - ) - return databag - - dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True) - databag.update({k: json.dumps(v) for k, v in dct.items()}) - return databag - - class HaproxyRoutePolicyInvalidRelationDataError(Exception): """Raised when relation data validation for haproxy-route-policy fails.""" -class HaproxyRoutePolicyBackendRequest(_DatabagModel): +@dataclass +class HaproxyRoutePolicyBackendRequest: """Data model representing a single backend request from the requirer. 
Attributes: @@ -176,7 +90,8 @@ class HaproxyRoutePolicyBackendRequest(_DatabagModel): port: int = Field(gt=0, le=65535, description="Port number for the backend.") -class HaproxyRoutePolicyRequirerAppData(_DatabagModel): +@dataclass +class HaproxyRoutePolicyRequirerAppData: """Data model representing the requirer application data for haproxy-route-policy. Attributes: @@ -196,7 +111,8 @@ def validate_unique_backend_names(self): return self -class HaproxyRoutePolicyProviderAppData(_DatabagModel): +@dataclass +class HaproxyRoutePolicyProviderAppData: """haproxy-route-policy provider app databag schema.""" approved_requests: list[HaproxyRoutePolicyBackendRequest] = Field( @@ -204,26 +120,9 @@ class HaproxyRoutePolicyProviderAppData(_DatabagModel): ) -class HaproxyRoutePolicyDataAvailableEvent(EventBase): - """Emitted when requirer policy request data becomes available.""" - - -class HaproxyRoutePolicyDataRemovedEvent(EventBase): - """Emitted when one of the relations is removed.""" - - -class HaproxyRoutePolicyProviderEvents(CharmEvents): - """Events emitted by the policy provider helper.""" - - data_available = EventSource(HaproxyRoutePolicyDataAvailableEvent) - data_removed = EventSource(HaproxyRoutePolicyDataRemovedEvent) - - class HaproxyRoutePolicyProvider(Object): """haproxy-route-policy provider implementation.""" - on = HaproxyRoutePolicyProviderEvents() # pyright: ignore - def __init__( self, charm: CharmBase, @@ -234,81 +133,42 @@ def __init__( Args: charm: The charm instance using this helper. relation_name: Name of the relation endpoint. - raise_on_validation_error: Raise on invalid remote data when True. 
""" super().__init__(charm, relation_name) self.charm = charm - self._relation_name = relation_name - on = self.charm.on - self.framework.observe(on[self._relation_name].relation_changed, self._configure) - self.framework.observe(on[self._relation_name].relation_created, self._configure) - self.framework.observe(on[self._relation_name].relation_broken, self._on_data_removed) - self.framework.observe(on[self._relation_name].relation_departed, self._on_data_removed) + self.relation_name = relation_name @property def relation(self) -> Relation | None: """Return the first relation for this endpoint, if any.""" - return self.charm.model.get_relation(self._relation_name) + return self.charm.model.get_relation(self.relation_name) - def _configure(self, _event: EventBase) -> None: - """Handle relation lifecycle and emit data availability events.""" - if self.relation is not None: - _ = self.get_data(self.relation) - self.on.data_available.emit() - - def _on_data_removed(self, _event: EventBase) -> None: - """Handle relation removal events.""" - self.on.data_removed.emit() - - def get_data(self, relation: Relation) -> HaproxyRoutePolicyRequirerAppData: - """Fetch and validate requirer data. - - Args: - relation: Relation to parse. - - Raises: - HaproxyRoutePolicyInvalidRelationDataError: When validation fails and - ``raise_on_validation_error`` is set. + def set_approved_backend_requests( + self, approved_requests: list[HaproxyRoutePolicyBackendRequest] + ) -> None: + """Set and publish approved backend requests.""" + relation = self.relation + if not relation or not self.charm.unit.is_leader(): + return - Returns: - Parsed relation payloads and relation IDs that failed validation. 
- """ try: - return cast( - HaproxyRoutePolicyRequirerAppData, - HaproxyRoutePolicyRequirerAppData.load(relation.data[relation.app]), - ) - except DataValidationError as exc: - logger.error( - "haproxy-route-policy data validation failed for relation %s: %s", - relation, - str(exc), - ) + app_data = HaproxyRoutePolicyProviderAppData(approved_requests=approved_requests) + relation.save(app_data, relation.app) + except ( + ValidationError, + RelationDataTypeError, + RelationDataAccessError, + RelationNotFoundError, + ) as exc: + logger.error("Validation error when preparing provider relation data.") raise HaproxyRoutePolicyInvalidRelationDataError( - f"haproxy-route-policy data validation failed for relation: {relation}" + "Validation error when preparing provider relation data." ) from exc -class HaproxyRoutePolicyReadyEvent(EventBase): - """Emitted when provider data is available to the requirer.""" - - -class HaproxyRoutePolicyRemovedEvent(EventBase): - """Emitted when the relation is removed from the requirer side.""" - - -class HaproxyRoutePolicyRequirerEvents(CharmEvents): - """Events emitted by the policy requirer helper.""" - - ready = EventSource(HaproxyRoutePolicyReadyEvent) - removed = EventSource(HaproxyRoutePolicyRemovedEvent) - - class HaproxyRoutePolicyRequirer(Object): """haproxy-route-policy requirer implementation.""" - on = HaproxyRoutePolicyRequirerEvents() # pyright: ignore - def __init__( self, charm: CharmBase, @@ -340,10 +200,14 @@ def provide_haproxy_route_policy_requests( try: app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=backend_requests) - except ValidationError as exc: + relation.save(app_data, relation.app) + except ( + ValidationError, + RelationDataTypeError, + RelationDataAccessError, + RelationNotFoundError, + ) as exc: logger.error("Validation error when preparing requirer relation data.") - raise DataValidationError( + raise HaproxyRoutePolicyInvalidRelationDataError( "Validation error when preparing requirer 
relation data." ) from exc - - app_data.dump(relation.data[self.charm.app], clear=True) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index ad8b45a47..40bcfc2af 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -13,6 +13,10 @@ from charms.data_platform_libs.v0.data_interfaces import ( DatabaseRequires, ) +from charms.haproxy_route_policy.v0.haproxy_route_policy import ( + HaproxyRoutePolicyProvider, + HaproxyRoutePolicyRequirerAppData, +) from policy import ( HaproxyRoutePolicyDatabaseMigrationError, @@ -41,6 +45,7 @@ DATABASE_RELATION = "database" HAPROXY_ROUTE_POLICY_PORT = 8080 +HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" class HaproxyRoutePolicyCharm(ops.CharmBase): @@ -67,6 +72,22 @@ def __init__(self, *args: Any): ) self.framework.observe(self.database.on.database_created, self._reconcile) + self.haproxy_route_policy = HaproxyRoutePolicyProvider( + self, HAPROXY_ROUTE_POLICY_RELATION_NAME + ) + self.framework.observe( + self.on[self.haproxy_route_policy.relation_name].relation_created, self._reconcile + ) + self.framework.observe( + self.on[self.haproxy_route_policy.relation_name].relation_changed, self._reconcile + ) + self.framework.observe( + self.on[self.haproxy_route_policy.relation_name].relation_broken, self._reconcile + ) + self.framework.observe( + self.on[self.haproxy_route_policy.relation_name].relation_departed, self._reconcile + ) + def _reconcile(self, _: ops.EventBase) -> None: """Reconcile snap configuration and service state.""" try: @@ -95,6 +116,14 @@ def _reconcile(self, _: ops.EventBase) -> None: start_gunicorn_service() self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) + + if relation := self.haproxy_route_policy.relation: + requests = relation.load( + HaproxyRoutePolicyRequirerAppData, relation.app + ).backend_requests + logger.info(f"backend requests {requests}, auto approved.") + 
self.haproxy_route_policy.set_approved_backend_requests(requests) + except DatabaseRelationMissingError: self.unit.status = ops.BlockedStatus("Missing database relation.") return diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index ab3396ef8..1949eae8e 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -145,7 +145,7 @@ def test_config_changed_missing_secrets(secrets): out = ctx.run(ctx.on.config_changed(), state) assert out.unit_status == testing.WaitingStatus( - "Waiting for leader to set shared configuration." + "Waiting for complete shared configuration from leader." ) diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py index 278847730..b7f877347 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py @@ -3,12 +3,8 @@ """Unit tests for haproxy-route-policy interface library models.""" -import json -from typing import cast - import pytest from charms.haproxy_route_policy.v0.haproxy_route_policy import ( - DataValidationError, HaproxyRoutePolicyBackendRequest, HaproxyRoutePolicyProviderAppData, HaproxyRoutePolicyRequirerAppData, @@ -97,72 +93,51 @@ def test_backend_request_model_validation_rejects_invalid_payload(field: str, va HaproxyRoutePolicyBackendRequest(**payload) -def test_requirer_app_data_dump_and_load_roundtrip(): +def test_requirer_app_data_model_accepts_valid_payload(): """ arrange: build valid requirer app data. - act: dump to databag and load back. - assert: loaded payload matches the original values. + act: initialize HaproxyRoutePolicyRequirerAppData. + assert: payload is validated and fields are preserved. 
""" request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) - original = HaproxyRoutePolicyRequirerAppData(backend_requests=[request]) - - databag = cast(dict[str, str], original.dump()) - loaded = cast( - HaproxyRoutePolicyRequirerAppData, HaproxyRoutePolicyRequirerAppData.load(databag) - ) + app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=[request]) - assert len(loaded.backend_requests) == 1 - assert loaded.backend_requests[0].backend_name == "backend-a" - assert loaded.backend_requests[0].port == 8080 + assert len(app_data.backend_requests) == 1 + assert app_data.backend_requests[0].backend_name == "backend-a" + assert app_data.backend_requests[0].port == 8080 -def test_provider_app_data_dump_and_load_roundtrip(): +def test_provider_app_data_model_accepts_valid_payload(): """ arrange: build valid provider app data. - act: dump to databag and load back. - assert: loaded payload matches the original values. + act: initialize HaproxyRoutePolicyProviderAppData. + assert: payload is validated and fields are preserved. """ request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) - original = HaproxyRoutePolicyProviderAppData(approved_requests=[request]) - - databag = cast(dict[str, str], original.dump()) - loaded = cast( - HaproxyRoutePolicyProviderAppData, HaproxyRoutePolicyProviderAppData.load(databag) - ) + app_data = HaproxyRoutePolicyProviderAppData(approved_requests=[request]) - assert len(loaded.approved_requests) == 1 - assert loaded.approved_requests[0].backend_name == "backend-a" - assert loaded.approved_requests[0].relation_id == 10 + assert len(app_data.approved_requests) == 1 + assert app_data.approved_requests[0].backend_name == "backend-a" + assert app_data.approved_requests[0].relation_id == 10 -def test_requirer_app_data_load_rejects_duplicate_backend_names(): +def test_requirer_app_data_rejects_duplicate_backend_names(): """ - arrange: build databag payload with duplicate backend names. 
- act: load HaproxyRoutePolicyRequirerAppData. - assert: raises DataValidationError. + arrange: build app data payload with duplicate backend names. + act: initialize HaproxyRoutePolicyRequirerAppData. + assert: raises ValidationError. """ duplicated_requests = [ - VALID_BACKEND_REQUEST, - { - **VALID_BACKEND_REQUEST, - "relation_id": 11, - "port": 9090, - "hostname_acls": ["api.example.com"], - }, + HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST), + HaproxyRoutePolicyBackendRequest( + **{ + **VALID_BACKEND_REQUEST, + "relation_id": 11, + "port": 9090, + "hostname_acls": ["api.example.com"], + } + ), ] - databag = {"backend_requests": json.dumps(duplicated_requests)} - - with pytest.raises(DataValidationError): - HaproxyRoutePolicyRequirerAppData.load(databag) - -def test_requirer_app_data_load_rejects_invalid_json(): - """ - arrange: build databag payload with non-json value. - act: load HaproxyRoutePolicyRequirerAppData. - assert: raises DataValidationError. - """ - databag = {"backend_requests": "not-json"} - - with pytest.raises(DataValidationError): - HaproxyRoutePolicyRequirerAppData.load(databag) + with pytest.raises(ValidationError): + HaproxyRoutePolicyRequirerAppData(backend_requests=duplicated_requests) diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock index 4f8d2f929..e1d551647 100644 --- a/haproxy-route-policy-operator/uv.lock +++ b/haproxy-route-policy-operator/uv.lock @@ -504,6 +504,7 @@ requires-dist = [ { name = "ops", specifier = "==3.7.0" }, { name = "pydantic", specifier = ">=2.12.5" }, { name = "requests", specifier = "==2.33.1" }, + { name = "validators", specifier = ">=0.35.0" }, ] [package.metadata.requires-dev] From 6f6c65299a3d887fe82573fe2b24586ccd0a0496 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 19:02:19 +0200 Subject: [PATCH 144/201] add autoapprove logic and add integration tests with any-charm --- haproxy-route-policy-operator/charmcraft.yaml | 4 +- 
.../tests/integration/conftest.py | 62 +++++++++++++++++++ .../haproxy_route_policy_requirer.py | 42 +++++++++++++ .../test_haproxy_route_policy_relation.py | 46 ++++++++++++++ 4 files changed, 152 insertions(+), 2 deletions(-) create mode 100644 haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py create mode 100644 haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 777c09c5a..095818159 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -42,7 +42,7 @@ requires: provides: haproxy-route-policy: - interface: haproxy_route_policy + interface: haproxy-route-policy description: Interface between haproxy and the policy charm to approve/deny backend requests. actions: @@ -62,7 +62,7 @@ charm-libs: peers: haproxy-route-policy-peer: - interface: haproxy_route_policy_peer + interface: haproxy-route-policy-peer config: options: diff --git a/haproxy-route-policy-operator/tests/integration/conftest.py b/haproxy-route-policy-operator/tests/integration/conftest.py index 8fe147b56..56c980785 100644 --- a/haproxy-route-policy-operator/tests/integration/conftest.py +++ b/haproxy-route-policy-operator/tests/integration/conftest.py @@ -3,6 +3,7 @@ """Fixtures for haproxy-route-policy charm integration tests.""" +import json import pathlib import typing @@ -11,6 +12,10 @@ import yaml JUJU_WAIT_TIMEOUT = 10 * 60 # 10 minutes +ANY_CHARM_HAPROXY_ROUTE_POLICY_REQUIRER_APPLICATION = "any-charm-haproxy-route-policy-requirer" +HAPROXY_ROUTE_POLICY_REQUIRER_SRC = "tests/integration/haproxy_route_policy_requirer.py" +HAPROXY_ROUTE_POLICY_LIB_SRC = "lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py" +POSTGRESQL_APPLICATION = "postgresql" @pytest.fixture(scope="session", name="charm") @@ -58,3 +63,60 @@ def application_fixture(pytestconfig: 
pytest.Config, juju: jubilant.Juju, charm: base="ubuntu@24.04", ) return app_name + + +@pytest.fixture(scope="module", name="any_charm_haproxy_route_policy_requirer") +def any_charm_haproxy_route_policy_requirer_fixture( + pytestconfig: pytest.Config, juju: jubilant.Juju +): + """Deploy any-charm and configure it to serve as a requirer for the haproxy-route + integration. + """ + if ( + pytestconfig.getoption("--no-deploy") + and ANY_CHARM_HAPROXY_ROUTE_POLICY_REQUIRER_APPLICATION in juju.status().apps + ): + return ANY_CHARM_HAPROXY_ROUTE_POLICY_REQUIRER_APPLICATION + juju.deploy( + "any-charm", + app=ANY_CHARM_HAPROXY_ROUTE_POLICY_REQUIRER_APPLICATION, + channel="beta", + config={ + "src-overwrite": json.dumps( + { + "any_charm.py": pathlib.Path(HAPROXY_ROUTE_POLICY_REQUIRER_SRC).read_text( + encoding="utf-8" + ), + "haproxy_route_policy.py": pathlib.Path( + HAPROXY_ROUTE_POLICY_LIB_SRC + ).read_text(encoding="utf-8"), + } + ), + "python-packages": "pydantic~=2.10\nvalidators", + }, + ) + juju.wait( + lambda status: jubilant.all_active( + status, ANY_CHARM_HAPROXY_ROUTE_POLICY_REQUIRER_APPLICATION + ), + timeout=JUJU_WAIT_TIMEOUT, + ) + return ANY_CHARM_HAPROXY_ROUTE_POLICY_REQUIRER_APPLICATION + + +@pytest.fixture(scope="module", name="postgresql") +def postgresql_fixture(pytestconfig: pytest.Config, juju: jubilant.Juju): + """Deploy PostgreSQL.""" + if pytestconfig.getoption("--no-deploy") and POSTGRESQL_APPLICATION in juju.status().apps: + return POSTGRESQL_APPLICATION + juju.deploy( + "postgresql", + app=POSTGRESQL_APPLICATION, + channel="16/edge", + base="ubuntu@24.04", + ) + juju.wait( + lambda status: jubilant.all_active(status, POSTGRESQL_APPLICATION), + timeout=JUJU_WAIT_TIMEOUT, + ) + return POSTGRESQL_APPLICATION diff --git a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py new file mode 100644 index 000000000..815b33ee6 --- /dev/null +++ 
b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py @@ -0,0 +1,42 @@ +# pylint: disable=import-error +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. + +"""haproxy-route requirer source.""" + +import logging + +# Ignoring here to make the linter happy as these modules will be available +# only inside the anycharm unit. +from any_charm_base import AnyCharmBase # type: ignore +from haproxy_route_policy import ( # type: ignore + HaproxyRoutePolicyBackendRequest, + HaproxyRoutePolicyRequirer, +) + +HAPROXY_ROUTE_POLICY_RELATION = "require-haproxy-route-policy" + +logger = logging.getLogger() + + +class AnyCharm(AnyCharmBase): + """haproxy-route requirer charm.""" + + def __init__(self, *args, **kwargs): + # We don't need to include *args and *kwargs in the docstring here. + """Initialize the requirer charm.""" + super().__init__(*args, **kwargs) + self._haproxy_route_policy = HaproxyRoutePolicyRequirer( + self, HAPROXY_ROUTE_POLICY_RELATION + ) + + def update_relation(self): + """Update haproxy-route-tcp relation data""" + backend_requests = [ + HaproxyRoutePolicyBackendRequest( + relation_id=1, + port=4444, + hostname_acls=["example.com"], + ) + ] + self._haproxy_route_policy.provide_haproxy_route_policy_requests(backend_requests) diff --git a/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py b/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py new file mode 100644 index 000000000..803e5cb72 --- /dev/null +++ b/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py @@ -0,0 +1,46 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Basic integration tests for the haproxy-route-policy charm.""" + +import logging + +import jubilant +import pytest + +logger = logging.getLogger(__name__) + + +@pytest.mark.abort_on_fail +def test_haproxy_route_policy_relation( + application: str, + juju: jubilant.Juju, + any_charm_haproxy_route_policy_requirer: str, + postgresql: str, +): + """Test blocked->active transition after integrating with PostgreSQL. + + Args: + application: The deployed haproxy-route-policy application name. + juju: The Juju instance. + + Assert: + The charm is blocked before relation and active after relating with PostgreSQL. + """ + juju.integrate(f"{application}:database", f"{postgresql}:database") + juju.integrate( + f"{any_charm_haproxy_route_policy_requirer}:require-haproxy-route-policy", + f"{application}:haproxy-route-policy", + ) + juju.wait(lambda status: jubilant.all_active(status, application, postgresql)) + juju.run( + f"{any_charm_haproxy_route_policy_requirer}/0", + action="rpc", + params={"method": "update_relation"}, + ) + juju.wait( + lambda status: jubilant.all_active( + status, application, any_charm_haproxy_route_policy_requirer + ) + ) + logger.info(juju.status().apps[application].relations["haproxy-route-policy"]) From 054e536023d19c25ae54d729e11ac2a08ae9d99f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 19:04:33 +0200 Subject: [PATCH 145/201] update uv.lock --- haproxy-route-policy-operator/uv.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock index 0561e51a3..e1d551647 100644 --- a/haproxy-route-policy-operator/uv.lock +++ b/haproxy-route-policy-operator/uv.lock @@ -503,7 +503,7 @@ requires-dist = [ { name = "charmlibs-snap", specifier = "==1.0.1" }, { name = "ops", specifier = "==3.7.0" }, { name = "pydantic", specifier = ">=2.12.5" }, - { name = "requests", specifier = "==2.32.5" }, + { name = "requests", specifier = "==2.33.1" }, { name = 
"validators", specifier = ">=0.35.0" }, ] From 60decd9b474345f3295e6f2bae7327af8d33a6d3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 20:50:48 +0200 Subject: [PATCH 146/201] wait for complete relation data --- haproxy-route-policy-operator/src/charm.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 40bcfc2af..48d7464f9 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -17,6 +17,7 @@ HaproxyRoutePolicyProvider, HaproxyRoutePolicyRequirerAppData, ) +from pydantic import ValidationError from policy import ( HaproxyRoutePolicyDatabaseMigrationError, @@ -149,6 +150,11 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Failed to reconcile haproxy-route-policy service") self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") return + except ValidationError: + self.unit.status = ops.WaitingStatus( + "Waiting for complete data for haproxy-route-policy." 
+ ) + return self.unit.status = ops.ActiveStatus() From 95993a829c65fa3e74f13793e0491b0387ecc1da Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 20:55:15 +0200 Subject: [PATCH 147/201] update charm and requirer --- haproxy-route-policy-operator/src/charm.py | 9 ++++++--- .../tests/integration/haproxy_route_policy_requirer.py | 2 ++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index f68fa1d70..96c80d200 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -17,6 +17,7 @@ HaproxyRoutePolicyProvider, HaproxyRoutePolicyRequirerAppData, ) +from pydantic import ValidationError from policy import ( HaproxyRoutePolicyDatabaseMigrationError, @@ -149,9 +150,11 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Failed to reconcile haproxy-route-policy service") self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") return - except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: - logger.exception("Failed to reconcile haproxy-route-policy service") - self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") + except ValidationError: + logger.exception("Invalid haproxy-route-policy relation data") + self.unit.status = ops.WaitingStatus( + "Waiting for valid haproxy-route-policy relation data" + ) return self.unit.status = ops.ActiveStatus() diff --git a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py index 815b33ee6..2984fe23d 100644 --- a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py +++ b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py @@ -36,6 +36,8 @@ def update_relation(self): HaproxyRoutePolicyBackendRequest( relation_id=1, port=4444, + 
backend_name="test-backend", + paths=["/"], hostname_acls=["example.com"], ) ] From f044f1e1faf063576441f6acfdaebba6f0982579 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 20:56:23 +0200 Subject: [PATCH 148/201] update integration tests --- .../tests/integration/test_haproxy_route_policy_relation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py b/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py index 803e5cb72..74e7b4ebd 100644 --- a/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py +++ b/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py @@ -32,7 +32,6 @@ def test_haproxy_route_policy_relation( f"{any_charm_haproxy_route_policy_requirer}:require-haproxy-route-policy", f"{application}:haproxy-route-policy", ) - juju.wait(lambda status: jubilant.all_active(status, application, postgresql)) juju.run( f"{any_charm_haproxy_route_policy_requirer}/0", action="rpc", From ad83b0394b5ff0b498bd68a4d3e62777887cb040 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 21:04:04 +0200 Subject: [PATCH 149/201] remove unused lib and update tests and lib --- .../lib/charms/haproxy/v2/haproxy_route.py | 1609 ----------------- .../v0/haproxy_route_policy.py | 4 +- .../test_haproxy_route_policy_relation.py | 2 +- 3 files changed, 3 insertions(+), 1612 deletions(-) delete mode 100644 haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py diff --git a/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py b/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py deleted file mode 100644 index 795c815ad..000000000 --- a/haproxy-route-policy-operator/lib/charms/haproxy/v2/haproxy_route.py +++ /dev/null @@ -1,1609 +0,0 @@ -# pylint: disable=too-many-lines -"""Haproxy-route interface library. 
- -## Getting Started - -To get started using the library, you just need to fetch the library using `charmcraft`. - -```shell -cd some-charm -charmcraft fetch-lib charms.haproxy.v2.haproxy_route -``` - -In the `metadata.yaml` of the charm, add the following: - -```yaml -requires: - backend: - interface: haproxy-route - limit: 1 -``` - -Then, to initialise the library: - -```python -from charms.haproxy.v2.haproxy_route import HaproxyRouteRequirer - -class SomeCharm(CharmBase): - def __init__(self, *args): - # ... - - # There are 2 ways you can use the requirer implementation: - # 1. To initialize the requirer with parameters: - self.haproxy_route_requirer = HaproxyRouteRequirer(self, - relation_name=, - service=, - ports=, - protocol=, - hosts=, - paths=, - hostname=, - additional_hostnames=, - check_interval=, - check_rise=, - check_fall=, - check_path=, - check_port=, - path_rewrite_expressions=, list of path rewrite expressions, - query_rewrite_expressions=, list of query rewrite expressions, - header_rewrite_expressions=, list of (header_name, rewrite_expression), - load_balancing_algorithm=, defaults to "leastconn", - load_balancing_cookie=, only used when load_balancing_algorithm is cookie - load_balancing_consistent_hashing=, to enable consistent hashing, - defaults to False, - rate_limit_connections_per_minute=, - rate_limit_policy=, - upload_limit=, - download_limit=, - retry_count=, - retry_redispatch=, - deny_paths=, - server_timeout=, - connect_timeout=, - queue_timeout=, - server_maxconn=, - unit_address=, - http_server_close=, - ) - - # 2.To initialize the requirer with no parameters, i.e - # self.haproxy_route_requirer = HaproxyRouteRequirer(self) - # This will simply initialize the requirer class and it won't perfom any action. - - # Afterwards regardless of how you initialized the requirer you can call the - # provide_haproxy_route_requirements method anywhere in your charm to update the requirer data. 
- # The method takes the same number of parameters as the requirer class. - # provide_haproxy_route_requirements(address=, port=, ...) - - self.framework.observe( - self.framework.on.config_changed, self._on_config_changed - ) - self.framework.observe( - self.haproxy_route_requirer.on.ready, self._on_endpoints_ready - ) - self.framework.observe( - self.haproxy_route_requirer.on.removed, self._on_endpoints_removed - ) - - def _on_config_changed(self, event: ConfigChangedEvent) -> None: - self.haproxy_route_requirer.provide_haproxy_route_requirements(...) - - def _on_endpoints_ready(self, _: EventBase) -> None: - # Handle endpoints ready event - ... - - def _on_endpoints_removed(self, _: EventBase) -> None: - # Handle endpoints removed event - ... - -## Using the library as the provider -The provider charm should expose the interface as shown below: -```yaml -provides: - haproxy-route: - interface: haproxy-route -``` -Note that this interface supports relating to multiple endpoints. - -Then, to initialise the library: -```python -from charms.haproxy.v2.haproxy_route import HaproxyRouteProvider - -class SomeCharm(CharmBase): - self.haproxy_route_provider = HaproxyRouteProvider(self) - self.framework.observe( - self.haproxy_route_provider.on.data_available, self._on_haproxy_route_data_available - ) - - def _on_haproxy_route_data_available(self, event: EventBase) -> None: - data = self.haproxy_route_provider.get_data(self.haproxy_route_provider.relations) - ... 
-""" - -import json -import logging -from collections import defaultdict -from enum import Enum -from functools import partial -from typing import Annotated, Any, Literal, MutableMapping, Optional, cast - -from ops import CharmBase, ModelError, RelationBrokenEvent -from ops.charm import CharmEvents -from ops.framework import EventBase, EventSource, Object -from ops.model import Relation -from pydantic import ( - AnyHttpUrl, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - IPvAnyAddress, - ValidationError, - field_validator, - model_validator, -) -from pydantic.dataclasses import dataclass -from typing_extensions import Self -from validators import domain - -# The unique Charmhub library identifier, never change it -LIBID = "08b6347482f6455486b5f5bb4dc4e6cf" - -# Increment this major API version when introducing breaking changes -LIBAPI = 2 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 1 - -logger = logging.getLogger(__name__) -HAPROXY_ROUTE_RELATION_NAME = "haproxy-route" -HAPROXY_CONFIG_INVALID_CHARACTERS = "\n\t#\\'\"\r$ " -HAPROXY_EXPR_INVALID_CHARACTERS = "\n" - - -def value_contains_invalid_characters( - invalid_characters: str, value: Optional[str] -) -> Optional[str]: - """Validate if value contains invalid config characters. - - Args: - invalid_characters: String with the list of invalid characters. - value: The value to validate. - - Raises: - ValueError: When value contains invalid characters. - - Returns: - The validated value. - """ - if value is None: - return value - - if [char for char in value if char in invalid_characters]: - raise ValueError(f"Relation data contains invalid character(s) {value}") - return value - - -def valid_domain_with_wildcard(value: str) -> str: - """Validate if value is a valid domain that can include a wildcard. - - The wildcard character (*) can't be at the TLD level, for example *.com is not valid. 
- This is supported natively by the library ( e.g domain("com") will raise a ValidationError ). - - Raises: - ValueError: When value is not a valid domain. - - Args: - value: The value to validate. - """ - fqdn = value[2:] if value.startswith("*.") else value - if not bool(domain(fqdn)): - raise ValueError(f"Invalid domain: {value}") - return value - - -VALIDSTR = Annotated[ - str, - BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_CONFIG_INVALID_CHARACTERS)), -] -VALIDEXPRSTR = Annotated[ - str, - BeforeValidator(partial(value_contains_invalid_characters, HAPROXY_EXPR_INVALID_CHARACTERS)), -] - - -class DataValidationError(Exception): - """Raised when data validation fails.""" - - -class HaproxyRouteInvalidRelationDataError(Exception): - """Rasied when data validation of the haproxy-route relation fails.""" - - -class _DatabagModel(BaseModel): - """Base databag model. - - Attrs: - model_config: pydantic model configuration. - """ - - model_config = ConfigDict( - # tolerate additional keys in databag - extra="ignore", - # Allow instantiating this class by field name (instead of forcing alias). - populate_by_name=True, - # Custom config key: whether to nest the whole datastructure (as json) - # under a field or spread it out at the toplevel. - _NEST_UNDER=None, - ) # type: ignore - """Pydantic config.""" - - @classmethod - def load(cls, databag: MutableMapping) -> "_DatabagModel": - """Load this model from a Juju json databag. - - Args: - databag: Databag content. - - Raises: - DataValidationError: When model validation failed. - - Returns: - _DatabagModel: The validated model. 
- """ - nest_under = cls.model_config.get("_NEST_UNDER") - if nest_under: - return cls.model_validate(json.loads(databag[nest_under])) - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.model_fields.items()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. {databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.model_validate_json(json.dumps(data)) - except ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.error(str(e), exc_info=True) - raise DataValidationError(msg) from e - - @classmethod - def from_dict(cls, values: dict) -> "_DatabagModel": - """Load this model from a dict. - - Args: - values: Dict values. - - Raises: - DataValidationError: When model validation failed. - - Returns: - _DatabagModel: The validated model. - """ - try: - logger.info("Loading values from dictionary: %s", values) - return cls.model_validate(values) - except ValidationError as e: - msg = f"failed to validate: {values}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump( - self, databag: Optional[MutableMapping] = None, clear: bool = True - ) -> Optional[MutableMapping]: - """Write the contents of this model to Juju databag. - - Args: - databag: The databag to write to. - clear: Whether to clear the databag before writing. - - Returns: - MutableMapping: The databag. 
- """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - nest_under = self.model_config.get("_NEST_UNDER") - if nest_under: - databag[nest_under] = self.model_dump_json( - by_alias=True, - # skip keys whose values are default - exclude_defaults=True, - ) - return databag - - dct = self.model_dump(mode="json", by_alias=True, exclude_defaults=True) - databag.update({k: json.dumps(v) for k, v in dct.items()}) - return databag - - -class ServerHealthCheck(BaseModel): - """Configuration model for backend server health checks. - - Attributes: - interval: Number of seconds between consecutive health check attempts. - rise: Number of consecutive successful health checks required for up. - fall: Number of consecutive failed health checks required for DOWN. - path: List of URL paths to use for HTTP health checks. - port: Customize port value for http-check. - """ - - interval: Optional[int] = Field( - description="The interval (in seconds) between health checks.", default=None - ) - rise: Optional[int] = Field( - description="How many successful health checks before server is considered up.", - default=None, - ) - fall: Optional[int] = Field( - description="How many failed health checks before server is considered down.", default=None - ) - path: Optional[VALIDSTR] = Field(description="The health check path.", default=None) - port: Optional[int] = Field(description="The health check port.", default=None) - - @model_validator(mode="after") - def check_all_required_fields_set(self) -> Self: - """Check that all required fields for health check are set. - - Raises: - ValueError: When validation fails. - - Returns: - The validated model. - """ - if not bool(self.interval) == bool(self.rise) == bool(self.fall): - raise ValueError("All three of interval, rise and fall must be set.") - return self - - -# tarpit is not yet implemented -class RateLimitPolicy(Enum): - """Enum of possible rate limiting policies. 
- - Attrs: - DENY: deny a client's HTTP request to return a 403 Forbidden error. - REJECT: closes the connection immediately without sending a response. - SILENT: disconnects immediately without notifying the client - that the connection has been closed. - """ - - DENY = "deny" - REJECT = "reject" - SILENT = "silent-drop" - - -class RateLimit(BaseModel): - """Configuration model for connection rate limiting. - - Attributes: - connections_per_minute: Number of connections allowed per minute for a client. - policy: Action to take when the rate limit is exceeded. - """ - - connections_per_minute: int = Field(description="How many connections are allowed per minute.") - policy: RateLimitPolicy = Field( - description="Configure the rate limit policy.", default=RateLimitPolicy.DENY - ) - - -class LoadBalancingAlgorithm(Enum): - """Enum of possible http_route types. - - Attrs: - LEASTCONN: The server with the lowest number of connections receives the connection. - SRCIP: Load balance using the hash of The source IP address. - ROUNDROBIN: Each server is used in turns, according to their weights. - COOKIE: Load balance using hash req.cookie(clientid). - """ - - LEASTCONN = "leastconn" - SRCIP = "source" - ROUNDROBIN = "roundrobin" - COOKIE = "cookie" - - -class LoadBalancingConfiguration(BaseModel): - """Configuration model for load balancing. - - Attributes: - algorithm: Algorithm to use for load balancing. - cookie: Cookie name to use when algorithm is set to cookie. - consistent_hashing: Use consistent hashing to avoid redirection - when servers are added/removed. - """ - - algorithm: LoadBalancingAlgorithm = Field( - description="Configure the load balancing algorithm for the service.", - default=LoadBalancingAlgorithm.LEASTCONN, - ) - cookie: Optional[VALIDSTR] = Field( - description="Only used when algorithm is COOKIE. 
Define the cookie to load balance on.", - default=None, - ) - # Note: Later when the generic LoadBalancingAlgorithm.HASH is implemented this attribute - # will also apply under that mode. - consistent_hashing: bool = Field( - description=( - "Only used when the `algorithm` is SRCIP or COOKIE. " - "Use consistent hashing to avoid redirection when servers are added/removed. " - "Default is False as it usually does not give a balanced distribution." - ), - default=False, - ) - - @model_validator(mode="after") - def validate_attributes(self) -> Self: - """Check that algorithm-specific configs are only set with their respective algorithm. - - Raises: - ValueError: When validation fails in one of these cases: - 1. self.cookie is not None when self.algorithm != COOKIE - 2. self.consistent_hashing is True when algorithm is neither COOKIE nor SRCIP - - Returns: - The validated model. - """ - if self.cookie is not None and self.algorithm != LoadBalancingAlgorithm.COOKIE: - raise ValueError("cookie only applies when algorithm is COOKIE.") - - if self.consistent_hashing and self.algorithm not in [ - LoadBalancingAlgorithm.COOKIE, - LoadBalancingAlgorithm.SRCIP, - ]: - raise ValueError("Consistent hashing only applies when algorithm is COOKIE or SRCIP.") - return self - - -class BandwidthLimit(BaseModel): - """Configuration model for bandwidth rate limiting. - - Attributes: - upload: Limit upload speed (bytes per second). - download: Limit download speed (bytes per second). - """ - - upload: Optional[int] = Field(description="Upload limit (bytes per seconds).", default=None) - download: Optional[int] = Field( - description="Download limit (bytes per seconds).", default=None - ) - - -# retry-on is not yet implemented -class Retry(BaseModel): - """Configuration model for retry. - - Attributes: - count: How many times should a request retry. - redispatch: Whether to redispatch failed requests to another server. 
- """ - - count: int = Field(description="How many times should a request retry.") - redispatch: bool = Field( - description="Whether to redispatch failed requests to another server.", default=False - ) - - -class TimeoutConfiguration(BaseModel): - """Configuration model for timeout. - - Attributes: - server: Timeout for requests from haproxy to backend servers. - connect: Timeout for client requests to haproxy. - queue: Timeout for requests waiting in the queue after server-maxconn is reached. - """ - - server: int = Field( - description="Timeout (in seconds) for requests from haproxy to backend servers.", - default=60, - ) - connect: int = Field( - description="Timeout (in seconds) for client requests to haproxy.", default=60 - ) - queue: int = Field( - description="Timeout (in seconds) for requests in the queue.", - default=60, - ) - - -class HaproxyRewriteMethod(Enum): - """Enum of possible HTTP rewrite methods. - - Attrs: - SET_PATH: The server with the lowest number of connections receives the connection. - SET_QUERY: Load balance using the hash of The source IP address. - SET_HEADER: Each server is used in turns, according to their weights. - """ - - SET_PATH = "set-path" - SET_QUERY = "set-query" - SET_HEADER = "set-header" - - -class RewriteConfiguration(BaseModel): - """Configuration model for HTTP rewrite. - - Attributes: - method: Which rewrite method to apply.One of set-path, set-query, set-header. - expression: Regular expression to use with the rewrite method. - header: The name of the header to rewrited. - """ - - method: HaproxyRewriteMethod = Field( - description="Which rewrite method to apply.One of set-path, set-query, set-header." - ) - expression: VALIDEXPRSTR = Field( - description="Regular expression to use with the rewrite method." 
- ) - header: Optional[VALIDSTR] = Field( - description="The name of the header to rewrite.", default=None - ) - - -class RequirerApplicationData(_DatabagModel): - """Configuration model for HAProxy route requirer application data. - - Attributes: - service: Name of the service requesting HAProxy routing. - ports: List of port numbers on which the service is listening. - protocol: The protocol that the service speaks. - hosts: List of backend server addresses. - paths: List of URL paths to route to this service. Defaults to an empty list. - hostname: Optional: The hostname of this service. - additional_hostnames: List of additional hostnames of this service. - Defaults to an empty list. - rewrites: List of RewriteConfiguration objects defining path, query, or header - rewrite rules. - check: ServerHealthCheck configuration for monitoring backend health. - load_balancing: Configuration for the load balancing strategy. - rate_limit: Optional configuration for limiting connection rates. - bandwidth_limit: Optional configuration for limiting upload and download bandwidth. - retry: Optional configuration for request retry behavior. - deny_paths: List of URL paths that should not be routed to the backend. - timeout: Configuration for server, client, and queue timeouts. - server_maxconn: Optional maximum number of connections per server. - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. Defaults to False. - Warning: enabling HTTP is a security risk, make sure you apply the necessary precautions. - external_grpc_port: Optional external gRPC port. 
- """ - - service: VALIDSTR = Field(description="The name of the service.") - ports: list[int] = Field(description="The list of ports listening for this service.") - protocol: Literal["http", "https"] = Field( - description="The protocol that the service speaks.", - default="http", - ) - hosts: list[IPvAnyAddress] = Field( - description="The list of backend server addresses. Currently only support IP addresses.", - default=[], - ) - paths: list[VALIDSTR] = Field( - description="The list of paths to route to this service.", default=[] - ) - hostname: Optional[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = Field( - description="Hostname of this service.", default=None - ) - additional_hostnames: list[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = ( - Field(description="The list of additional hostnames of this service.", default=[]) - ) - rewrites: list[RewriteConfiguration] = Field( - description="The list of path rewrite rules.", default=[] - ) - check: Optional[ServerHealthCheck] = Field( - description="Configure health check for the service.", - default=None, - ) - load_balancing: LoadBalancingConfiguration = Field( - description="Configure loadbalancing.", default=LoadBalancingConfiguration() - ) - rate_limit: Optional[RateLimit] = Field( - description="Configure rate limit for the service.", default=None - ) - bandwidth_limit: BandwidthLimit = Field( - description="Configure bandwidth limit for the service.", default=BandwidthLimit() - ) - retry: Optional[Retry] = Field( - description="Configure retry for incoming requests.", default=None - ) - deny_paths: list[VALIDSTR] = Field( - description="Configure path that should not be routed to the backend", default=[] - ) - timeout: TimeoutConfiguration = Field( - description="Configure timeout", - default=TimeoutConfiguration(), - ) - server_maxconn: Optional[int] = Field( - description="Configure maximum connection per server", default=None - ) - http_server_close: bool = Field( - 
description="Configure server close after request", default=False - ) - allow_http: bool = Field( - description="Whether to allow HTTP traffic in addition to HTTPS.", default=False - ) - external_grpc_port: int | None = Field( - description="Optional external gRPC port.", default=None, gt=0, le=65535 - ) - - @field_validator("load_balancing") - @classmethod - def validate_load_balancing_configuration( - cls, configuration: LoadBalancingConfiguration - ) -> LoadBalancingConfiguration: - """Validate the parsed load balancing configuration. - - Args: - configuration: The configuration to validate. - - Raises: - ValueError: When cookie is not set under COOKIE load balancing mode. - - Returns: - LoadBalancingConfiguration: The validated configuration. - """ - if configuration.algorithm == LoadBalancingAlgorithm.COOKIE and not configuration.cookie: - raise ValueError("cookie must be set if load balacing algorithm is COOKIE.") - return configuration - - @field_validator("rewrites") - @classmethod - def validate_rewrites(cls, rewrites: list[RewriteConfiguration]) -> list[RewriteConfiguration]: - """Validate the parsed list of rewrite configurations. - - Args: - rewrites: The configurations to validate. - - Raises: - ValueError: When header is not set under SET_HEADER rewrite method. - - Returns: - list[RewriteConfiguration]: The validated configurations. - """ - for rewrite in rewrites: - if rewrite.method == HaproxyRewriteMethod.SET_HEADER and not rewrite.method: - raise ValueError("header must be set if rewrite method is SET_HEADER.") - return rewrites - - -class HaproxyRouteProviderAppData(_DatabagModel): - """haproxy-route provider databag schema. - - Attributes: - endpoints: The list of proxied endpoints that maps to the backend. - """ - - endpoints: list[AnyHttpUrl] - - -class RequirerUnitData(_DatabagModel): - """haproxy-route requirer unit data. - - Attributes: - address: IP address of the unit. 
- """ - - address: IPvAnyAddress = Field(description="IP address of the unit.") - - -@dataclass -class HaproxyRouteRequirerData: - """haproxy-route requirer data. - - Attributes: - relation_id: Id of the relation. - application_data: Application data. - units_data: Units data - """ - - relation_id: int - application_data: RequirerApplicationData - units_data: list[RequirerUnitData] - - -@dataclass -class HaproxyRouteRequirersData: - """haproxy-route requirers data. - - Attributes: - requirers_data: List of requirer data. - relation_ids_with_invalid_data: Set of relation ids that contains invalid data. - """ - - requirers_data: list[HaproxyRouteRequirerData] - relation_ids_with_invalid_data: set[int] - - @model_validator(mode="after") - def check_services_unique(self) -> Self: - """Check that requirers define unique services. - - Raises: - DataValidationError: When requirers declared duplicate services. - - Returns: - The validated model. - """ - services = [ - requirer_data.application_data.service for requirer_data in self.requirers_data - ] - if len(services) != len(set(services)): - raise DataValidationError("Services declaration by requirers must be unique.") - - return self - - @model_validator(mode="after") - def check_external_grpc_port_unique(self) -> Self: - """Check that external gRPC ports are unique across requirer applications. - If multiple requirer applications declare the same external gRPC port, - their relation ids are added to relation_ids_with_invalid_data. - - Returns: - The validated model. 
- """ - relation_ids_per_port: dict[int, list[int]] = defaultdict(list[int]) - for requirer_data in self.requirers_data: - if requirer_data.application_data.external_grpc_port: - relation_ids_per_port[requirer_data.application_data.external_grpc_port].append( - requirer_data.relation_id - ) - - self.relation_ids_with_invalid_data.update( - relation_id - for relation_ids in relation_ids_per_port.values() - for relation_id in relation_ids - if len(relation_ids) > 1 - ) - return self - - @model_validator(mode="after") - def check_grpc_requires_https(self) -> Self: - """Check that backends with external_grpc_port use https protocol. - If not, their relation ids are added to relation_ids_with_invalid_data. - - Returns: - Self: The validated model - """ - for requirer_data in self.requirers_data: - if all( - [ - requirer_data.application_data.external_grpc_port is not None, - requirer_data.application_data.protocol != "https", - requirer_data.relation_id, - ] - ): - self.relation_ids_with_invalid_data.add(requirer_data.relation_id) - return self - - -class HaproxyRouteDataAvailableEvent(EventBase): - """HaproxyRouteDataAvailableEvent custom event. - - This event indicates that the requirers data are available. - """ - - -class HaproxyRouteDataRemovedEvent(EventBase): - """HaproxyRouteDataRemovedEvent custom event. - - This event indicates that one of the endpoints was removed. - """ - - -class HaproxyRouteProviderEvents(CharmEvents): - """List of events that the TLS Certificates requirer charm can leverage. - - Attributes: - data_available: This event indicates that - the haproxy-route endpoints are available. - data_removed: This event indicates that one of the endpoints was removed. - """ - - data_available = EventSource(HaproxyRouteDataAvailableEvent) - data_removed = EventSource(HaproxyRouteDataRemovedEvent) - - -class HaproxyRouteProvider(Object): - """Haproxy-route interface provider implementation. - - Attributes: - on: Custom events of the provider. 
- relations: Related appliations. - """ - - on = HaproxyRouteProviderEvents() - - def __init__( - self, - charm: CharmBase, - relation_name: str = HAPROXY_ROUTE_RELATION_NAME, - raise_on_validation_error: bool = False, - ) -> None: - """Initialize the HaproxyRouteProvider. - - Args: - charm: The charm that is instantiating the library. - relation_name: The name of the relation. - raise_on_validation_error: Whether the library should raise - HaproxyRouteInvalidRelationDataError when requirer data validation fails. - If this is set to True the provider charm needs to also catch and handle the - thrown exception. - """ - super().__init__(charm, relation_name) - - self._relation_name = relation_name - self.charm = charm - self.raise_on_validation_error = raise_on_validation_error - on = self.charm.on - self.framework.observe(on[self._relation_name].relation_created, self._configure) - self.framework.observe(on[self._relation_name].relation_changed, self._configure) - self.framework.observe(on[self._relation_name].relation_broken, self._on_endpoint_removed) - self.framework.observe( - on[self._relation_name].relation_departed, self._on_endpoint_removed - ) - - @property - def relations(self) -> list[Relation]: - """The list of Relation instances associated with this endpoint.""" - return list(self.charm.model.relations[self._relation_name]) - - def _configure(self, _event: EventBase) -> None: - """Handle relation events.""" - if relations := self.relations: - # Only for data validation - _ = self.get_data(relations) - self.on.data_available.emit() - - def _on_endpoint_removed(self, _: EventBase) -> None: - """Handle relation broken/departed events.""" - self.on.data_removed.emit() - - def get_data(self, relations: list[Relation]) -> HaproxyRouteRequirersData: - """Fetch requirer data. - - Args: - relations: A list of Relation instances to fetch data from. - - Raises: - HaproxyRouteInvalidRelationDataError: When requirer data validation fails. 
- - Returns: - HaproxyRouteRequirersData: Validated data from all haproxy-route requirers. - """ - requirers_data: list[HaproxyRouteRequirerData] = [] - relation_ids_with_invalid_data: set[int] = set() - for relation in relations: - try: - application_data = self._get_requirer_application_data(relation) - units_data = self._get_requirer_units_data(relation) - haproxy_route_requirer_data = HaproxyRouteRequirerData( - application_data=application_data, - units_data=units_data, - relation_id=relation.id, - ) - requirers_data.append(haproxy_route_requirer_data) - except DataValidationError as exc: - if self.raise_on_validation_error: - logger.error( - "haproxy-route data validation failed for relation %s: %s", - relation, - str(exc), - ) - raise HaproxyRouteInvalidRelationDataError( - f"haproxy-route data validation failed for relation: {relation}" - ) from exc - relation_ids_with_invalid_data.add(relation.id) - continue - return HaproxyRouteRequirersData( - requirers_data=requirers_data, - relation_ids_with_invalid_data=relation_ids_with_invalid_data, - ) - - def _get_requirer_units_data(self, relation: Relation) -> list[RequirerUnitData]: - """Fetch and validate the requirer's units data. - - Args: - relation: The relation to fetch unit data from. - - Raises: - DataValidationError: When unit data validation fails. - - Returns: - list[RequirerUnitData]: List of validated unit data from the requirer. - """ - requirer_units_data: list[RequirerUnitData] = [] - - for unit in relation.units: - databag = relation.data.get(unit) - if not databag: - logger.error( - "Requirer unit data does not exist even though the unit is still present." 
- ) - continue - try: - data = cast(RequirerUnitData, RequirerUnitData.load(databag)) - requirer_units_data.append(data) - except DataValidationError: - logger.error("Invalid requirer application data for %s", unit) - raise - return requirer_units_data - - def _get_requirer_application_data(self, relation: Relation) -> RequirerApplicationData: - """Fetch and validate the requirer's application databag. - - Args: - relation: The relation to fetch application data from. - - Raises: - DataValidationError: When requirer application data validation fails. - - Returns: - RequirerApplicationData: Validated application data from the requirer. - """ - try: - return cast( - RequirerApplicationData, RequirerApplicationData.load(relation.data[relation.app]) - ) - except DataValidationError: - logger.error("Invalid requirer application data for %s", relation.app.name) - raise - - def publish_proxied_endpoints(self, endpoints: list[str], relation: Relation) -> None: - """Publish to the app databag the proxied endpoints. - - Args: - endpoints: The list of proxied endpoints to publish. - relation: The relation with the requirer application. - """ - HaproxyRouteProviderAppData(endpoints=[cast(AnyHttpUrl, e) for e in endpoints]).dump( - relation.data[self.charm.app], clear=True - ) - - -class HaproxyRouteEnpointsReadyEvent(EventBase): - """HaproxyRouteEnpointsReadyEvent custom event.""" - - -class HaproxyRouteEndpointsRemovedEvent(EventBase): - """HaproxyRouteEndpointsRemovedEvent custom event.""" - - -class HaproxyRouteRequirerEvents(CharmEvents): - """List of events that the TLS Certificates requirer charm can leverage. - - Attributes: - ready: when the provider proxied endpoints are ready. - removed: when the provider - """ - - ready = EventSource(HaproxyRouteEnpointsReadyEvent) - removed = EventSource(HaproxyRouteEndpointsRemovedEvent) - - -class HaproxyRouteRequirer(Object): - """haproxy-route interface requirer implementation. 
- - Attributes: - on: Custom events of the requirer. - """ - - on = HaproxyRouteRequirerEvents() - - # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals - def __init__( - self, - charm: CharmBase, - relation_name: str, - service: Optional[str] = None, - ports: Optional[list[int]] = None, - protocol: Literal["http", "https"] = "http", - hosts: Optional[list[IPvAnyAddress]] = None, - paths: Optional[list[str]] = None, - hostname: Optional[str] = None, - additional_hostnames: Optional[list[str]] = None, - check_interval: Optional[int] = None, - check_rise: Optional[int] = None, - check_fall: Optional[int] = None, - check_path: Optional[str] = None, - check_port: Optional[int] = None, - path_rewrite_expressions: Optional[list[str]] = None, - query_rewrite_expressions: Optional[list[str]] = None, - header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, - load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, - load_balancing_cookie: Optional[str] = None, - load_balancing_consistent_hashing: bool = False, - rate_limit_connections_per_minute: Optional[int] = None, - rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, - upload_limit: Optional[int] = None, - download_limit: Optional[int] = None, - retry_count: Optional[int] = None, - retry_redispatch: bool = False, - deny_paths: Optional[list[str]] = None, - server_timeout: int = 60, - connect_timeout: int = 60, - queue_timeout: int = 60, - server_maxconn: Optional[int] = None, - unit_address: Optional[str] = None, - http_server_close: bool = False, - allow_http: bool = False, - ) -> None: - """Initialize the HaproxyRouteRequirer. - - Args: - charm: The charm that is instantiating the library. - relation_name: The name of the relation to bind to. - service: The name of the service to route traffic to. - ports: List of ports the service is listening on. - protocol: The protocol that the service speaks. - hosts: List of backend server addresses. 
Currently only support IP addresses. - paths: List of URL paths to route to this service. - hostname: Hostname of this service. - additional_hostnames: Additional hostnames of this service. - check_interval: Interval between health checks in seconds. - check_rise: Number of successful health checks before server is considered up. - check_fall: Number of failed health checks before server is considered down. - check_path: The path to use for server health checks. - check_port: The port to use for http-check. - path_rewrite_expressions: List of regex expressions for path rewrites. - query_rewrite_expressions: List of regex expressions for query rewrites. - header_rewrite_expressions: List of tuples containing header name - and rewrite expression. - load_balancing_algorithm: Algorithm to use for load balancing. - load_balancing_cookie: Cookie name to use when algorithm is set to cookie. - load_balancing_consistent_hashing: Whether to use consistent hashing. - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - upload_limit: Maximum upload bandwidth in bytes per second. - download_limit: Maximum download bandwidth in bytes per second. - retry_count: Number of times to retry failed requests. - retry_redispatch: Whether to redispatch failed requests to another server. - deny_paths: List of paths that should not be routed to the backend. - server_timeout: Timeout for requests from haproxy to backend servers in seconds. - connect_timeout: Timeout for client requests to haproxy in seconds. - queue_timeout: Timeout for requests waiting in queue in seconds. - server_maxconn: Maximum connections per server. - unit_address: IP address of the unit (if not provided, will use binding address). - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. 
- Warning: enabling HTTP is a security risk, - make sure you apply the necessary precautions. - """ - super().__init__(charm, relation_name) - - self._relation_name = relation_name - self.relation = self.model.get_relation(self._relation_name) - self.charm = charm - self.app = self.charm.app - - # build the full application data - self._application_data = self._generate_application_data( - service, - ports, - protocol, - hosts, - paths, - hostname, - additional_hostnames, - check_interval, - check_rise, - check_fall, - check_path, - check_port, - path_rewrite_expressions, - query_rewrite_expressions, - header_rewrite_expressions, - load_balancing_algorithm, - load_balancing_cookie, - load_balancing_consistent_hashing, - rate_limit_connections_per_minute, - rate_limit_policy, - upload_limit, - download_limit, - retry_count, - retry_redispatch, - deny_paths, - server_timeout, - connect_timeout, - queue_timeout, - server_maxconn, - http_server_close, - allow_http, - ) - self._unit_address = unit_address - - on = self.charm.on - self.framework.observe(on[self._relation_name].relation_created, self._configure) - self.framework.observe(on[self._relation_name].relation_changed, self._configure) - self.framework.observe(on[self._relation_name].relation_broken, self._on_relation_broken) - - def _configure(self, _: EventBase) -> None: - """Handle relation events.""" - self.update_relation_data() - if self.relation and self.get_proxied_endpoints(): - # This event is only emitted when the provider databag changes - # which only happens when relevant changes happened - # Additionally this event is purely informational and it's up to the requirer to - # fetch the proxied endpoints in their code using get_proxied_endpoints - self.on.ready.emit() - - def _on_relation_broken(self, _: RelationBrokenEvent) -> None: - """Handle relation broken event.""" - self.on.removed.emit() - - # pylint: disable=too-many-arguments,too-many-positional-arguments - def 
provide_haproxy_route_requirements( - self, - service: str, - ports: list[int], - protocol: Literal["http", "https"] = "http", - hosts: Optional[list[IPvAnyAddress]] = None, - paths: Optional[list[str]] = None, - hostname: Optional[str] = None, - additional_hostnames: Optional[list[str]] = None, - check_interval: Optional[int] = None, - check_rise: Optional[int] = None, - check_fall: Optional[int] = None, - check_path: Optional[str] = None, - check_port: Optional[int] = None, - path_rewrite_expressions: Optional[list[str]] = None, - query_rewrite_expressions: Optional[list[str]] = None, - header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, - load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, - load_balancing_cookie: Optional[str] = None, - load_balancing_consistent_hashing: bool = False, - rate_limit_connections_per_minute: Optional[int] = None, - rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, - upload_limit: Optional[int] = None, - download_limit: Optional[int] = None, - retry_count: Optional[int] = None, - retry_redispatch: bool = False, - deny_paths: Optional[list[str]] = None, - server_timeout: int = 60, - connect_timeout: int = 60, - queue_timeout: int = 60, - server_maxconn: Optional[int] = None, - unit_address: Optional[str] = None, - http_server_close: bool = False, - allow_http: bool = False, - external_grpc_port: Optional[int] = None, - ) -> None: - """Update haproxy-route requirements data in the relation. - - Args: - service: The name of the service to route traffic to. - ports: List of ports the service is listening on. - protocol: The protocol that the serive speaks, deafults to "http". - hosts: List of backend server addresses. Currently only support IP addresses. - paths: List of URL paths to route to this service. - hostname: Hostname of this service. - additional_hostnames: Additional hostnames of this service. - check_interval: Interval between health checks in seconds. 
- check_rise: Number of successful health checks before server is considered up. - check_fall: Number of failed health checks before server is considered down. - check_path: The path to use for server health checks. - check_port: The port to use for http-check. - path_rewrite_expressions: List of regex expressions for path rewrites. - query_rewrite_expressions: List of regex expressions for query rewrites. - header_rewrite_expressions: List of tuples containing header name - and rewrite expression. - load_balancing_algorithm: Algorithm to use for load balancing. - load_balancing_cookie: Cookie name to use when algorithm is set to cookie. - load_balancing_consistent_hashing: Whether to use consistent hashing. - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - upload_limit: Maximum upload bandwidth in bytes per second. - download_limit: Maximum download bandwidth in bytes per second. - retry_count: Number of times to retry failed requests. - retry_redispatch: Whether to redispatch failed requests to another server. - deny_paths: List of paths that should not be routed to the backend. - server_timeout: Timeout for requests from haproxy to backend servers in seconds. - connect_timeout: Timeout for client requests to haproxy in seconds. - queue_timeout: Timeout for requests waiting in queue in seconds. - server_maxconn: Maximum connections per server. - unit_address: IP address of the unit (if not provided, will use binding address). - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. - Warning: enabling HTTP is a security risk, - make sure you apply the necessary precautions. - external_grpc_port: Optional external gRPC port. 
- """ - self._unit_address = unit_address - self._application_data = self._generate_application_data( - service, - ports, - protocol, - hosts, - paths, - hostname, - additional_hostnames, - check_interval, - check_rise, - check_fall, - check_path, - check_port, - path_rewrite_expressions, - query_rewrite_expressions, - header_rewrite_expressions, - load_balancing_algorithm, - load_balancing_cookie, - load_balancing_consistent_hashing, - rate_limit_connections_per_minute, - rate_limit_policy, - upload_limit, - download_limit, - retry_count, - retry_redispatch, - deny_paths, - server_timeout, - connect_timeout, - queue_timeout, - server_maxconn, - http_server_close, - allow_http, - external_grpc_port, - ) - self.update_relation_data() - - # pylint: disable=too-many-arguments,too-many-positional-arguments,too-many-locals - def _generate_application_data( # noqa: C901 - self, - service: Optional[str] = None, - ports: Optional[list[int]] = None, - protocol: Literal["http", "https"] = "http", - hosts: Optional[list[IPvAnyAddress]] = None, - paths: Optional[list[str]] = None, - hostname: Optional[str] = None, - additional_hostnames: Optional[list[str]] = None, - check_interval: Optional[int] = None, - check_rise: Optional[int] = None, - check_fall: Optional[int] = None, - check_path: Optional[str] = None, - check_port: Optional[int] = None, - path_rewrite_expressions: Optional[list[str]] = None, - query_rewrite_expressions: Optional[list[str]] = None, - header_rewrite_expressions: Optional[list[tuple[str, str]]] = None, - load_balancing_algorithm: LoadBalancingAlgorithm = LoadBalancingAlgorithm.LEASTCONN, - load_balancing_cookie: Optional[str] = None, - load_balancing_consistent_hashing: bool = False, - rate_limit_connections_per_minute: Optional[int] = None, - rate_limit_policy: RateLimitPolicy = RateLimitPolicy.DENY, - upload_limit: Optional[int] = None, - download_limit: Optional[int] = None, - retry_count: Optional[int] = None, - retry_redispatch: bool = False, - 
deny_paths: Optional[list[str]] = None, - server_timeout: int = 60, - connect_timeout: int = 60, - queue_timeout: int = 60, - server_maxconn: Optional[int] = None, - http_server_close: bool = False, - allow_http: bool = False, - external_grpc_port: Optional[int] = None, - ) -> dict[str, Any]: - """Generate the complete application data structure. - - Args: - service: The name of the service to route traffic to. - ports: List of ports the service is listening on. - protocol: The protocol that the service speaks. - hosts: List of backend server addresses. Currently only support IP addresses. - paths: List of URL paths to route to this service. - hostname: Hostname of this service. - additional_hostnames: Additional hostnames of this service. - check_interval: Interval between health checks in seconds. - check_rise: Number of successful health checks before server is considered up. - check_fall: Number of failed health checks before server is considered down. - check_path: The path to use for server health checks. - check_port: The port to use for http-check. - path_rewrite_expressions: List of regex expressions for path rewrites. - query_rewrite_expressions: List of regex expressions for query rewrites. - header_rewrite_expressions: List of tuples containing header name and - rewrite expression. - load_balancing_algorithm: Algorithm to use for load balancing. - load_balancing_cookie: Cookie name to use when algorithm is set to cookie. - load_balancing_consistent_hashing: Whether to use consistent hashing. - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - upload_limit: Maximum upload bandwidth in bytes per second. - download_limit: Maximum download bandwidth in bytes per second. - retry_count: Number of times to retry failed requests. - retry_redispatch: Whether to redispatch failed requests to another server. 
- deny_paths: List of paths that should not be routed to the backend. - server_timeout: Timeout for requests from haproxy to backend servers in seconds. - connect_timeout: Timeout for client requests to haproxy in seconds. - queue_timeout: Timeout for requests waiting in queue in seconds. - server_maxconn: Maximum connections per server. - http_server_close: Configure server close after request. - allow_http: Whether to allow HTTP traffic in addition to HTTPS. - Warning: enabling HTTP is a security risk, - make sure you apply the necessary precautions. - external_grpc_port: Optional external gRPC port. - - Returns: - dict: A dictionary containing the complete application data structure. - """ - # Apply default value to list parameters to avoid problems with mutable default args. - if not ports: - ports = [] - if not hosts: - hosts = [] - if not paths: - paths = [] - if not additional_hostnames: - additional_hostnames = [] - if not path_rewrite_expressions: - path_rewrite_expressions = [] - if not query_rewrite_expressions: - query_rewrite_expressions = [] - if not header_rewrite_expressions: - header_rewrite_expressions = [] - if not deny_paths: - deny_paths = [] - - application_data: dict[str, Any] = { - "service": service, - "ports": ports, - "protocol": protocol, - "hosts": hosts, - "paths": paths, - "hostname": hostname, - "additional_hostnames": additional_hostnames, - "load_balancing": { - "algorithm": load_balancing_algorithm, - "cookie": load_balancing_cookie, - "consistent_hashing": load_balancing_consistent_hashing, - }, - "timeout": { - "server": server_timeout, - "connect": connect_timeout, - "queue": queue_timeout, - }, - "bandwidth_limit": { - "download": download_limit, - "upload": upload_limit, - }, - "deny_paths": deny_paths, - "server_maxconn": server_maxconn, - "rewrites": self._generate_rewrite_configuration( - path_rewrite_expressions, - query_rewrite_expressions, - header_rewrite_expressions, - ), - "http_server_close": http_server_close, - 
"allow_http": allow_http, - "external_grpc_port": external_grpc_port, - } - - if allow_http: - logger.warning( - "HTTP traffic is allowed alongside HTTPS. " - "This is a security risk, make sure you apply the necessary precautions." - ) - - if check := self._generate_server_healthcheck_configuration( - check_interval, check_rise, check_fall, check_path, check_port - ): - application_data["check"] = check - - if rate_limit := self._generate_rate_limit_configuration( - rate_limit_connections_per_minute, rate_limit_policy - ): - application_data["rate_limit"] = rate_limit - - if retry := self._generate_retry_configuration(retry_count, retry_redispatch): - application_data["retry"] = retry - return application_data - - def _generate_server_healthcheck_configuration( - self, - interval: Optional[int], - rise: Optional[int], - fall: Optional[int], - path: Optional[str], - port: Optional[int], - ) -> dict[str, int | Optional[str]]: - """Generate configuration for server health checks. - - Args: - interval: Time between health checks in seconds. - rise: Number of successful checks before marking server as up. - fall: Number of failed checks before marking server as down. - path: The path to use for health checks. - port: The port to use for http-check. - - Returns: - dict[str, int | Optional[str]]: Health check configuration dictionary. - """ - server_healthcheck_configuration: dict[str, int | Optional[str]] = {} - if interval and rise and fall: - server_healthcheck_configuration = { - "interval": interval, - "rise": rise, - "fall": fall, - "path": path, - "port": port, - } - return server_healthcheck_configuration - - def _generate_rewrite_configuration( - self, - path_rewrite_expressions: list[str], - query_rewrite_expressions: list[str], - header_rewrite_expressions: list[tuple[str, str]], - ) -> list[dict[str, str | HaproxyRewriteMethod]]: - """Generate rewrite configuration from provided expressions. 
- - Args: - path_rewrite_expressions: List of path rewrite expressions. - query_rewrite_expressions: List of query rewrite expressions. - header_rewrite_expressions: List of header name and expression tuples. - - Returns: - list[dict[str, str]]: List of generated rewrite configurations. - """ - # rewrite configuration - rewrite_configurations: list[dict[str, str | HaproxyRewriteMethod]] = [] - for expression in path_rewrite_expressions: - rewrite_configurations.append( - {"method": HaproxyRewriteMethod.SET_PATH, "expression": expression} - ) - for expression in query_rewrite_expressions: - rewrite_configurations.append( - {"method": HaproxyRewriteMethod.SET_QUERY, "expression": expression} - ) - for header, expression in header_rewrite_expressions: - rewrite_configurations.append( - { - "method": HaproxyRewriteMethod.SET_HEADER, - "expression": expression, - "header": header, - } - ) - return rewrite_configurations - - def _generate_rate_limit_configuration( - self, rate_limit_connections_per_minute: Optional[int], rate_limit_policy: RateLimitPolicy - ) -> dict[str, Any]: - """Generate rate limit configuration. - - Args: - rate_limit_connections_per_minute: Maximum connections allowed per minute. - rate_limit_policy: Policy to apply when rate limit is reached. - - Returns: - dict[str, Any]: Rate limit configuration, or empty dict if no limits are set. - """ - rate_limit_configuration = {} - if rate_limit_connections_per_minute: - rate_limit_configuration = { - "connections_per_minute": rate_limit_connections_per_minute, - "policy": rate_limit_policy, - } - return rate_limit_configuration - - def _generate_retry_configuration( - self, count: Optional[int], redispatch: bool - ) -> dict[str, Any]: - """Generate retry configuration. - - Args: - count: Number of times to retry failed requests. - redispatch: Whether to redispatch failed requests to another server. - - Returns: - dict[str, Any]: Retry configuration dictionary, or empty dict if retry not configured. 
- """ - retry_configuration = {} - if count: - retry_configuration = { - "count": count, - "redispatch": redispatch, - } - return retry_configuration - - def update_relation_data(self) -> None: - """Update both application and unit data in the relation.""" - if not self._application_data.get("service") and not self._application_data.get("ports"): - logger.warning("Required field(s) are missing, skipping update of the relation data.") - return - - if relation := self.relation: - self._update_application_data(relation) - self._update_unit_data(relation) - - def _update_application_data(self, relation: Relation) -> None: - """Update application data in the relation databag. - - Args: - relation: The relation instance. - """ - if self.charm.unit.is_leader(): - application_data = self._prepare_application_data() - application_data.dump(relation.data[self.app], clear=True) - - def _update_unit_data(self, relation: Relation) -> None: - """Prepare and update the unit data in the relation databag. - - Args: - relation: The relation instance. - """ - unit_data = self._prepare_unit_data() - unit_data.dump(relation.data[self.charm.unit], clear=True) - - def _prepare_application_data(self) -> RequirerApplicationData: - """Prepare and validate the application data. - - Raises: - DataValidationError: When validation of application data fails. - - Returns: - RequirerApplicationData: The validated application data model. - """ - try: - return cast( - RequirerApplicationData, RequirerApplicationData.from_dict(self._application_data) - ) - except ValidationError as exc: - logger.error("Validation error when preparing requirer application data.") - raise DataValidationError( - "Validation error when preparing requirer application data." - ) from exc - - def _prepare_unit_data(self) -> RequirerUnitData: - """Prepare and validate unit data. - - Raises: - DataValidationError: When no address or unit IP is available. - - Returns: - RequirerUnitData: The validated unit data model. 
- """ - address = self._unit_address - if not address: - network_binding = self.charm.model.get_binding(self._relation_name) - if ( - network_binding is not None - and (bind_address := network_binding.network.bind_address) is not None - ): - address = str(bind_address) - else: - logger.error("No unit IP available.") - raise DataValidationError("No unit IP available.") - return RequirerUnitData(address=cast(IPvAnyAddress, address)) - - def get_proxied_endpoints(self) -> list[AnyHttpUrl]: - """The full ingress URL to reach the current unit. - - Returns: - The provider URL or None if the URL isn't available yet or is not valid. - """ - relation = self.relation - if not relation or not relation.app: - return [] - - # Fetch the provider's app databag - try: - databag = relation.data[relation.app] - except ModelError: - logger.exception("Error reading remote app data.") - return [] - - if not databag: # not ready yet - return [] - - try: - provider_data = cast( - HaproxyRouteProviderAppData, HaproxyRouteProviderAppData.load(databag) - ) - return provider_data.endpoints - except DataValidationError: - logger.exception("Invalid provider url.") - return [] diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 9747e916d..7d728b5ed 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -153,7 +153,7 @@ def set_approved_backend_requests( try: app_data = HaproxyRoutePolicyProviderAppData(approved_requests=approved_requests) - relation.save(app_data, relation.app) + relation.save(app_data, self.charm.app) except ( ValidationError, RelationDataTypeError, @@ -200,7 +200,7 @@ def provide_haproxy_route_policy_requests( try: app_data = 
HaproxyRoutePolicyRequirerAppData(backend_requests=backend_requests) - relation.save(app_data, relation.app) + relation.save(app_data, self.charm.app) except ( ValidationError, RelationDataTypeError, diff --git a/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py b/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py index 74e7b4ebd..b534f46e2 100644 --- a/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py +++ b/haproxy-route-policy-operator/tests/integration/test_haproxy_route_policy_relation.py @@ -33,7 +33,7 @@ def test_haproxy_route_policy_relation( f"{application}:haproxy-route-policy", ) juju.run( - f"{any_charm_haproxy_route_policy_requirer}/0", + f"{any_charm_haproxy_route_policy_requirer}/leader", action="rpc", params={"method": "update_relation"}, ) From 1764981f2f7b790f37eb64318dc2fff35192983f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 21:12:30 +0200 Subject: [PATCH 150/201] remove haproxy-route from charm-libs --- haproxy-route-policy-operator/charmcraft.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 354037832..095818159 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -59,8 +59,6 @@ actions: charm-libs: - lib: data_platform_libs.data_interfaces version: "0" - - lib: haproxy.haproxy_route - version: "2" peers: haproxy-route-policy-peer: From f2ef663c9fa254d196a3122d2f470568138aedf7 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 21:27:34 +0200 Subject: [PATCH 151/201] run integration test for haproxy-route-policy --- .github/workflows/integration_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 37a929c22..b9dccbfe4 100644 --- 
a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -34,7 +34,7 @@ jobs: modules: '["test_charm.py"]' - name: haproxy-route-policy-operator working-directory: ./haproxy-route-policy-operator - modules: '["test_charm.py"]' + modules: '["test_charm.py", "test_haproxy_route_policy_relation.py"]' name: Integration tests for ${{ matrix.charm.name }} uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit From 118ad05392f314f53c2587c2c36fd0e1fa6912b3 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 21:29:32 +0200 Subject: [PATCH 152/201] ruff fmt --- haproxy-route-policy-operator/src/state/policy.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 8d613a383..36f61b2c4 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -143,9 +143,7 @@ def _get_django_admin_credentials( ) -def _get_django_secret_key( - charm: ops.CharmBase, peer_relation: ops.Relation -) -> dict[str, str]: +def _get_django_secret_key(charm: ops.CharmBase, peer_relation: ops.Relation) -> dict[str, str]: """Get the Django secret key from the charm's config. 
Returns: @@ -159,9 +157,7 @@ def _get_django_secret_key( return secret.get_content() except ops.SecretNotFoundError: if charm.unit.is_leader(): - django_secret_key_data = { - "secret-key": secrets.token_urlsafe(SECRET_LENGTH) - } + django_secret_key_data = {"secret-key": secrets.token_urlsafe(SECRET_LENGTH)} secret = charm.app.add_secret( label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data ) From f38ca8791c152c42e89fdfb80c4afb3f190c9df4 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 21:34:44 +0200 Subject: [PATCH 153/201] add change artifact --- docs/release-notes/artifacts/pr0451.yaml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0451.yaml diff --git a/docs/release-notes/artifacts/pr0451.yaml b/docs/release-notes/artifacts/pr0451.yaml new file mode 100644 index 000000000..bdc84c935 --- /dev/null +++ b/docs/release-notes/artifacts/pr0451.yaml @@ -0,0 +1,22 @@ +version_schema: 2 + +changes: + - title: Added haproxy-route-policy relation provider interface to route-policy operator + author: tphan025 + type: minor + description: > + Added a new `haproxy-route-policy` provided relation on + `haproxy-route-policy-operator`, including a new charm library for + requirer/provider databag schemas and validation of backend request data. + Updated the charm to handle relation lifecycle events, read incoming backend + requests from requirers, and publish approved backend requests back through + the relation (currently auto-approving received requests). Added unit tests + for the relation library models and integration tests/fixtures using + any-charm as a requirer to validate end-to-end relation behavior. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/451 + related_doc: + related_issue: + visibility: public + highlight: false From 65894159dcd1d68a0bc0b82f8527cee084d118e2 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 7 Apr 2026 21:37:01 +0200 Subject: [PATCH 154/201] update change artifacts --- docs/release-notes/artifacts/pr0451.yaml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/docs/release-notes/artifacts/pr0451.yaml b/docs/release-notes/artifacts/pr0451.yaml index bdc84c935..b0d43ee9e 100644 --- a/docs/release-notes/artifacts/pr0451.yaml +++ b/docs/release-notes/artifacts/pr0451.yaml @@ -7,12 +7,7 @@ changes: description: > Added a new `haproxy-route-policy` provided relation on `haproxy-route-policy-operator`, including a new charm library for - requirer/provider databag schemas and validation of backend request data. - Updated the charm to handle relation lifecycle events, read incoming backend - requests from requirers, and publish approved backend requests back through - the relation (currently auto-approving received requests). Added unit tests - for the relation library models and integration tests/fixtures using - any-charm as a requirer to validate end-to-end relation behavior. + requirer/provider data schemas and validation. Added basic logic and tests. 
urls: pr: - https://github.com/canonical/haproxy-operator/pull/451 From e59ef3c7bc412cbfb2bb1d292b7bee64c3970e43 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 10 Apr 2026 11:08:03 +0200 Subject: [PATCH 155/201] fix failing tests --- haproxy-route-policy-operator/src/charm.py | 2 +- .../tests/unit/test_charm.py | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 049ad0346..1d481d3bf 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -89,12 +89,12 @@ def _reconcile(self, _: ops.EventBase) -> None: } ) + credentials = self._get_django_admin_credentials(peer_relation) if self.unit.is_leader(): self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") run_migrations() self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") - credentials = self._get_django_admin_credentials(peer_relation) if (username := credentials.get("username")) and ( password := credentials.get("password") ): diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 2db0c4612..57c5672a0 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -29,6 +29,11 @@ def _database_relation() -> testing.Relation: ) +def _peer_relation() -> testing.PeerRelation: + """Build a peer relation.""" + return testing.PeerRelation("haproxy-route-policy-peer") + + def test_install_without_relation_sets_waiting_status(): """ arrange: create charm context without database relation. @@ -36,7 +41,7 @@ def test_install_without_relation_sets_waiting_status(): assert: snap install is invoked and unit waits for database relation data. 
""" ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State() + state = testing.State(relations=[_peer_relation()]) with ( patch("charm.install_snap") as install_snap_mock, @@ -55,7 +60,8 @@ def test_config_changed_reconciles_snap_with_database_credentials(): """ ctx = testing.Context(HaproxyRoutePolicyCharm) state = testing.State( - relations=[_database_relation()], + leader=True, + relations=[_database_relation(), _peer_relation()], secrets=[ testing.Secret( label=DJANGO_SECRET_KEY_SECRET_LABEL, tracked_content={"secret-key": "test"} @@ -115,7 +121,7 @@ def test_config_changed_missing_secrets(secrets): assert: unit in waiting status. """ ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()], secrets=secrets) + state = testing.State(relations=[_database_relation(), _peer_relation()], secrets=secrets) with ( patch("charm.install_snap"), @@ -136,7 +142,9 @@ def test_config_changed_leader_create_secrets(): assert: secrets are created. 
""" ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_database_relation()], secrets=[], leader=True) + state = testing.State( + relations=[_database_relation(), _peer_relation()], secrets=[], leader=True + ) with ( patch("charm.install_snap"), From abe5c3df7fb926b6cc26c31e5b518d609357ce3b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 10 Apr 2026 11:50:45 +0200 Subject: [PATCH 156/201] explicitly hint types, update uv lock, update tests --- haproxy-route-policy-operator/tests/unit/test_charm.py | 4 ++-- .../tests/unit/test_haproxy_route_policy_information.py | 2 +- haproxy-route-policy-operator/uv.lock | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 4faf66867..1949eae8e 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -43,7 +43,7 @@ def test_install_without_relation_sets_waiting_status(): assert: snap install is invoked and unit waits for database relation data. """ ctx = testing.Context(HaproxyRoutePolicyCharm) - state = testing.State(relations=[_peer_relation()]) + state = testing.State() with ( patch("charm.install_snap") as install_snap_mock, @@ -145,7 +145,7 @@ def test_config_changed_missing_secrets(secrets): out = ctx.run(ctx.on.config_changed(), state) assert out.unit_status == testing.WaitingStatus( - "Waiting for leader to set shared configuration." + "Waiting for complete shared configuration from leader." 
) diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py index 1d2928d74..7981ec28d 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py @@ -83,7 +83,7 @@ def test_haproxy_route_policy_information_init_rejects_none_string_fields( act: initialize HaproxyRoutePolicyInformation. assert: pydantic validation error is raised. """ - payload = { + payload: dict[str, Any] = { "allowed_hosts": ["example.com"], "admin_username": "admin", # Ignore bandit warning as this is for testing. diff --git a/haproxy-route-policy-operator/uv.lock b/haproxy-route-policy-operator/uv.lock index 4f8d2f929..e1d551647 100644 --- a/haproxy-route-policy-operator/uv.lock +++ b/haproxy-route-policy-operator/uv.lock @@ -504,6 +504,7 @@ requires-dist = [ { name = "ops", specifier = "==3.7.0" }, { name = "pydantic", specifier = ">=2.12.5" }, { name = "requests", specifier = "==2.33.1" }, + { name = "validators", specifier = ">=0.35.0" }, ] [package.metadata.requires-dev] From ee1b6d11ab8e4fa6f5395767c828768861917cef Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 10 Apr 2026 11:52:38 +0200 Subject: [PATCH 157/201] remove docstring --- .../tests/integration/haproxy_route_policy_requirer.py | 1 - 1 file changed, 1 deletion(-) diff --git a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py index 2984fe23d..46a680b43 100644 --- a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py +++ b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py @@ -23,7 +23,6 @@ class AnyCharm(AnyCharmBase): """haproxy-route requirer charm.""" def __init__(self, *args, **kwargs): - # We don't need to include 
*args and *kwargs in the docstring here. """Initialize the requirer charm.""" super().__init__(*args, **kwargs) self._haproxy_route_policy = HaproxyRoutePolicyRequirer( From 1b83c061500d9d310c3473f0e002a2801b3ce18e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Fri, 10 Apr 2026 11:54:15 +0200 Subject: [PATCH 158/201] update lib patch version --- .../lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 7d728b5ed..c752f1811 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -40,7 +40,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 1 +LIBPATCH = 2 def valid_domain_with_wildcard(value: str) -> str: From 9dce4680a05e9ae0966160c4a757527ab844f721 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 10:34:20 +0200 Subject: [PATCH 159/201] update business logic --- haproxy-operator/charmcraft.yaml | 2 + .../v0/haproxy_route_policy.py | 213 ++++++++++++++++++ haproxy-operator/src/charm.py | 7 +- haproxy-operator/src/state/haproxy_route.py | 54 ++++- 4 files changed, 273 insertions(+), 3 deletions(-) create mode 100644 haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py diff --git a/haproxy-operator/charmcraft.yaml b/haproxy-operator/charmcraft.yaml index d20258112..138a5384c 100644 --- a/haproxy-operator/charmcraft.yaml +++ b/haproxy-operator/charmcraft.yaml @@ -142,3 +142,5 @@ charm-libs: version: "1" - lib: haproxy.ddos_protection version: "0" +- lib: haproxy_route_policy.haproxy_route_policy + version: "0" \ No newline at end of file diff --git 
a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py new file mode 100644 index 000000000..c752f1811 --- /dev/null +++ b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -0,0 +1,213 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. + +"""haproxy-route-policy interface library. + +This interface is used between the HAProxy charm (requirer) and the +haproxy-route-policy charm (provider). + +The requirer publishes route policy requests under ``requests`` as a list of +HAProxy backend objects. The provider publishes approved entries under +``approved_backends`` and additionally exposes ``policy_backend_port`` and +provider unit addresses for policy web UI routing. +""" + +import logging +from typing import Annotated + +from ops import CharmBase +from ops.framework import Object +from ops.model import ( + Relation, + RelationDataAccessError, + RelationDataTypeError, + RelationNotFoundError, +) +from pydantic import ( + BeforeValidator, + Field, + ValidationError, + model_validator, +) +from pydantic.dataclasses import dataclass +from validators import domain + +# The unique Charmhub library identifier, never change it +LIBID = "24c99d77895e481d8661288f95884ee4" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 2 + + +def valid_domain_with_wildcard(value: str) -> str: + """Validate if value is a valid domain that can include a wildcard. + + The wildcard character (*) can't be at the TLD level, for example *.com is not valid. + This is supported natively by the library ( e.g domain("com") will raise a ValidationError ). + + Raises: + ValueError: When value is not a valid domain. + + Args: + value: The value to validate. 
+ """ + fqdn = value[2:] if value.startswith("*.") else value + if not bool(domain(fqdn)): + raise ValueError(f"Invalid domain: {value}") + return value + + +logger = logging.getLogger(__name__) +HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" + + +class HaproxyRoutePolicyInvalidRelationDataError(Exception): + """Raised when relation data validation for haproxy-route-policy fails.""" + + +@dataclass +class HaproxyRoutePolicyBackendRequest: + """Data model representing a single backend request from the requirer. + + Attributes: + relation_id: The relation ID of the request. + backend_name: The name of the HAProxy backend. + hostname_acls: List of hostname ACLs for the backend. + paths: List of paths for the backend. + port: Port number for the backend. + """ + + relation_id: int = Field(description="Relation ID of the backend request.") + backend_name: str = Field(description="Name of the HAProxy backend.") + hostname_acls: list[Annotated[str, BeforeValidator(valid_domain_with_wildcard)]] = Field( + description="List of hostname ACLs for the backend." + ) + paths: list[str] = Field(description="List of paths for the backend.") + port: int = Field(gt=0, le=65535, description="Port number for the backend.") + + +@dataclass +class HaproxyRoutePolicyRequirerAppData: + """Data model representing the requirer application data for haproxy-route-policy. + + Attributes: + backend_requests: List of backend requests to be evaluated by the policy service. + """ + + backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( + description="List of backends to be evaluated by the policy service." 
+ ) + + @model_validator(mode="after") + def validate_unique_backend_names(self): + """Ensure that backend names are unique across all requests.""" + backend_names = [request.backend_name for request in self.backend_requests] + if len(backend_names) != len(set(backend_names)): + raise ValueError("Backend names must be unique across all requests.") + return self + + +@dataclass +class HaproxyRoutePolicyProviderAppData: + """haproxy-route-policy provider app databag schema.""" + + approved_requests: list[HaproxyRoutePolicyBackendRequest] = Field( + description="List of approved backend requests." + ) + + +class HaproxyRoutePolicyProvider(Object): + """haproxy-route-policy provider implementation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str = HAPROXY_ROUTE_POLICY_RELATION_NAME, + ) -> None: + """Initialize provider helper. + + Args: + charm: The charm instance using this helper. + relation_name: Name of the relation endpoint. + """ + super().__init__(charm, relation_name) + self.charm = charm + self.relation_name = relation_name + + @property + def relation(self) -> Relation | None: + """Return the first relation for this endpoint, if any.""" + return self.charm.model.get_relation(self.relation_name) + + def set_approved_backend_requests( + self, approved_requests: list[HaproxyRoutePolicyBackendRequest] + ) -> None: + """Set and publish approved backend requests.""" + relation = self.relation + if not relation or not self.charm.unit.is_leader(): + return + + try: + app_data = HaproxyRoutePolicyProviderAppData(approved_requests=approved_requests) + relation.save(app_data, self.charm.app) + except ( + ValidationError, + RelationDataTypeError, + RelationDataAccessError, + RelationNotFoundError, + ) as exc: + logger.error("Validation error when preparing provider relation data.") + raise HaproxyRoutePolicyInvalidRelationDataError( + "Validation error when preparing provider relation data." 
+ ) from exc + + +class HaproxyRoutePolicyRequirer(Object): + """haproxy-route-policy requirer implementation.""" + + def __init__( + self, + charm: CharmBase, + relation_name: str = HAPROXY_ROUTE_POLICY_RELATION_NAME, + ) -> None: + """Initialize requirer helper. + + Args: + charm: The charm instance using this helper. + relation_name: Name of the relation endpoint. + requests: Optional initial request backend list to publish. + """ + super().__init__(charm, relation_name) + self.charm = charm + self._relation_name = relation_name + + @property + def relation(self) -> Relation | None: + """Return the first relation for this endpoint, if any.""" + return self.charm.model.get_relation(self._relation_name) + + def provide_haproxy_route_policy_requests( + self, backend_requests: list[HaproxyRoutePolicyBackendRequest] + ) -> None: + """Set and publish route policy requests.""" + relation = self.relation + if not relation or not self.charm.unit.is_leader(): + return + + try: + app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=backend_requests) + relation.save(app_data, self.charm.app) + except ( + ValidationError, + RelationDataTypeError, + RelationDataAccessError, + RelationNotFoundError, + ) as exc: + logger.error("Validation error when preparing requirer relation data.") + raise HaproxyRoutePolicyInvalidRelationDataError( + "Validation error when preparing requirer relation data." 
+ ) from exc diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index 4142529f0..245b2bb6b 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -25,6 +25,7 @@ from charms.haproxy.v0.spoe_auth import SpoeAuthRequirer from charms.haproxy.v1.haproxy_route_tcp import HaproxyRouteTcpProvider from charms.haproxy.v2.haproxy_route import HaproxyRouteProvider +from charms.haproxy_route_policy.v0.haproxy_route_policy import HaproxyRoutePolicyRequirer from charms.tls_certificates_interface.v4.tls_certificates import ( CertificateAvailableEvent, CertificateRequestAttributes, @@ -78,6 +79,7 @@ RECV_CA_CERTS_RELATION = "receive-ca-certs" SPOE_AUTH_RELATION = "spoe-auth" HAPROXY_ROUTE_TCP_RELATION = "haproxy-route-tcp" +HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" class HaproxyUnitAddressNotAvailableError(CharmStateValidationBaseError): @@ -124,7 +126,9 @@ def __init__(self, *args: typing.Any): self.haproxy_route_tcp_provider = HaproxyRouteTcpProvider(self) self.spoe_auth_requirer = SpoeAuthRequirer(self, SPOE_AUTH_RELATION) self.ddos_requirer = DDoSProtectionRequirer(self) - + self.haproxy_route_policy = HaproxyRoutePolicyRequirer( + self, HAPROXY_ROUTE_POLICY_RELATION_NAME + ) self.recv_ca_certs = CertificateTransferRequires(self, RECV_CA_CERTS_RELATION) self.certificates = TLSCertificatesRequiresV4( charm=self, @@ -359,6 +363,7 @@ def _configure_haproxy_route( haproxy_route_requirers_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=self.haproxy_route_provider, haproxy_route_tcp=self.haproxy_route_tcp_provider, + haproxy_route_policy=self.haproxy_route_policy, external_hostname=typing.cast( typing.Optional[str], self.model.config.get("external-hostname") ), diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index 964f842d6..6cd69eeda 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ 
b/haproxy-operator/src/state/haproxy_route.py @@ -23,6 +23,11 @@ RequirerApplicationData, ServerHealthCheck, ) +from charms.haproxy_route_policy.v0.haproxy_route_policy import ( + HaproxyRoutePolicyBackendRequest, + HaproxyRoutePolicyProviderAppData, + HaproxyRoutePolicyRequirer, +) from pydantic import Field, IPvAnyAddress, model_validator from pydantic.dataclasses import dataclass from typing_extensions import Self @@ -294,6 +299,28 @@ class HaproxyRouteRequirersInformation: relation_ids_with_invalid_data_tcp: set[int] ports_with_conflicts: set[int] tcp_frontends: list[HAProxyRouteTcpFrontend] = Field(strict=False) + # This is used to transform haproxy-route requirers to backend requests for the policy charm. + valid_haproxy_route_requirers: list[HaproxyRouteRequirerData] + + @property + def backend_requests_for_policy(self) -> list[HaproxyRoutePolicyBackendRequest]: + """Transform the requirer data into backend requests for the policy charm. + + Returns: + list[HaproxyRoutePolicyBackendRequest]: The backend requests for the policy charm. + """ + backend_requests: list[HaproxyRoutePolicyBackendRequest] = [] + for backend in self.backends: + backend_requests.append( + HaproxyRoutePolicyBackendRequest( + relation_id=backend.relation_id, + backend_name=backend.backend_name, + hostname_acls=list(backend.hostname_acls), + paths=backend.application_data.paths, + port=80 if backend.application_data.allow_http else 443, + ) + ) + return backend_requests @classmethod def from_provider( # pylint: disable=too-many-arguments @@ -301,6 +328,7 @@ def from_provider( # pylint: disable=too-many-arguments *, haproxy_route: HaproxyRouteProvider, haproxy_route_tcp: HaproxyRouteTcpProvider, + haproxy_route_policy: HaproxyRoutePolicyRequirer, external_hostname: Optional[str], peers: list[str], ca_certs_configured: bool, @@ -310,6 +338,7 @@ def from_provider( # pylint: disable=too-many-arguments Args: haproxy_route: The haproxy-route provider class. 
haproxy_route_tcp: The haproxy-route-tcp provider class. + haproxy_route_policy: The haproxy-route-policy requirer class. external_hostname: The charm's configured hostname. peers: List of IP address of haproxy peer units. ca_certs_configured: If ca certificates are configured for haproxy backends. @@ -322,15 +351,35 @@ def from_provider( # pylint: disable=too-many-arguments for the haproxy-route interface. """ try: + # Fetch approved requests from the policy charm and cross-reference with requirers data from haproxy-route + approved_requirers = [] + requirers = haproxy_route.get_data(haproxy_route.relations) + if relation := haproxy_route_policy.relation: + approved_requests = relation.load( + HaproxyRoutePolicyProviderAppData, relation.app + ).approved_requests + approved_backend_names = {request.backend_name for request in approved_requests} + approved_requirers = [ + requirer + for requirer in requirers.requirers_data + if requirer.application_data.service in approved_backend_names + ] + # This is used to check that requirers don't ask for the same backend name. backend_names: set[str] = set() # Control stick tables for rate_limiting and # eventually any shared values between haproxy units. stick_table_entries: list[str] = [] - requirers = haproxy_route.get_data(haproxy_route.relations) backends: list[HAProxyRouteBackend] = [] relation_ids_with_invalid_data = requirers.relation_ids_with_invalid_data - for requirer in requirers.requirers_data: + + # If there is a policy relation, only process the approved requirers. Otherwise, process all requirers. 
+ requirers_to_process = ( + approved_requirers + if haproxy_route_policy.relation is not None + else requirers.requirers_data + ) + for requirer in requirers_to_process: # Duplicate backend names check is done in the library's `get_data` method backend_names.add(requirer.application_data.service) @@ -383,6 +432,7 @@ def from_provider( # pylint: disable=too-many-arguments relation_ids_with_invalid_data_tcp=relation_ids_with_invalid_data_tcp, tcp_frontends=tcp_frontends, ports_with_conflicts=set[int](), + valid_haproxy_route_requirers=requirers.requirers_data, ) except DataValidationError as exc: # This exception is only raised if the provider has "raise_on_validation_error" set From d950a5ea7c7483fb17b008fc31fd6ab64f43f1d5 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 11:01:57 +0200 Subject: [PATCH 160/201] update logic to send haproxy route policy data to provider --- haproxy-operator/src/charm.py | 5 ++++ haproxy-operator/src/state/haproxy_route.py | 27 ++++++++++++++------- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index 245b2bb6b..e4ee1d108 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -408,6 +408,9 @@ def _configure_haproxy_route( ), ) if self.unit.is_leader(): + self.haproxy_route_policy.provide_haproxy_route_policy_requests( + haproxy_route_requirers_information.backend_requests_for_policy + ) self._publish_haproxy_route_proxied_endpoints(haproxy_route_requirers_information) self._publish_haproxy_route_tcp_proxied_endpoints( haproxy_route_requirers_information, ha_information @@ -437,6 +440,7 @@ def _get_certificate_requests(self) -> typing.List[CertificateRequestAttributes] HaproxyRouteRequirersInformation.from_provider( haproxy_route=self.haproxy_route_provider, haproxy_route_tcp=self.haproxy_route_tcp_provider, + haproxy_route_policy=self.haproxy_route_policy, external_hostname=external_hostname, 
peers=self._get_peer_units_address(), ca_certs_configured=bool(self.recv_ca_certs.get_all_certificates()), @@ -627,6 +631,7 @@ def _on_get_proxied_endpoints_action(self, event: ActionEvent) -> None: haproxy_route_requirers_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=self.haproxy_route_provider, haproxy_route_tcp=self.haproxy_route_tcp_provider, + haproxy_route_policy=self.haproxy_route_policy, external_hostname=typing.cast("str | None", self.config.get("external-hostname")), peers=self._get_peer_units_address(), ca_certs_configured=bool(self.recv_ca_certs.get_all_certificates()), diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index 6cd69eeda..a3f6c7176 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -28,7 +28,7 @@ HaproxyRoutePolicyProviderAppData, HaproxyRoutePolicyRequirer, ) -from pydantic import Field, IPvAnyAddress, model_validator +from pydantic import Field, IPvAnyAddress, ValidationError, model_validator from pydantic.dataclasses import dataclass from typing_extensions import Self @@ -311,15 +311,24 @@ def backend_requests_for_policy(self) -> list[HaproxyRoutePolicyBackendRequest]: """ backend_requests: list[HaproxyRoutePolicyBackendRequest] = [] for backend in self.backends: - backend_requests.append( - HaproxyRoutePolicyBackendRequest( - relation_id=backend.relation_id, - backend_name=backend.backend_name, - hostname_acls=list(backend.hostname_acls), - paths=backend.application_data.paths, - port=80 if backend.application_data.allow_http else 443, + try: + port = backend.application_data.external_grpc_port or ( + 80 if backend.application_data.allow_http else 443 ) - ) + backend_requests.append( + HaproxyRoutePolicyBackendRequest( + relation_id=backend.relation_id, + backend_name=backend.backend_name, + hostname_acls=list(backend.hostname_acls), + paths=backend.application_data.paths, + port=port, + ) + ) + 
except ValidationError as exc: + logger.error( + "Validation error for backend %s, skipping: %s", backend.backend_name, exc + ) + continue return backend_requests @classmethod From 6b8589e014819eeeb7cc6034d8b834823d081be2 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 11:05:16 +0200 Subject: [PATCH 161/201] update password length checks --- haproxy-route-policy-operator/tests/integration/test_charm.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/haproxy-route-policy-operator/tests/integration/test_charm.py b/haproxy-route-policy-operator/tests/integration/test_charm.py index bb0f73ec2..4df9ee613 100644 --- a/haproxy-route-policy-operator/tests/integration/test_charm.py +++ b/haproxy-route-policy-operator/tests/integration/test_charm.py @@ -30,4 +30,5 @@ def test_charm_becomes_active_after_relation_with_postgresql( result = juju.run(f"{application}/0", "get-admin-credentials") assert result.results["username"] == "admin" - assert len(result.results["password"]) == 16 + # secrets.token_urlsafe(32) generates a string of length 43. 
+ assert len(result.results["password"]) == 43 From a2e4462efde877d138a1667f144beae9da66ce1a Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 13:58:58 +0200 Subject: [PATCH 162/201] don't use self-hosted runner for unit tests --- .github/workflows/test.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 7be052a6c..1d21f2bb9 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -22,8 +22,7 @@ jobs: uses: canonical/operator-workflows/.github/workflows/test.yaml@main secrets: inherit with: - self-hosted-runner: true - self-hosted-runner-image: "noble" + self-hosted-runner: false working-directory: ${{ matrix.charm.working-directory }} with-uv: true build-snap-haproxy-route-policy: From db91d9cfcec76a9354a29e813bcfbc6e67a6d40d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 14:10:29 +0200 Subject: [PATCH 163/201] update unit test wf --- .github/workflows/test.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 1d21f2bb9..dd12f55bc 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -19,9 +19,11 @@ jobs: - name: haproxy-route-policy-operator working-directory: ./haproxy-route-policy-operator name: Unit tests for ${{ matrix.charm.name }} - uses: canonical/operator-workflows/.github/workflows/test.yaml@main + uses: canonical/operator-workflows/.github/workflows/test.yaml@parametrize-runs-on-for-unit-tests secrets: inherit with: + runs-on-base: ubuntu@24.04 + python-version: "3.12" self-hosted-runner: false working-directory: ${{ matrix.charm.working-directory }} with-uv: true From 0060932a047cf61b45608a76c782eefd4f50f790 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 14:14:54 +0200 Subject: [PATCH 164/201] update runs-on tag --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index dd12f55bc..f77e573f7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -22,7 +22,7 @@ jobs: uses: canonical/operator-workflows/.github/workflows/test.yaml@parametrize-runs-on-for-unit-tests secrets: inherit with: - runs-on-base: ubuntu@24.04 + runs-on-base: ubuntu-24.04 python-version: "3.12" self-hosted-runner: false working-directory: ${{ matrix.charm.working-directory }} From e516c115e3f14b1523b337708b7c6e6b67b04b1d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 14:18:50 +0200 Subject: [PATCH 165/201] rename --- haproxy-route-policy-operator/charmcraft.yaml | 3 +-- haproxy-route-policy-operator/src/state/policy.py | 7 +++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 97c4cc1cc..8e5eec3ef 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -61,9 +61,8 @@ peers: config: options: - allowed-hosts: + extra-allowed-hosts: type: string description: A comma-separated list of host/domain names that the dns-policy-app API can serve. This configuration will set the DJANGO_ALLOWED_HOSTS environment variable with its content being a JSON encoded list. 
- default: "0.0.0.0" \ No newline at end of file diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 36f61b2c4..4b5a24be6 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -19,6 +19,7 @@ DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec PEER_RELATION_NAME = "haproxy-route-policy-peer" SECRET_LENGTH = 32 +DEFAULT_ALLOWED_HOSTS = ["localhost"] class DjangoSecretKeyMissingError(Exception): @@ -74,7 +75,9 @@ class HaproxyRoutePolicyInformation: def allowed_hosts_snap_configuration(self) -> dict[str, str]: """Return snap configuration keys and values.""" return { - "allowed-hosts": json.dumps([str(host) for host in self.allowed_hosts]), + "allowed-hosts": json.dumps( + DEFAULT_ALLOWED_HOSTS + [str(host) for host in self.allowed_hosts] + ), } @classmethod @@ -96,7 +99,7 @@ def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": cast(IPvAnyAddress | FQDN, address) for address in cast(str, charm.config.get("allowed-hosts")).split(",") ] - if charm.config.get("allowed-hosts") + if charm.config.get("extra-allowed-hosts") else [] ) credentials = _get_django_admin_credentials(charm, peer_relation) From 230830f9f950d0169b8eaca08026b966a2b045c1 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 15:48:45 +0200 Subject: [PATCH 166/201] update unit tests --- .../tests/unit/test_haproxy_route_policy_information.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py index 7981ec28d..979bfa44d 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py @@ -99,10 +99,10 @@ def 
test_haproxy_route_policy_information_init_rejects_none_string_fields( @pytest.mark.parametrize( "allowed_hosts, expected", [ - pytest.param([], {"allowed-hosts": "[]"}, id="empty"), + pytest.param([], {"allowed-hosts": '["localhost"]'}, id="empty"), pytest.param( ["example.com", "api.example.com"], - {"allowed-hosts": '["example.com", "api.example.com"]'}, + {"allowed-hosts": '["localhost", "example.com", "api.example.com"]'}, id="multiple-fqdn", ), ], From c36f0637e0e3246f3bdaa372589c95746115b711 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 18:30:08 +0200 Subject: [PATCH 167/201] query the API to refresh backend requests --- haproxy-route-policy-operator/src/charm.py | 32 +++- haproxy-route-policy-operator/src/policy.py | 177 ++++++++++++++++++++ 2 files changed, 206 insertions(+), 3 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 96c80d200..8ca81ce04 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -20,6 +20,8 @@ from pydantic import ValidationError from policy import ( + HaproxyRoutePolicyAPIError, + HaproxyRoutePolicyClient, HaproxyRoutePolicyDatabaseMigrationError, configure_snap, create_or_update_user, @@ -119,11 +121,31 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) if relation := self.haproxy_route_policy.relation: - requests = relation.load( + backend_requests = relation.load( HaproxyRoutePolicyRequirerAppData, relation.app ).backend_requests - logger.info(f"backend requests {requests}, auto approved.") - self.haproxy_route_policy.set_approved_backend_requests(requests) + + client = HaproxyRoutePolicyClient( + username=haproxy_route_policy_information.admin_username, + password=haproxy_route_policy_information.admin_password, + ) + + self.unit.status = ops.MaintenanceStatus( + "evaluating backend requests via policy service" + ) + evaluated = 
client.refresh(backend_requests) + + approved = [ + req + for req, ev in zip(backend_requests, evaluated, strict=True) + if ev.status == "accepted" + ] + logger.info( + "backend requests evaluated: %d total, %d approved", + len(evaluated), + len(approved), + ) + self.haproxy_route_policy.set_approved_backend_requests(approved) except DatabaseRelationMissingError: self.unit.status = ops.BlockedStatus("Missing database relation.") @@ -150,6 +172,10 @@ def _reconcile(self, _: ops.EventBase) -> None: logger.exception("Failed to reconcile haproxy-route-policy service") self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") return + except HaproxyRoutePolicyAPIError as exc: + logger.exception("Policy service API error") + self.unit.status = ops.BlockedStatus(f"policy service error: {exc.message}") + return except ValidationError: logger.exception("Invalid haproxy-route-policy relation data") self.unit.status = ops.WaitingStatus( diff --git a/haproxy-route-policy-operator/src/policy.py b/haproxy-route-policy-operator/src/policy.py index 30b4bfe89..5b254429d 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -10,9 +10,15 @@ import subprocess # nosec from typing import Any +import requests as http_requests from charmlibs import snap +from charms.haproxy_route_policy.v0.haproxy_route_policy import ( + HaproxyRoutePolicyBackendRequest, +) +from pydantic.dataclasses import dataclass SNAP_NAME = "haproxy-route-policy" +DEFAULT_ENDPOINT = "http://localhost:8080" logger = logging.getLogger(__name__) @@ -79,3 +85,174 @@ def create_or_update_user(username: str, password: str) -> None: ) except subprocess.CalledProcessError as e: raise RuntimeError(f"failed to create/update Django user: {e.stdout}") from e + + +@dataclass(frozen=True) +class EvaluatedBackendRequest: + """A backend request returned by the policy service with its evaluation status. 
+ + Attributes: + id: UUID of the request assigned by the policy service. + relation_id: Juju relation ID the request originated from. + backend_name: HAProxy backend name. + hostname_acls: Hostnames requested for routing. + paths: URL paths requested for routing. + port: Frontend port for HAProxy. + status: Evaluation status (pending, accepted, rejected). + """ + + id: str + relation_id: int + backend_name: str + hostname_acls: list[str] + paths: list[str] + port: int + status: str + + +class HaproxyRoutePolicyAPIError(Exception): + """Raised when the haproxy-route-policy API returns an error. + + Attributes: + status_code: HTTP status code. + message: Error message from the API. + """ + + def __init__(self, status_code: int, message: str) -> None: + self.status_code = status_code + self.message = message + super().__init__(f"API error {status_code}: {message}") + + +class HaproxyRoutePolicyClient: + """Client for the haproxy-route-policy Django REST API. + + Communicates with the policy service exposed by the haproxy-route-policy + snap to create, evaluate and manage backend requests. + """ + + def __init__( + self, + username: str, + password: str, + endpoint: str = DEFAULT_ENDPOINT, + ) -> None: + """Initialize the client. + + Args: + username: Django admin username for basic auth. + password: Django admin password for basic auth. + endpoint: Base URL of the policy service. + """ + self._endpoint = endpoint.rstrip("/") + self._auth = (username, password) + + # ------------------------------------------------------------------ + # Backend requests + # ------------------------------------------------------------------ + + def refresh( + self, + backend_requests: list[HaproxyRoutePolicyBackendRequest], + ) -> list[EvaluatedBackendRequest]: + """Submit backend requests to the policy service for evaluation. + + Existing requests with the same ``backend_name`` are updated; + new ones are created. 
The policy service evaluates every request + against its rule set and returns the current status. + + Args: + backend_requests: list of backend requests from the requirer. + + Returns: + List of evaluated backend requests with their status. + """ + payload = [ + { + "relation_id": req.relation_id, + "backend_name": req.backend_name, + "hostname_acls": list(req.hostname_acls), + "paths": list(req.paths), + "port": req.port, + } + for req in backend_requests + ] + response = http_requests.post( + f"{self._endpoint}/api/v1/requests", + json=payload, + auth=self._auth, + timeout=10, + ) + self._raise_for_error(response) + return [EvaluatedBackendRequest(**item) for item in response.json()] + + def list_requests(self, status: str | None = None) -> list[EvaluatedBackendRequest]: + """List backend requests, optionally filtered by status. + + Args: + status: Optional status filter (pending, accepted, rejected). + + Returns: + List of evaluated backend requests. + """ + params: dict[str, str] = {} + if status: + params["status"] = status + response = http_requests.get( + f"{self._endpoint}/api/v1/requests", + params=params, + auth=self._auth, + timeout=10, + ) + self._raise_for_error(response) + return [EvaluatedBackendRequest(**item) for item in response.json()] + + def get_request(self, request_id: str) -> EvaluatedBackendRequest: + """Get a single backend request by ID. + + Args: + request_id: UUID of the request. + + Returns: + The evaluated backend request. + """ + response = http_requests.get( + f"{self._endpoint}/api/v1/requests/{request_id}", + auth=self._auth, + timeout=10, + ) + self._raise_for_error(response) + return EvaluatedBackendRequest(**response.json()) + + def delete_request(self, request_id: str) -> None: + """Delete a backend request. + + Args: + request_id: UUID of the request to delete. 
+ """ + response = http_requests.delete( + f"{self._endpoint}/api/v1/requests/{request_id}", + auth=self._auth, + timeout=10, + ) + self._raise_for_error(response) + + # ------------------------------------------------------------------ + # Helpers + # ------------------------------------------------------------------ + + def _raise_for_error(self, response: http_requests.Response) -> None: + """Raise :class:`HaproxyRoutePolicyAPIError` on non-2xx responses. + + Args: + response: The HTTP response to check. + + Raises: + HaproxyRoutePolicyAPIError: If the response status is not 2xx. + """ + try: + response.raise_for_status() + except http_requests.exceptions.HTTPError as exc: + raise HaproxyRoutePolicyAPIError( + status_code=response.status_code, message=response.text + ) from exc From ad2d2e8389e9feadcd5182f3ba8c7b6b0c805e7a Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 13 Apr 2026 19:04:37 +0200 Subject: [PATCH 168/201] remove merge errors --- haproxy-route-policy-operator/src/charm.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 852cea081..efc9dff32 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -6,7 +6,6 @@ """haproxy-route-policy-operator charm.""" import logging -import secrets from typing import Any import ops @@ -44,16 +43,6 @@ HAPROXY_ROUTE_POLICY_PORT = 8080 # Ignore bandit warnings here as these are labels DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec -DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec -PEER_RELATION_NAME = "haproxy-route-policy-peer" - - -class DjangoSecretKeyMissingError(Exception): - """Raised when the Django secret key is not generated by the leader unit.""" - - -class DjangoAdminCredentialsMissingError(Exception): - """Raised when the Django admin credentials are not generated by the leader unit.""" class 
HaproxyRoutePolicyCharm(ops.CharmBase): @@ -99,7 +88,6 @@ def _reconcile(self, _: ops.EventBase) -> None: } ) - credentials = self._get_django_admin_credentials(peer_relation) if self.unit.is_leader(): self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") run_migrations() From fc78300790706b2405313056aa00937daa6f9637 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 14 Apr 2026 00:01:23 +0200 Subject: [PATCH 169/201] fix rendering bug for gprc backends --- haproxy-operator/src/state/haproxy_route.py | 14 +++++++++++++- .../templates/haproxy_route_grpc.cfg.j2 | 2 +- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index 964f842d6..9de2e9c63 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -258,7 +258,19 @@ def https_backend_server_configuration(self) -> str: """ if self.application_data.protocol != "https": return "" - return f"{LEADING_SPACE}ssl ca-file {HAPROXY_CAS_FILE!s} alpn h2,http/1.1 check-alpn h2,http/1.1" + return f"{LEADING_SPACE}ssl ca-file {HAPROXY_CAS_FILE} alpn h2,http/1.1 check-alpn h2,http/1.1" + + @property + def grpc_backend_server_configuration(self) -> str: + """Build the backend server configuration for gRPC protocol. + + Returns: + str: The backend server configuration for gRPC protocol, + or an empty string if external_grpc_port is not set. 
+ """ + if not self.application_data.external_grpc_port: + return "" + return f"{LEADING_SPACE}ssl ca-file {HAPROXY_CAS_FILE} alpn h2 check-alpn h2" @property def enable_http_check(self) -> bool: diff --git a/haproxy-operator/templates/haproxy_route_grpc.cfg.j2 b/haproxy-operator/templates/haproxy_route_grpc.cfg.j2 index 644eb737d..37ac4b405 100644 --- a/haproxy-operator/templates/haproxy_route_grpc.cfg.j2 +++ b/haproxy-operator/templates/haproxy_route_grpc.cfg.j2 @@ -20,6 +20,6 @@ backend {{ backend.backend_name }} http-request {{ rewrite_configuration }} {% endfor %} {% for server in backend.servers %} - server {{ server.server_name }} {{ server.address }}:{{ server.port }} ssl ca-file {{ haproxy_cas_file }} alpn h2 + server {{ server.server_name }} {{ server.address }}:{{ server.port }}{{ backend.grpc_backend_server_configuration }} {% endfor %} {% endfor %} From 0db3152983ae0a9be382a603614ffe64b81ecab9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 14 Apr 2026 00:07:36 +0200 Subject: [PATCH 170/201] add test for grpc backend rendering --- .../tests/unit/test_haproxy_route_options.py | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/haproxy-operator/tests/unit/test_haproxy_route_options.py b/haproxy-operator/tests/unit/test_haproxy_route_options.py index fac6eb1e5..d703ca872 100644 --- a/haproxy-operator/tests/unit/test_haproxy_route_options.py +++ b/haproxy-operator/tests/unit/test_haproxy_route_options.py @@ -200,3 +200,64 @@ def test_protocol_https_no_ca(monkeypatch: pytest.MonkeyPatch, certificates_inte # The relation data is invalid assert protocol_https_relation.local_app_data["endpoints"] == "[]" # type: ignore assert out.app_status == ActiveStatus("") + + +@pytest.mark.usefixtures("systemd_mock", "mocks_external_calls") +def test_grpc_backend( + monkeypatch: pytest.MonkeyPatch, certificates_integration, receive_ca_certs_relation +): + """ + arrange: prepare the state with the haproxy-route relation with external_grpc_port and + 
protocol https. + act: run relation_changed for the haproxy-route relation. + assert: the rendered config contains the gRPC server line with ssl ca-file, alpn h2, + and check-alpn h2. + """ + render_file_mock = MagicMock() + monkeypatch.setattr("haproxy.render_file", render_file_mock) + haproxy_route_relation = Relation( + endpoint="haproxy-route", + local_app_data={"endpoints": json.dumps([f"https://{TEST_EXTERNAL_HOSTNAME_CONFIG}/"])}, + remote_app_data={ + "hostname": f'"{TEST_EXTERNAL_HOSTNAME_CONFIG}"', + "hosts": '["10.12.97.153","10.12.97.154"]', + "ports": "[50051]", + "protocol": '"https"', + "service": '"grpc-service"', + "external_grpc_port": "9090", + }, + remote_units_data={0: {"address": '"10.75.1.129"'}}, + ) + state = State( + relations=frozenset( + { + receive_ca_certs_relation, + haproxy_route_relation, + certificates_integration, + } + ), + leader=True, + model=Model(name="haproxy-tutorial"), + app_status=ActiveStatus(""), + unit_status=ActiveStatus(""), + ) + + ctx = Context(HAProxyCharm, juju_version="3.6.8") + out = ctx.run( + ctx.on.relation_changed(haproxy_route_relation), + state, + ) + + render_file_mock.assert_called_once() + haproxy_conf_contents = render_file_mock.call_args_list[0].args[1] + assert ( + "server grpc-service_50051_0 10.12.97.153:50051" + " ssl ca-file /var/lib/haproxy/cas/cas.pem alpn h2 check-alpn h2\n" + in haproxy_conf_contents + ) + assert ( + "server grpc-service_50051_1 10.12.97.154:50051" + " ssl ca-file /var/lib/haproxy/cas/cas.pem alpn h2 check-alpn h2\n" + in haproxy_conf_contents + ) + assert out.app_status == ActiveStatus("") From 4be66e90cf67bce103a072ed378dbfb5f39775ea Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 14 Apr 2026 00:18:27 +0200 Subject: [PATCH 171/201] thin out the client --- haproxy-route-policy-operator/src/policy.py | 54 +-------------------- 1 file changed, 2 insertions(+), 52 deletions(-) diff --git a/haproxy-route-policy-operator/src/policy.py 
b/haproxy-route-policy-operator/src/policy.py index 5b254429d..5ad9590ae 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -87,26 +87,14 @@ def create_or_update_user(username: str, password: str) -> None: raise RuntimeError(f"failed to create/update Django user: {e.stdout}") from e -@dataclass(frozen=True) -class EvaluatedBackendRequest: +@dataclass +class EvaluatedBackendRequest(HaproxyRoutePolicyBackendRequest): """A backend request returned by the policy service with its evaluation status. Attributes: - id: UUID of the request assigned by the policy service. - relation_id: Juju relation ID the request originated from. - backend_name: HAProxy backend name. - hostname_acls: Hostnames requested for routing. - paths: URL paths requested for routing. - port: Frontend port for HAProxy. status: Evaluation status (pending, accepted, rejected). """ - id: str - relation_id: int - backend_name: str - hostname_acls: list[str] - paths: list[str] - port: int status: str @@ -147,10 +135,6 @@ def __init__( self._endpoint = endpoint.rstrip("/") self._auth = (username, password) - # ------------------------------------------------------------------ - # Backend requests - # ------------------------------------------------------------------ - def refresh( self, backend_requests: list[HaproxyRoutePolicyBackendRequest], @@ -207,40 +191,6 @@ def list_requests(self, status: str | None = None) -> list[EvaluatedBackendReque self._raise_for_error(response) return [EvaluatedBackendRequest(**item) for item in response.json()] - def get_request(self, request_id: str) -> EvaluatedBackendRequest: - """Get a single backend request by ID. - - Args: - request_id: UUID of the request. - - Returns: - The evaluated backend request. 
- """ - response = http_requests.get( - f"{self._endpoint}/api/v1/requests/{request_id}", - auth=self._auth, - timeout=10, - ) - self._raise_for_error(response) - return EvaluatedBackendRequest(**response.json()) - - def delete_request(self, request_id: str) -> None: - """Delete a backend request. - - Args: - request_id: UUID of the request to delete. - """ - response = http_requests.delete( - f"{self._endpoint}/api/v1/requests/{request_id}", - auth=self._auth, - timeout=10, - ) - self._raise_for_error(response) - - # ------------------------------------------------------------------ - # Helpers - # ------------------------------------------------------------------ - def _raise_for_error(self, response: http_requests.Response) -> None: """Raise :class:`HaproxyRoutePolicyAPIError` on non-2xx responses. From dae3b3b2d6a42e25f8c207951657ee69ea6a7865 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 14 Apr 2026 00:28:41 +0200 Subject: [PATCH 172/201] add relation interface, add global test --- .github/workflows/integration_test.yaml | 3 +- haproxy-operator/charmcraft.yaml | 3 ++ tests/integration/conftest.py | 23 +++++++++ .../integration/test_haproxy_route_policy.py | 50 +++++++++++++++++++ tests/integration/test_oauth_spoe.py | 2 +- 5 files changed, 79 insertions(+), 2 deletions(-) create mode 100644 tests/integration/test_haproxy_route_policy.py diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index b9dccbfe4..2d2c220de 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -59,7 +59,8 @@ jobs: modules: | [ "test_oauth_spoe.py", - "test_haproxy_ddos.py" + "test_haproxy_ddos.py", + "test_haproxy_route_policy.py" ] with-uv: true pre-run-script: ./tests/integration/setup-integration-tests.sh diff --git a/haproxy-operator/charmcraft.yaml b/haproxy-operator/charmcraft.yaml index 138a5384c..121826eee 100644 --- a/haproxy-operator/charmcraft.yaml +++ 
b/haproxy-operator/charmcraft.yaml @@ -61,6 +61,9 @@ requires: ddos-protection: interface: ddos_protection limit: 1 + haproxy-route-policy: + interface: haproxy-route-policy + limit: 1 provides: diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 51f57ee88..4f9c272b4 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -415,3 +415,26 @@ def browser_context_manager(): [driver_executable, driver_cli, "install", "chromium"], env=get_driver_env() ) logger.info("install chromium %s", completed_process) + + +@pytest.fixture(scope="module", name="haproxy_route_policy") +def haproxy_route_policy_fixture( + pytestconfig: pytest.Config, lxd_juju: jubilant.Juju, app_name, host_name +) -> str: + """Deploy the haproxy-route-policy charm.""" + charm_name = "haproxy-route-policy" + if pytestconfig.getoption("--no-deploy") and app_name in lxd_juju.status().apps: + return app_name + + charm_file = next( + (f for f in pytestconfig.getoption("--charm-file") if f"{charm_name}_" in f), + None, + ) + assert charm_file, f"--charm-file with {charm_name} charm should be set" + + lxd_juju.deploy( + charm=charm_file, + app=app_name, + config={"hostname": host_name}, + ) + return app_name diff --git a/tests/integration/test_haproxy_route_policy.py b/tests/integration/test_haproxy_route_policy.py new file mode 100644 index 000000000..2f8721493 --- /dev/null +++ b/tests/integration/test_haproxy_route_policy.py @@ -0,0 +1,50 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Integration tests for haproxy route policy.""" + +import json +import logging + + +import jubilant +import pytest + +logger = logging.getLogger(__name__) + +TEST_HOSTNAME = "example.com" + + +@pytest.mark.abort_on_fail +def test_haproxy_route_policy( + configured_application_with_tls: str, + haproxy_route_policy, + lxd_juju: jubilant.Juju, + any_charm_haproxy_route_deployer, +): + """Test the HAProxy route policy integration.""" + lxd_juju.integrate( + f"{configured_application_with_tls}:haproxy-route", + any_charm_haproxy_route_deployer, + ) + lxd_juju.integrate( + f"{configured_application_with_tls}:haproxy-route-policy", + haproxy_route_policy, + ) + lxd_juju.run( + f"{any_charm_haproxy_route_deployer}/0", + "rpc", + { + "method": "update_relation", + "args": json.dumps( + [ + { + "service": any_charm_haproxy_route_deployer, + "ports": [80], + "hostname": TEST_HOSTNAME, + } + ] + ), + }, + ) + lxd_juju.wait(jubilant.all_active) diff --git a/tests/integration/test_oauth_spoe.py b/tests/integration/test_oauth_spoe.py index ef5b0a55d..a232f27fa 100644 --- a/tests/integration/test_oauth_spoe.py +++ b/tests/integration/test_oauth_spoe.py @@ -51,7 +51,7 @@ def test_oauth_spoe( HostConfig("haproxy3.internal", "haproxy-route-requirer3", None), ] - # Deploy the haproxy-requirer integration charms and she haproxy-spoe-auth charms + # Deploy the haproxy-requirer integration charms and the haproxy-spoe-auth charms for host_config in host_configs: any_charm_haproxy_route_deployer(host_config.requirer) if host_config.spoe: From e76ddb708a58fec63cec23716e79483bb846fff1 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 14 Apr 2026 00:29:22 +0200 Subject: [PATCH 173/201] add relation interface --- haproxy-operator/charmcraft.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haproxy-operator/charmcraft.yaml b/haproxy-operator/charmcraft.yaml index 138a5384c..121826eee 100644 --- a/haproxy-operator/charmcraft.yaml +++ b/haproxy-operator/charmcraft.yaml @@ -61,6 
+61,9 @@ requires: ddos-protection: interface: ddos_protection limit: 1 + haproxy-route-policy: + interface: haproxy-route-policy + limit: 1 provides: From 951e72566d76c765a306c150aad495492a9c91ee Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 14 Apr 2026 13:41:36 +0200 Subject: [PATCH 174/201] move exception handling to a separate state module, refactor charm code --- haproxy-route-policy-operator/src/charm.py | 155 +++++++----------- haproxy-route-policy-operator/src/policy.py | 5 +- .../src/state/validation.py | 89 ++++++++++ 3 files changed, 149 insertions(+), 100 deletions(-) create mode 100644 haproxy-route-policy-operator/src/state/validation.py diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index ca8a40951..c06b8a091 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -9,7 +9,6 @@ from typing import Any import ops -from charmlibs.snap import SnapError from charms.data_platform_libs.v0.data_interfaces import ( DatabaseRequires, ) @@ -17,12 +16,9 @@ HaproxyRoutePolicyProvider, HaproxyRoutePolicyRequirerAppData, ) -from pydantic import ValidationError from policy import ( - HaproxyRoutePolicyAPIError, HaproxyRoutePolicyClient, - HaproxyRoutePolicyDatabaseMigrationError, configure_snap, create_or_update_user, install_snap, @@ -31,18 +27,13 @@ ) from state.database import ( DatabaseInformation, - DatabaseRelationMissingError, - DatabaseRelationNotReadyError, ) from state.policy import ( DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, PEER_RELATION_NAME, - DjangoAdminCredentialsInvalidError, - DjangoAdminCredentialsMissingError, - DjangoSecretKeyMissingError, HaproxyRoutePolicyInformation, - PeerRelationMissingError, ) +from state.validation import handle_charm_exceptions logger = logging.getLogger(__name__) @@ -91,6 +82,7 @@ def __init__(self, *args: Any): self.on[self.haproxy_route_policy.relation_name].relation_departed, self._reconcile ) + 
@handle_charm_exceptions def _reconcile(self, _: ops.EventBase) -> None: """Reconcile snap configuration and service state.""" peer_relation = self.model.get_relation(PEER_RELATION_NAME) @@ -98,95 +90,34 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.status = ops.WaitingStatus("Waiting for peer relation.") return - try: - install_snap() - self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") - database_information = DatabaseInformation.from_requirer(self, self.database) - haproxy_route_policy_information = HaproxyRoutePolicyInformation.from_charm(self) - configure_snap( - { - **haproxy_route_policy_information.allowed_hosts_snap_configuration, - **database_information.haproxy_route_policy_snap_configuration, - } - ) + install_snap() + self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") + database_information = DatabaseInformation.from_requirer(self, self.database) + haproxy_route_policy_information = HaproxyRoutePolicyInformation.from_charm(self) + configure_snap( + { + **haproxy_route_policy_information.allowed_hosts_snap_configuration, + **database_information.haproxy_route_policy_snap_configuration, + } + ) - if self.unit.is_leader(): - self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") - run_migrations() - - self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") - create_or_update_user( - haproxy_route_policy_information.admin_username, - haproxy_route_policy_information.admin_password, - ) - - self.unit.status = ops.MaintenanceStatus("starting gunicorn service") - start_gunicorn_service() - - self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) - - if relation := self.haproxy_route_policy.relation: - backend_requests = relation.load( - HaproxyRoutePolicyRequirerAppData, relation.app - ).backend_requests - - client = HaproxyRoutePolicyClient( - username=haproxy_route_policy_information.admin_username, - 
password=haproxy_route_policy_information.admin_password, - ) - - self.unit.status = ops.MaintenanceStatus( - "evaluating backend requests via policy service" - ) - evaluated = client.refresh(backend_requests) - - approved = [ - req - for req, ev in zip(backend_requests, evaluated, strict=True) - if ev.status == "accepted" - ] - logger.info( - "backend requests evaluated: %d total, %d approved", - len(evaluated), - len(approved), - ) - self.haproxy_route_policy.set_approved_backend_requests(approved) - - except DatabaseRelationMissingError: - self.unit.status = ops.BlockedStatus("Missing database relation.") - return - except DatabaseRelationNotReadyError: - logger.exception("Database relation not ready") - self.unit.status = ops.WaitingStatus("waiting for complete database relation.") - return - except PeerRelationMissingError: - logger.exception("Peer relation missing") - self.unit.status = ops.WaitingStatus("Waiting for peer relation.") - return - except ( - DjangoSecretKeyMissingError, - DjangoAdminCredentialsMissingError, - DjangoAdminCredentialsInvalidError, - ): - logger.exception("Django shared configuration not ready") - self.unit.status = ops.WaitingStatus( - "Waiting for complete shared configuration from leader." 
- ) - return - except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: - logger.exception("Failed to reconcile haproxy-route-policy service") - self.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") - return - except HaproxyRoutePolicyAPIError as exc: - logger.exception("Policy service API error") - self.unit.status = ops.BlockedStatus(f"policy service error: {exc.message}") - return - except ValidationError: - logger.exception("Invalid haproxy-route-policy relation data") - self.unit.status = ops.WaitingStatus( - "Waiting for valid haproxy-route-policy relation data" + if self.unit.is_leader(): + self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") + run_migrations() + + self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") + create_or_update_user( + haproxy_route_policy_information.admin_username, + haproxy_route_policy_information.admin_password, ) - return + + self.unit.status = ops.MaintenanceStatus("starting gunicorn service") + start_gunicorn_service() + + self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) + + if relation := self.haproxy_route_policy.relation: + self._fetch_and_refresh_backend_requests(haproxy_route_policy_information, relation) self.unit.status = ops.ActiveStatus() @@ -206,6 +137,36 @@ def _on_get_admin_credentials_action(self, event: ops.ActionEvent) -> None: except ops.SecretNotFoundError: event.fail("Admin credentials not found.") + def _fetch_and_refresh_backend_requests( + self, + haproxy_route_policy_information: HaproxyRoutePolicyInformation, + haproxy_route_policy_relation: ops.Relation, + ) -> None: + """Fetch backend requests from relation and refresh their status via the policy API.""" + backend_requests = haproxy_route_policy_relation.load( + HaproxyRoutePolicyRequirerAppData, haproxy_route_policy_relation.app + ).backend_requests + + client = HaproxyRoutePolicyClient( + username=haproxy_route_policy_information.admin_username, + 
password=haproxy_route_policy_information.admin_password, + ) + + self.unit.status = ops.MaintenanceStatus("evaluating backend requests via policy service") + evaluated = client.refresh(backend_requests) + + approved = [ + req + for req, ev in zip(backend_requests, evaluated, strict=True) + if ev.status == "accepted" + ] + logger.info( + "backend requests evaluated: %d total, %d approved", + len(evaluated), + len(approved), + ) + self.haproxy_route_policy.set_approved_backend_requests(approved) + if __name__ == "__main__": # pragma: nocover ops.main(HaproxyRoutePolicyCharm) diff --git a/haproxy-route-policy-operator/src/policy.py b/haproxy-route-policy-operator/src/policy.py index 5ad9590ae..62aa672fb 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -3,8 +3,6 @@ """Helpers for managing the haproxy-route-policy snap.""" -from __future__ import annotations - import logging import os import subprocess # nosec @@ -15,6 +13,7 @@ from charms.haproxy_route_policy.v0.haproxy_route_policy import ( HaproxyRoutePolicyBackendRequest, ) +from pydantic import Field from pydantic.dataclasses import dataclass SNAP_NAME = "haproxy-route-policy" @@ -95,7 +94,7 @@ class EvaluatedBackendRequest(HaproxyRoutePolicyBackendRequest): status: Evaluation status (pending, accepted, rejected). """ - status: str + status: str = Field(description="Evaluation status (pending, accepted, rejected)") class HaproxyRoutePolicyAPIError(Exception): diff --git a/haproxy-route-policy-operator/src/state/validation.py b/haproxy-route-policy-operator/src/state/validation.py new file mode 100644 index 000000000..5d0cd5422 --- /dev/null +++ b/haproxy-route-policy-operator/src/state/validation.py @@ -0,0 +1,89 @@ +# Copyright 2025 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Module for functions containing charm utilities.""" + +import functools +import logging +import typing + +import ops +from charmlibs.snap import SnapError +from pydantic import ValidationError + +from policy import HaproxyRoutePolicyAPIError, HaproxyRoutePolicyDatabaseMigrationError +from state.database import DatabaseRelationMissingError, DatabaseRelationNotReadyError +from state.policy import ( + DjangoAdminCredentialsInvalidError, + DjangoAdminCredentialsMissingError, + DjangoSecretKeyMissingError, + PeerRelationMissingError, +) + +logger = logging.getLogger(__name__) + +C = typing.TypeVar("C", bound=ops.CharmBase) + + +def handle_charm_exceptions( + method: typing.Callable[[C, typing.Any], None], +) -> typing.Callable[[C, typing.Any], None]: + """Create a decorator that puts the charm in blocked state if the config is wrong. + + Args: + method: observer method to wrap. + + Returns: + the function wrapper. + """ + + @functools.wraps(method) + def wrapper(instance: C, *args: typing.Any) -> None: + """Block the charm if the config is wrong. + + Args: + instance: the instance of the class with the hook method. + args: Additional events + + Returns: + The value returned from the original function. That is, None. 
+ """ + try: + return method(instance, *args) + except DatabaseRelationMissingError: + instance.unit.status = ops.BlockedStatus("Missing database relation.") + return + except DatabaseRelationNotReadyError: + logger.exception("Database relation not ready") + instance.unit.status = ops.WaitingStatus("waiting for complete database relation.") + return + except PeerRelationMissingError: + logger.exception("Peer relation missing") + instance.unit.status = ops.WaitingStatus("Waiting for peer relation.") + return + except ( + DjangoSecretKeyMissingError, + DjangoAdminCredentialsMissingError, + DjangoAdminCredentialsInvalidError, + ): + logger.exception("Django shared configuration not ready") + instance.unit.status = ops.WaitingStatus( + "Waiting for complete shared configuration from leader." + ) + return + except (SnapError, HaproxyRoutePolicyDatabaseMigrationError) as exc: + logger.exception("Failed to reconcile haproxy-route-policy service") + instance.unit.status = ops.BlockedStatus(f"reconciliation failed: {exc}") + return + except HaproxyRoutePolicyAPIError as exc: + logger.exception("Policy service API error") + instance.unit.status = ops.BlockedStatus(f"policy service error: {exc.message}") + return + except ValidationError: + logger.exception("Invalid haproxy-route-policy relation data") + instance.unit.status = ops.WaitingStatus( + "Waiting for valid haproxy-route-policy relation data" + ) + return + + return wrapper From 5d83e44edf2552fd539dfb848c32adff6bf69217 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 15 Apr 2026 09:57:35 +0200 Subject: [PATCH 175/201] small fixes --- haproxy-operator/src/charm.py | 27 ++++---- haproxy-operator/src/state/haproxy_route.py | 66 +++++++++++++------ .../src/state/policy.py | 2 +- 3 files changed, 59 insertions(+), 36 deletions(-) diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index e4ee1d108..44848b3dd 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -202,18 
+202,14 @@ def __init__(self, *args: typing.Any): self.framework.observe( self.on.get_proxied_endpoints_action, self._on_get_proxied_endpoints_action ) - self.framework.observe( - self.on[SPOE_AUTH_RELATION].relation_changed, self._on_config_changed - ) - self.framework.observe( - self.on[SPOE_AUTH_RELATION].relation_broken, self._on_config_changed - ) - self.framework.observe( - self.on[DDOS_PROTECTION_RELATION_NAME].relation_changed, self._on_config_changed - ) - self.framework.observe( - self.on[DDOS_PROTECTION_RELATION_NAME].relation_broken, self._on_config_changed - ) + # Hook relation-related events to the reconcile loop. + for relation in [ + SPOE_AUTH_RELATION, + DDOS_PROTECTION_RELATION_NAME, + HAPROXY_ROUTE_POLICY_RELATION_NAME, + ]: + self.framework.observe(self.on[relation].relation_changed, self._on_config_changed) + self.framework.observe(self.on[relation].relation_broken, self._on_config_changed) @validate_config_and_tls(defer=False) def _on_install(self, _: typing.Any) -> None: @@ -370,6 +366,10 @@ def _configure_haproxy_route( peers=self._get_peer_units_address(), ca_certs_configured=bool(self.recv_ca_certs.get_all_certificates()), ) + if self.unit.is_leader() and self.haproxy_route_policy.relation is not None: + self.haproxy_route_policy.provide_haproxy_route_policy_requests( + haproxy_route_requirers_information.backend_requests_for_policy + ) # We ONLY allow the charm to run with no certificate requested if: # 1. 
there's only haproxy-route-tcp relations # AND @@ -408,9 +408,6 @@ def _configure_haproxy_route( ), ) if self.unit.is_leader(): - self.haproxy_route_policy.provide_haproxy_route_policy_requests( - haproxy_route_requirers_information.backend_requests_for_policy - ) self._publish_haproxy_route_proxied_endpoints(haproxy_route_requirers_information) self._publish_haproxy_route_tcp_proxied_endpoints( haproxy_route_requirers_information, ha_information diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index d49b05238..90e3e0aa4 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -310,7 +310,7 @@ class HaproxyRouteRequirersInformation: relation_ids_with_invalid_data: set[int] relation_ids_with_invalid_data_tcp: set[int] ports_with_conflicts: set[int] - tcp_frontends: list[HAProxyRouteTcpFrontend] = Field(strict=False) + tcp_frontends: list[HAProxyRouteTcpFrontend] # This is used to transform haproxy-route requirers to backend requests for the policy charm. valid_haproxy_route_requirers: list[HaproxyRouteRequirerData] @@ -322,23 +322,29 @@ def backend_requests_for_policy(self) -> list[HaproxyRoutePolicyBackendRequest]: list[HaproxyRoutePolicyBackendRequest]: The backend requests for the policy charm. 
""" backend_requests: list[HaproxyRoutePolicyBackendRequest] = [] - for backend in self.backends: + for requirer in self.valid_haproxy_route_requirers: try: - port = backend.application_data.external_grpc_port or ( - 80 if backend.application_data.allow_http else 443 + port = requirer.application_data.external_grpc_port or ( + 80 if requirer.application_data.allow_http else 443 ) backend_requests.append( HaproxyRoutePolicyBackendRequest( - relation_id=backend.relation_id, - backend_name=backend.backend_name, - hostname_acls=list(backend.hostname_acls), - paths=backend.application_data.paths, + relation_id=requirer.relation_id, + backend_name=requirer.application_data.service, + hostname_acls=list( + generate_hostname_acls( + requirer.application_data, external_hostname=None + ) + ), + paths=requirer.application_data.paths, port=port, ) ) except ValidationError as exc: logger.error( - "Validation error for backend %s, skipping: %s", backend.backend_name, exc + "Validation error for backend %s, skipping: %s", + requirer.application_data.service, + exc, ) continue return backend_requests @@ -373,18 +379,10 @@ def from_provider( # pylint: disable=too-many-arguments """ try: # Fetch approved requests from the policy charm and cross-reference with requirers data from haproxy-route - approved_requirers = [] requirers = haproxy_route.get_data(haproxy_route.relations) - if relation := haproxy_route_policy.relation: - approved_requests = relation.load( - HaproxyRoutePolicyProviderAppData, relation.app - ).approved_requests - approved_backend_names = {request.backend_name for request in approved_requests} - approved_requirers = [ - requirer - for requirer in requirers.requirers_data - if requirer.application_data.service in approved_backend_names - ] + approved_requirers = parse_haproxy_route_policy_requirer_data( + requirers.requirers_data, haproxy_route_policy + ) # This is used to check that requirers don't ask for the same backend name. 
backend_names: set[str] = set() @@ -673,3 +671,31 @@ def parse_haproxy_route_tcp_requirers_data( logger.error(f"Failed to parse TCP frontend: {exc}") continue return tcp_frontends + + +def parse_haproxy_route_policy_requirer_data( + requirers: list[HaproxyRouteRequirerData], haproxy_route_policy: HaproxyRoutePolicyRequirer +) -> list[HaproxyRouteRequirerData]: + """Parse haproxy-route requirer data into backend requests for the policy charm. + + Args: + requirers: List of haproxy-route requirer data. + haproxy_route_policy: The haproxy-route-policy requirer instance. + """ + try: + if relation := haproxy_route_policy.relation: + approved_requests = relation.load( + HaproxyRoutePolicyProviderAppData, relation.app + ).approved_requests + approved_backend_names = {request.backend_name for request in approved_requests} + return [ + requirer + for requirer in requirers + if requirer.application_data.service in approved_backend_names + ] + except ValidationError: + logger.exception( + "Validation error when loading approved backend requests from policy relation." 
+ ) + return [] + return [] diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 4b5a24be6..236e490f2 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -97,7 +97,7 @@ def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": allowed_hosts = ( [ cast(IPvAnyAddress | FQDN, address) - for address in cast(str, charm.config.get("allowed-hosts")).split(",") + for address in cast(str, charm.config.get("extra-allowed-hosts")).split(",") ] if charm.config.get("extra-allowed-hosts") else [] From 66bb54dff13c67bd8544932b4e592d6cd4d9f961 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 15 Apr 2026 13:01:07 +0200 Subject: [PATCH 176/201] fix unit tests --- haproxy-operator/src/state/haproxy_route.py | 68 +++++++++++++------ .../tests/unit/test_haproxy_route_lib.py | 1 + haproxy-operator/tests/unit/test_state.py | 9 +++ 3 files changed, 57 insertions(+), 21 deletions(-) diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index d49b05238..f931b6820 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -28,7 +28,7 @@ HaproxyRoutePolicyProviderAppData, HaproxyRoutePolicyRequirer, ) -from pydantic import Field, IPvAnyAddress, ValidationError, model_validator +from pydantic import IPvAnyAddress, ValidationError, model_validator from pydantic.dataclasses import dataclass from typing_extensions import Self @@ -310,7 +310,7 @@ class HaproxyRouteRequirersInformation: relation_ids_with_invalid_data: set[int] relation_ids_with_invalid_data_tcp: set[int] ports_with_conflicts: set[int] - tcp_frontends: list[HAProxyRouteTcpFrontend] = Field(strict=False) + tcp_frontends: list[HAProxyRouteTcpFrontend] # This is used to transform haproxy-route requirers to backend requests for the policy charm. 
valid_haproxy_route_requirers: list[HaproxyRouteRequirerData] @@ -322,23 +322,29 @@ def backend_requests_for_policy(self) -> list[HaproxyRoutePolicyBackendRequest]: list[HaproxyRoutePolicyBackendRequest]: The backend requests for the policy charm. """ backend_requests: list[HaproxyRoutePolicyBackendRequest] = [] - for backend in self.backends: + for requirer in self.valid_haproxy_route_requirers: try: - port = backend.application_data.external_grpc_port or ( - 80 if backend.application_data.allow_http else 443 + port = requirer.application_data.external_grpc_port or ( + 80 if requirer.application_data.allow_http else 443 ) backend_requests.append( HaproxyRoutePolicyBackendRequest( - relation_id=backend.relation_id, - backend_name=backend.backend_name, - hostname_acls=list(backend.hostname_acls), - paths=backend.application_data.paths, + relation_id=requirer.relation_id, + backend_name=requirer.application_data.service, + hostname_acls=list( + generate_hostname_acls( + requirer.application_data, external_hostname=None + ) + ), + paths=requirer.application_data.paths, port=port, ) ) except ValidationError as exc: logger.error( - "Validation error for backend %s, skipping: %s", backend.backend_name, exc + "Validation error for backend %s, skipping: %s", + requirer.application_data.service, + exc, ) continue return backend_requests @@ -373,18 +379,10 @@ def from_provider( # pylint: disable=too-many-arguments """ try: # Fetch approved requests from the policy charm and cross-reference with requirers data from haproxy-route - approved_requirers = [] requirers = haproxy_route.get_data(haproxy_route.relations) - if relation := haproxy_route_policy.relation: - approved_requests = relation.load( - HaproxyRoutePolicyProviderAppData, relation.app - ).approved_requests - approved_backend_names = {request.backend_name for request in approved_requests} - approved_requirers = [ - requirer - for requirer in requirers.requirers_data - if requirer.application_data.service in 
approved_backend_names - ] + approved_requirers = parse_haproxy_route_policy_requirer_data( + requirers.requirers_data, haproxy_route_policy + ) # This is used to check that requirers don't ask for the same backend name. backend_names: set[str] = set() @@ -673,3 +671,31 @@ def parse_haproxy_route_tcp_requirers_data( logger.error(f"Failed to parse TCP frontend: {exc}") continue return tcp_frontends + + +def parse_haproxy_route_policy_requirer_data( + requirers: list[HaproxyRouteRequirerData], haproxy_route_policy: HaproxyRoutePolicyRequirer +) -> list[HaproxyRouteRequirerData]: + """Parse haproxy-route requirer data into backend requests for the policy charm. + + Args: + requirers: List of haproxy-route requirer data. + haproxy_route_policy: The haproxy-route-policy requirer instance. + """ + try: + if relation := haproxy_route_policy.relation: + approved_requests = relation.load( + HaproxyRoutePolicyProviderAppData, relation.app + ).approved_requests + approved_backend_names = {request.backend_name for request in approved_requests} + return [ + requirer + for requirer in requirers + if requirer.application_data.service in approved_backend_names + ] + except ValidationError: + logger.exception( + "Validation error when loading approved backend requests from policy relation." 
+ ) + return [] + return [] diff --git a/haproxy-operator/tests/unit/test_haproxy_route_lib.py b/haproxy-operator/tests/unit/test_haproxy_route_lib.py index db54df481..8db91645a 100644 --- a/haproxy-operator/tests/unit/test_haproxy_route_lib.py +++ b/haproxy-operator/tests/unit/test_haproxy_route_lib.py @@ -117,6 +117,7 @@ def test_haproxy_route_requirer_information( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname=None, peers=[], ca_certs_configured=False, diff --git a/haproxy-operator/tests/unit/test_state.py b/haproxy-operator/tests/unit/test_state.py index 6e65ce177..c610d940a 100644 --- a/haproxy-operator/tests/unit/test_state.py +++ b/haproxy-operator/tests/unit/test_state.py @@ -187,6 +187,7 @@ def test_haproxy_route_requirer_information_reserved_ports( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="test.domain", peers=[], ca_certs_configured=False, @@ -223,6 +224,7 @@ def test_haproxy_route_requirer_information( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname=None, peers=[], ca_certs_configured=False, @@ -313,6 +315,7 @@ def test_tcp_grpc_port_conflict( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="haproxy.internal", peers=[], ca_certs_configured=True, @@ -363,6 +366,7 @@ def 
test_tcp_port_conflict_standard_ports( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="haproxy.internal", peers=[], ca_certs_configured=False, @@ -414,6 +418,7 @@ def test_grpc_port_conflict_standard_ports( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="haproxy.internal", peers=[], ca_certs_configured=True, @@ -467,6 +472,7 @@ def test_tcp_grpc_different_ports( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="haproxy.internal", peers=[], ca_certs_configured=True, @@ -511,6 +517,7 @@ def test_tcp_only_happy_path( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="test.example.com", peers=[], ca_certs_configured=False, @@ -551,6 +558,7 @@ def test_http_only_happy_path( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + haproxy_route_policy=MagicMock(relation=None), external_hostname="haproxy.internal", peers=[], ca_certs_configured=False, @@ -973,6 +981,7 @@ def test_haproxy_route_requirers_information_with_wildcard_hostnames( haproxy_route_information = HaproxyRouteRequirersInformation.from_provider( haproxy_route=haproxy_route_provider_mock, haproxy_route_tcp=haproxy_route_tcp_provider_mock, + 
haproxy_route_policy=MagicMock(relation=None), external_hostname="haproxy.internal", peers=[], ca_certs_configured=False, From 2b9245f9d2fb4cec3dd06d10351a4ea45ec0bfa7 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 15 Apr 2026 13:31:52 +0200 Subject: [PATCH 177/201] add static assets --- haproxy-route-policy/snap/snapcraft.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/haproxy-route-policy/snap/snapcraft.yaml b/haproxy-route-policy/snap/snapcraft.yaml index c070370fd..c512a7ad3 100644 --- a/haproxy-route-policy/snap/snapcraft.yaml +++ b/haproxy-route-policy/snap/snapcraft.yaml @@ -26,6 +26,7 @@ parts: override-build: | # Also copy the source code to the install directory for the manage.py script UV_PROJECT_ENVIRONMENT=venv uv sync + DJANGO_SECRET_KEY=collectstatic ./venv/bin/python3 manage.py collectstatic --noinput cp -r . $SNAPCRAFT_PART_INSTALL/app chown -R 584792:584792 $SNAPCRAFT_PART_INSTALL/app craftctl default From 9c0d188e1ac2059d1277ccae60eff41452002582 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 15 Apr 2026 18:46:28 +0200 Subject: [PATCH 178/201] update db schema, add static files to gunicorn, update business logic --- .../haproxy_route_policy/settings.py | 2 +- .../haproxy_route_policy/urls.py | 3 +++ haproxy-route-policy/policy/admin.py | 15 +++++++++++++ haproxy-route-policy/policy/db_models.py | 2 ++ .../policy/migrations/0003_alter_rule_kind.py | 22 +++++++++++++++++++ haproxy-route-policy/policy/rule_engine.py | 21 ++++++++++++++++++ haproxy-route-policy/policy/urls.py | 5 +++++ haproxy-route-policy/policy/views.py | 20 +++++++++++++++++ haproxy-route-policy/snap/hooks/configure | 12 ---------- .../snap/scripts/bin/gunicorn-start | 2 -- haproxy-route-policy/snap/scripts/bin/manage | 2 -- 11 files changed, 89 insertions(+), 17 deletions(-) create mode 100644 haproxy-route-policy/policy/admin.py create mode 100644 haproxy-route-policy/policy/migrations/0003_alter_rule_kind.py diff --git 
a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 2343aa328..9790b6b11 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -21,7 +21,7 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") -DEBUG = os.environ.get("DJANGO_DEBUG", "").lower() == "true" +DEBUG = True ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS", "[]")) diff --git a/haproxy-route-policy/haproxy_route_policy/urls.py b/haproxy-route-policy/haproxy_route_policy/urls.py index 711c78c61..b12d34b31 100644 --- a/haproxy-route-policy/haproxy_route_policy/urls.py +++ b/haproxy-route-policy/haproxy_route_policy/urls.py @@ -25,6 +25,7 @@ TokenRefreshView, TokenVerifyView, ) +from django.contrib.staticfiles.urls import staticfiles_urlpatterns from policy import urls as policy_urls @@ -35,3 +36,5 @@ path("api/token/verify/", TokenVerifyView.as_view(), name="token_verify"), path("", include(policy_urls)), ] + +urlpatterns += staticfiles_urlpatterns() diff --git a/haproxy-route-policy/policy/admin.py b/haproxy-route-policy/policy/admin.py new file mode 100644 index 000000000..efaa5f90c --- /dev/null +++ b/haproxy-route-policy/policy/admin.py @@ -0,0 +1,15 @@ +# Copyright 2026 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +from django.contrib import admin +from .db_models import BackendRequest, Rule + + +@admin.register(BackendRequest) +class BackendRequestAdmin(admin.ModelAdmin): + pass + + +@admin.register(Rule) +class RuleAdmin(admin.ModelAdmin): + pass diff --git a/haproxy-route-policy/policy/db_models.py b/haproxy-route-policy/policy/db_models.py index 03b178c39..194f8e893 100644 --- a/haproxy-route-policy/policy/db_models.py +++ b/haproxy-route-policy/policy/db_models.py @@ -33,9 +33,11 @@ RULE_ACTION_CHOICES = [(action, action) for action in RULE_ACTIONS] RULE_KIND_HOSTNAME_AND_PATH_MATCH = "hostname_and_path_match" +RULE_KIND_BACKEND_MATCH = "backend_match" RULE_KINDS = [ RULE_KIND_HOSTNAME_AND_PATH_MATCH, + RULE_KIND_BACKEND_MATCH, ] RULE_KIND_CHOICES = [(kind, kind) for kind in RULE_KINDS] diff --git a/haproxy-route-policy/policy/migrations/0003_alter_rule_kind.py b/haproxy-route-policy/policy/migrations/0003_alter_rule_kind.py new file mode 100644 index 000000000..e3e8e53e2 --- /dev/null +++ b/haproxy-route-policy/policy/migrations/0003_alter_rule_kind.py @@ -0,0 +1,22 @@ +# Generated by Django 6.0.4 on 2026-04-15 16:32 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("policy", "0002_rule"), + ] + + operations = [ + migrations.AlterField( + model_name="rule", + name="kind", + field=models.TextField( + choices=[ + ("hostname_and_path_match", "hostname_and_path_match"), + ("backend_match", "backend_match"), + ] + ), + ), + ] diff --git a/haproxy-route-policy/policy/rule_engine.py b/haproxy-route-policy/policy/rule_engine.py index c59a27535..55f1cdcec 100644 --- a/haproxy-route-policy/policy/rule_engine.py +++ b/haproxy-route-policy/policy/rule_engine.py @@ -20,6 +20,7 @@ RULE_ACTION_ALLOW, RULE_ACTION_DENY, RULE_KIND_HOSTNAME_AND_PATH_MATCH, + RULE_KIND_BACKEND_MATCH, REQUEST_STATUS_ACCEPTED, REQUEST_STATUS_REJECTED, REQUEST_STATUS_PENDING, @@ -64,6 +65,24 @@ def _hostname_and_path_match(rule: Rule, request: 
BackendRequest) -> bool: return bool(set(rule_paths).intersection(request.paths)) +def _backend_match(rule: Rule, request: BackendRequest) -> bool: + """Check if a backend_match rule matches a backend request. + + A rule matches if the rule's `backends` list contains the request's + `backend_name`. + + Args: + rule: The rule to check. + request: The backend request to evaluate. + + Returns: + True if the rule matches the request, False otherwise. + """ + if rule_backend_name := rule.parameters.get("backend_name"): + return request.backend_name == rule_backend_name + return False + + def evaluate_request(request: BackendRequest) -> str: """Evaluate a backend request against all rules and return the resulting status. @@ -119,4 +138,6 @@ def _matches(rule: Rule, request: BackendRequest) -> bool: """ if rule.kind == RULE_KIND_HOSTNAME_AND_PATH_MATCH: return _hostname_and_path_match(rule, request) + if rule.kind == RULE_KIND_BACKEND_MATCH: + return _backend_match(rule, request) return False diff --git a/haproxy-route-policy/policy/urls.py b/haproxy-route-policy/policy/urls.py index 3229cb919..14027efea 100644 --- a/haproxy-route-policy/policy/urls.py +++ b/haproxy-route-policy/policy/urls.py @@ -18,6 +18,11 @@ views.RequestDetailView.as_view(), name="api-request-detail", ), + path( + "api/v1/requests/refresh", + views.RequestRefreshView.as_view(), + name="api-request-refresh", + ), path( "api/v1/rules", views.ListCreateRulesView.as_view(), diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index b4483518c..6421133c0 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -141,6 +141,26 @@ def delete(self, request, pk): return Response(status=HTTP_204_NO_CONTENT) +class RequestRefreshView(APIView): + """View for re-evaluating a backend request against all rules.""" + + def get(self, request): + """Re-evaluate all requests.""" + queryset = BackendRequest.objects.all() + processed_requests = [] + 
with transaction.atomic(): + for backend_request in queryset: + serializer = BackendRequestSerializer(backend_request) + if serializer.is_valid(): + serializer.save( + status=evaluate_request( + BackendRequest(**serializer.validated_data) + ) + ) + processed_requests.append(serializer.data) + return Response(processed_requests) + + def get_object(object_class: Type[Rule] | Type[BackendRequest], pk: str): try: return object_class.objects.get(pk=pk) diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure index 34cea617d..61d9008c1 100644 --- a/haproxy-route-policy/snap/hooks/configure +++ b/haproxy-route-policy/snap/hooks/configure @@ -3,18 +3,6 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. -DJANGO_DEBUG="$(snapctl get debug)" -export DJANGO_DEBUG - -case "$DJANGO_DEBUG" in - "true") ;; - "false") ;; - *) - >&2 echo "'$DJANGO_DEBUG is not a supported value for django_debug. Possible values are true, false" - return 1 - ;; -esac - DJANGO_LOG_LEVEL="$(snapctl get log-level)" export DJANGO_LOG_LEVEL diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start index e42d3d2cd..b3342d99a 100755 --- a/haproxy-route-policy/snap/scripts/bin/gunicorn-start +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -7,8 +7,6 @@ set -xe DJANGO_SECRET_KEY="$(snapctl get secret-key)" export DJANGO_SECRET_KEY -DJANGO_DEBUG="$(snapctl get debug)" -export DJANGO_DEBUG DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" export DJANGO_ALLOWED_HOSTS DJANGO_LOG_LEVEL="$(snapctl get log-level)" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 6901bead8..c9e1c4511 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -7,8 +7,6 @@ set -e DJANGO_SECRET_KEY="$(snapctl get secret-key)" export DJANGO_SECRET_KEY -DJANGO_DEBUG="$(snapctl get 
debug)" -export DJANGO_DEBUG DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" if [ -z "$DJANGO_ALLOWED_HOSTS" ]; then DJANGO_ALLOWED_HOSTS="[]" From 9fdbd00de15ad8d1e2de46eb22899ec9cbf605af Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 15 Apr 2026 19:17:37 +0200 Subject: [PATCH 179/201] update custom view in admin panel, fix small issue with model validation --- haproxy-route-policy/policy/admin.py | 31 +++++++++++++++++-- haproxy-route-policy/policy/serializers.py | 9 ++++++ .../policy/refresh_requests_form.html | 14 +++++++++ haproxy-route-policy/policy/views.py | 15 +++++---- 4 files changed, 59 insertions(+), 10 deletions(-) create mode 100644 haproxy-route-policy/policy/templates/policy/refresh_requests_form.html diff --git a/haproxy-route-policy/policy/admin.py b/haproxy-route-policy/policy/admin.py index efaa5f90c..0f60fca0d 100644 --- a/haproxy-route-policy/policy/admin.py +++ b/haproxy-route-policy/policy/admin.py @@ -1,13 +1,40 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. 
-from django.contrib import admin +from django.contrib import admin, messages +from django.http import HttpResponseRedirect +from django.urls import path, reverse from .db_models import BackendRequest, Rule +from .views import RequestRefreshView @admin.register(BackendRequest) class BackendRequestAdmin(admin.ModelAdmin): - pass + change_list_template = "policy/refresh_requests_form.html" + readonly_fields = ("status",) + + def get_urls(self): + urls = super().get_urls() + custom_urls = [ + path( + "refresh/", + self.admin_site.admin_view(self.refresh_requests), + name="policy_backendrequest_refresh", + ), + ] + return custom_urls + urls + + def refresh_requests(self, request): + """Re-evaluate all backend requests by delegating to RequestRefreshView.""" + view = RequestRefreshView() + view.get(request) + count = BackendRequest.objects.count() + self.message_user( + request, + f"Successfully refreshed {count} request(s).", + messages.SUCCESS, + ) + return HttpResponseRedirect(reverse("admin:policy_backendrequest_changelist")) @admin.register(Rule) diff --git a/haproxy-route-policy/policy/serializers.py b/haproxy-route-policy/policy/serializers.py index a71d371a7..4ef3e76d0 100644 --- a/haproxy-route-policy/policy/serializers.py +++ b/haproxy-route-policy/policy/serializers.py @@ -46,4 +46,13 @@ def validate(self, attrs): raise serializers.ValidationError( f"Invalid path(s) in rule: {', '.join([str(path) for path in invalid_paths])}" ) + if attrs.get("kind") == "backend_match": + if not isinstance(attrs.get("parameters"), dict): + raise serializers.ValidationError( + "The parameters field must be a JSON object." + ) + if not attrs["parameters"].get("backend_name"): + raise serializers.ValidationError( + "The parameters field must contain a 'backend_name' key for backend_match rules." 
+ ) return attrs diff --git a/haproxy-route-policy/policy/templates/policy/refresh_requests_form.html b/haproxy-route-policy/policy/templates/policy/refresh_requests_form.html new file mode 100644 index 000000000..f2fd5b1b4 --- /dev/null +++ b/haproxy-route-policy/policy/templates/policy/refresh_requests_form.html @@ -0,0 +1,14 @@ +{% extends "admin/change_list.html" %} +{% load i18n %} + +{% block object-tools-items %} +
  <li>
+    <form method="post" action="refresh/">
+      {% csrf_token %}
+      <button type="submit" class="button">Refresh requests</button>
+    </form>
  • + {{ block.super }} +{% endblock %} diff --git a/haproxy-route-policy/policy/views.py b/haproxy-route-policy/policy/views.py index 6421133c0..af91cbb4f 100644 --- a/haproxy-route-policy/policy/views.py +++ b/haproxy-route-policy/policy/views.py @@ -150,14 +150,13 @@ def get(self, request): processed_requests = [] with transaction.atomic(): for backend_request in queryset: - serializer = BackendRequestSerializer(backend_request) - if serializer.is_valid(): - serializer.save( - status=evaluate_request( - BackendRequest(**serializer.validated_data) - ) - ) - processed_requests.append(serializer.data) + new_status = evaluate_request(backend_request) + if backend_request.status != new_status: + backend_request.status = new_status + backend_request.save() + processed_requests.append( + BackendRequestSerializer(backend_request).data + ) return Response(processed_requests) From d18746699c0bee0741b0d26b7053be34eaff807d Mon Sep 17 00:00:00 2001 From: Phan Trung Thanh Date: Wed, 15 Apr 2026 20:56:34 +0200 Subject: [PATCH 180/201] Update haproxy-route-policy-operator/charmcraft.yaml Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- haproxy-route-policy-operator/charmcraft.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-route-policy-operator/charmcraft.yaml b/haproxy-route-policy-operator/charmcraft.yaml index 3c81563c2..5dafdc376 100644 --- a/haproxy-route-policy-operator/charmcraft.yaml +++ b/haproxy-route-policy-operator/charmcraft.yaml @@ -68,6 +68,6 @@ config: options: extra-allowed-hosts: type: string - description: A comma-separated list of host/domain names that the dns-policy-app API + description: A comma-separated list of host/domain names that the haproxy-route-policy API can serve. This configuration will set the DJANGO_ALLOWED_HOSTS environment variable with its content being a JSON encoded list. 
From 894df6da8b9ec411fc12d9b3e47cdea794293316 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 16 Apr 2026 10:34:37 +0200 Subject: [PATCH 181/201] fix issues in PR --- haproxy-operator/charmcraft.yaml | 2 +- haproxy-route-policy-operator/src/state/policy.py | 6 +++--- .../tests/integration/haproxy_route_policy_requirer.py | 4 ++-- haproxy-route-policy-operator/tests/unit/test_charm.py | 6 +++--- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/haproxy-operator/charmcraft.yaml b/haproxy-operator/charmcraft.yaml index 121826eee..3f8f9c06b 100644 --- a/haproxy-operator/charmcraft.yaml +++ b/haproxy-operator/charmcraft.yaml @@ -146,4 +146,4 @@ charm-libs: - lib: haproxy.ddos_protection version: "0" - lib: haproxy_route_policy.haproxy_route_policy - version: "0" \ No newline at end of file + version: "0" diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 4b5a24be6..4f0a6ace7 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -18,7 +18,7 @@ DJANGO_SECRET_KEY_SECRET_LABEL = "django-secret-key" # nosec DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL = "django-admin-credentials" # nosec PEER_RELATION_NAME = "haproxy-route-policy-peer" -SECRET_LENGTH = 32 +SECRET_NBYTES = 32 DEFAULT_ALLOWED_HOSTS = ["localhost"] @@ -131,7 +131,7 @@ def _get_django_admin_credentials( if charm.unit.is_leader(): django_admin_credentials_data = { "username": "admin", - "password": secrets.token_urlsafe(SECRET_LENGTH), + "password": secrets.token_urlsafe(SECRET_NBYTES), } secret = charm.app.add_secret( label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, @@ -160,7 +160,7 @@ def _get_django_secret_key(charm: ops.CharmBase, peer_relation: ops.Relation) -> return secret.get_content() except ops.SecretNotFoundError: if charm.unit.is_leader(): - django_secret_key_data = {"secret-key": secrets.token_urlsafe(SECRET_LENGTH)} + django_secret_key_data = {"secret-key": 
secrets.token_urlsafe(SECRET_NBYTES)} secret = charm.app.add_secret( label=DJANGO_SECRET_KEY_SECRET_LABEL, content=django_secret_key_data ) diff --git a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py index 46a680b43..357c8077b 100644 --- a/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py +++ b/haproxy-route-policy-operator/tests/integration/haproxy_route_policy_requirer.py @@ -2,7 +2,7 @@ # Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. -"""haproxy-route requirer source.""" +"""haproxy-route-policy requirer source.""" import logging @@ -30,7 +30,7 @@ def __init__(self, *args, **kwargs): ) def update_relation(self): - """Update haproxy-route-tcp relation data""" + """Update haproxy-route-policy relation data""" backend_requests = [ HaproxyRoutePolicyBackendRequest( relation_id=1, diff --git a/haproxy-route-policy-operator/tests/unit/test_charm.py b/haproxy-route-policy-operator/tests/unit/test_charm.py index 389084420..e8a63db87 100644 --- a/haproxy-route-policy-operator/tests/unit/test_charm.py +++ b/haproxy-route-policy-operator/tests/unit/test_charm.py @@ -13,7 +13,7 @@ from state.policy import ( DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, DJANGO_SECRET_KEY_SECRET_LABEL, - SECRET_LENGTH, + SECRET_NBYTES, ) @@ -73,14 +73,14 @@ def test_config_changed_reconciles_snap_with_database_credentials(is_leader): secrets=[ testing.Secret( label=DJANGO_SECRET_KEY_SECRET_LABEL, - tracked_content={"secret-key": secrets.token_urlsafe(SECRET_LENGTH)}, + tracked_content={"secret-key": secrets.token_urlsafe(SECRET_NBYTES)}, ), testing.Secret( label=DJANGO_ADMIN_CREDENTIALS_SECRET_LABEL, # Ignore bandit warning as this is for testing. 
tracked_content={ "username": "admin", - "password": secrets.token_urlsafe(SECRET_LENGTH), + "password": secrets.token_urlsafe(SECRET_NBYTES), }, # nosec ), ], From 0417bac7414e404a550cafa4f768293533c83764 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 16 Apr 2026 11:19:05 +0200 Subject: [PATCH 182/201] minor fixes --- .../charms/haproxy_route_policy/v0/haproxy_route_policy.py | 4 ++-- haproxy-route-policy-operator/src/charm.py | 1 - haproxy-route-policy-operator/src/state/policy.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index c752f1811..cd8342656 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -6,9 +6,9 @@ This interface is used between the HAProxy charm (requirer) and the haproxy-route-policy charm (provider). -The requirer publishes route policy requests under ``requests`` as a list of +The requirer publishes route policy requests under ``backend_requests`` as a list of HAProxy backend objects. The provider publishes approved entries under -``approved_backends`` and additionally exposes ``policy_backend_port`` and +``approved_requests`` and additionally exposes ``policy_backend_port`` and provider unit addresses for policy web UI routing. 
""" diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index e2c20500a..e78671c30 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -127,7 +127,6 @@ def _reconcile(self, _: ops.EventBase) -> None: requests = relation.load( HaproxyRoutePolicyRequirerAppData, relation.app ).backend_requests - logger.info(f"backend requests {requests}, auto approved.") self.haproxy_route_policy.set_approved_backend_requests(requests) except DatabaseRelationMissingError: diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 4f0a6ace7..f1a80322b 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -3,7 +3,7 @@ # Copyright 2026 Canonical Ltd. # See LICENSE file for licensing details. -"""Charm state for database information.""" +"""Charm state for HAProxy route policy information.""" import json import secrets From 54b0d925d88c284faa926a8dd467d7fed71ed858 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 16 Apr 2026 13:44:38 +0200 Subject: [PATCH 183/201] update method and add change artifact --- docs/release-notes/artifacts/pr0458.yaml | 21 +++++++++++++++++++++ haproxy-operator/src/state/haproxy_route.py | 7 +++++-- 2 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 docs/release-notes/artifacts/pr0458.yaml diff --git a/docs/release-notes/artifacts/pr0458.yaml b/docs/release-notes/artifacts/pr0458.yaml new file mode 100644 index 000000000..c41a327aa --- /dev/null +++ b/docs/release-notes/artifacts/pr0458.yaml @@ -0,0 +1,21 @@ +version_schema: 2 + +changes: + - title: Integrated haproxy-route-policy relation into haproxy-operator + author: tphan025 + type: minor + description: > + Added a new `haproxy-route-policy` requirer relation to the haproxy-operator + charm. 
When the relation is present, the charm publishes backend requests + derived from haproxy-route requirers to the policy provider and only + configures backends that appear in the approved list returned by the policy + charm. When no policy relation exists, all haproxy-route backends continue + to be configured as before. Vendored the haproxy-route-policy interface + library and updated unit tests to pass the new policy parameter. + urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/458 + related_doc: + related_issue: + visibility: public + highlight: false diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index f931b6820..8bf280b4e 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -380,7 +380,7 @@ def from_provider( # pylint: disable=too-many-arguments try: # Fetch approved requests from the policy charm and cross-reference with requirers data from haproxy-route requirers = haproxy_route.get_data(haproxy_route.relations) - approved_requirers = parse_haproxy_route_policy_requirer_data( + approved_requirers = get_approved_requirers_from_policy( requirers.requirers_data, haproxy_route_policy ) @@ -673,7 +673,7 @@ def parse_haproxy_route_tcp_requirers_data( return tcp_frontends -def parse_haproxy_route_policy_requirer_data( +def get_approved_requirers_from_policy( requirers: list[HaproxyRouteRequirerData], haproxy_route_policy: HaproxyRoutePolicyRequirer ) -> list[HaproxyRouteRequirerData]: """Parse haproxy-route requirer data into backend requests for the policy charm. @@ -681,6 +681,9 @@ def parse_haproxy_route_policy_requirer_data( Args: requirers: List of haproxy-route requirer data. haproxy_route_policy: The haproxy-route-policy requirer instance. + + Returns: + list[HaproxyRouteRequirerData]: The list of requirer data that are approved by the policy charm. 
""" try: if relation := haproxy_route_policy.relation: From e134d0645fbf3eaa1c0a1efefd7919e9ba08509f Mon Sep 17 00:00:00 2001 From: tphan025 Date: Thu, 16 Apr 2026 16:32:39 +0200 Subject: [PATCH 184/201] expose policy provider via a backend, update tests --- .../v0/haproxy_route_policy.py | 20 ++- haproxy-operator/src/haproxy.py | 1 + haproxy-operator/src/state/haproxy_route.py | 136 +++++++++++++++++- .../templates/haproxy_route.cfg.j2 | 8 ++ .../v0/haproxy_route_policy.py | 16 ++- haproxy-route-policy-operator/src/charm.py | 4 +- .../unit/test_haproxy_route_policy_lib.py | 4 +- 7 files changed, 176 insertions(+), 13 deletions(-) diff --git a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index c752f1811..879afe771 100644 --- a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -6,9 +6,9 @@ This interface is used between the HAProxy charm (requirer) and the haproxy-route-policy charm (provider). -The requirer publishes route policy requests under ``requests`` as a list of +The requirer publishes route policy requests under ``backend_requests`` as a list of HAProxy backend objects. The provider publishes approved entries under -``approved_backends`` and additionally exposes ``policy_backend_port`` and +``approved_requests`` and additionally exposes ``policy_backend_port`` and provider unit addresses for policy web UI routing. """ @@ -40,7 +40,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 2 +LIBPATCH = 4 def valid_domain_with_wildcard(value: str) -> str: @@ -118,6 +118,12 @@ class HaproxyRoutePolicyProviderAppData: approved_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of approved backend requests." 
) + policy_backend_port: int = Field( + gt=0, + le=65535, + description="Port number for the policy backend service (e.g. for routing to policy web UI).", + ) + model: str = Field(description="Model name where the policy backend is deployed.") class HaproxyRoutePolicyProvider(Object): @@ -144,7 +150,7 @@ def relation(self) -> Relation | None: return self.charm.model.get_relation(self.relation_name) def set_approved_backend_requests( - self, approved_requests: list[HaproxyRoutePolicyBackendRequest] + self, approved_requests: list[HaproxyRoutePolicyBackendRequest], policy_backend_port: int ) -> None: """Set and publish approved backend requests.""" relation = self.relation @@ -152,7 +158,11 @@ def set_approved_backend_requests( return try: - app_data = HaproxyRoutePolicyProviderAppData(approved_requests=approved_requests) + app_data = HaproxyRoutePolicyProviderAppData( + approved_requests=approved_requests, + policy_backend_port=policy_backend_port, + model=self.charm.model.name, + ) relation.save(app_data, self.charm.app) except ( ValidationError, diff --git a/haproxy-operator/src/haproxy.py b/haproxy-operator/src/haproxy.py index 8da4067af..1d92f0ea3 100644 --- a/haproxy-operator/src/haproxy.py +++ b/haproxy-operator/src/haproxy.py @@ -197,6 +197,7 @@ def reconcile_haproxy_route( "spoe_auth_info_list": spoe_oauth_info_list, "ip_allow_list_file": IP_ALLOW_LIST_FILE, "deny_paths_file": DENY_PATHS_FILE, + "policy_provider_backend": haproxy_route_requirers_information.policy_provider_backend, } self._render_haproxy_config(HAPROXY_ROUTE_CONFIG_TEMPLATE, template_context) if spoe_oauth_info_list: diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index 8bf280b4e..2648ba034 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -28,7 +28,7 @@ HaproxyRoutePolicyProviderAppData, HaproxyRoutePolicyRequirer, ) -from pydantic import IPvAnyAddress, ValidationError, 
model_validator +from pydantic import Field, IPvAnyAddress, ValidationError, model_validator from pydantic.dataclasses import dataclass from typing_extensions import Self @@ -287,6 +287,133 @@ def enable_http_check(self) -> bool: return self.application_data.protocol == "http" +@dataclass(frozen=True) +class HaproxyRoutePolicyProviderBackend: + """A representation of the haproxy-route-policy provider backend. + + Attrs: + policy_backend_port: The port of the policy backend. + policy_backend_unit_addresses: The list of unit addresses for the policy backend. + path: The path identifier for the policy backend. + hostname: The external hostname for the policy backend. + """ + + policy_backend_port: int = Field( + gt=0, le=65535, description="Port number for the policy backend." + ) + policy_backend_unit_addresses: list[IPvAnyAddress] = Field( + description="List of unit addresses for the policy backend." + ) + model: str = Field(description="Model name for the policy backend.") + app: str = Field(description="Application name for the policy backend.") + hostname: Optional[str] = Field(description="External hostname for the policy backend.") + + @classmethod + def from_requirer( + cls, haproxy_route_policy: HaproxyRoutePolicyRequirer, external_hostname: str | None + ) -> "HaproxyRoutePolicyProviderBackend| None": + """Create a HaproxyRoutePolicyProviderBackend from the policy requirer relation. + + Args: + haproxy_route_policy: The haproxy-route-policy requirer instance. + external_hostname: The charm's configured external hostname. + + Returns: + HaproxyRoutePolicyProviderBackend or None if relation data is invalid or missing. + """ + try: + if relation := haproxy_route_policy.relation: + provider_data = relation.load(HaproxyRoutePolicyProviderAppData, relation.app) + provider_unit_addresses = [ + # explicitly cast to IPvAnyAddress because we already filtered out None values. 
+ cast(IPvAnyAddress, relation.data[unit].get("private-address")) + for unit in relation.units + if relation.data[unit].get("private-address") is not None + ] + return cls( + policy_backend_port=provider_data.policy_backend_port, + policy_backend_unit_addresses=provider_unit_addresses, + model=provider_data.model, + app=relation.app.name, + hostname=external_hostname, + ) + except ValidationError as exc: + logger.error("Validation error when parsing policy provider backend: %s", exc) + # We don't propagate this error because the data is likely incomplete and in that case + # we don't block the charm but simply skip rendering of the policy backend. + return None + + @property + def backend_name(self) -> str: + """The backend name for the policy backend. + + Returns: + str: The backend name for the policy backend. + """ + return f"policy_{self.model}_{self.app}" + + @property + def hostname_acl_name(self) -> str: + """The hostname ACL name for the policy backend. + + Returns: + str: The hostname ACL name for the policy backend. + """ + return f"{self.backend_name}_hostname" + + @property + def path_acl_name(self) -> str: + """The path ACL name for the policy backend. + + Returns: + str: The path ACL name for the policy backend. + """ + return f"{self.backend_name}_path" + + @property + def hostname_acl(self) -> str | None: + """Build the hostname ACL for the policy backend. + + Returns: + str | None: The hostname ACL string, or None if no hostname is set. + """ + if self.hostname: + return f"acl {self.hostname_acl_name} req.hdr(host),field(1,:) -i {self.hostname}" + return None + + @property + def path_acl(self) -> str: + """Build the path ACL for the policy backend. + + Returns: + str: The path ACL string. + """ + return f"acl {self.path_acl_name} path_beg -i /{self.model}-{self.app}" + + @property + def policy_backend_server_configuration(self) -> list[str]: + """Build the backend server configuration for the policy backend. 
+ + Returns: + list[str]: The backend server configuration for the policy backend. + """ + return [ + f"server {self.app}_{unit_index} {address!s}:{self.policy_backend_port} check" + for unit_index, address in enumerate(self.policy_backend_unit_addresses) + ] + + @property + def use_backend_configuration(self) -> str: + """Build the use_backend configuration for the policy backend. + + Returns: + str: The use_backend configuration for the policy backend. + """ + if hostname_acl := self.hostname_acl: + return f"use_backend {self.backend_name} if {self.path_acl} {hostname_acl}" + return f"use_backend {self.backend_name} if {self.path_acl}" + + # pylint: disable=too-many-locals @dataclass(frozen=True) class HaproxyRouteRequirersInformation: @@ -313,6 +440,7 @@ class HaproxyRouteRequirersInformation: tcp_frontends: list[HAProxyRouteTcpFrontend] # This is used to transform haproxy-route requirers to backend requests for the policy charm. valid_haproxy_route_requirers: list[HaproxyRouteRequirerData] + policy_provider_backend: HaproxyRoutePolicyProviderBackend | None @property def backend_requests_for_policy(self) -> list[HaproxyRoutePolicyBackendRequest]: @@ -452,9 +580,11 @@ def from_provider( # pylint: disable=too-many-arguments tcp_frontends=tcp_frontends, ports_with_conflicts=set[int](), valid_haproxy_route_requirers=requirers.requirers_data, + policy_provider_backend=HaproxyRoutePolicyProviderBackend.from_requirer( + haproxy_route_policy=haproxy_route_policy, external_hostname=external_hostname + ), ) - except DataValidationError as exc: - # This exception is only raised if the provider has "raise_on_validation_error" set + except (ValidationError, DataValidationError) as exc: raise HaproxyRouteIntegrationDataValidationError from exc @model_validator(mode="after") diff --git a/haproxy-operator/templates/haproxy_route.cfg.j2 b/haproxy-operator/templates/haproxy_route.cfg.j2 index b708d5fbc..c123236a6 100644 --- a/haproxy-operator/templates/haproxy_route.cfg.j2 +++ 
b/haproxy-operator/templates/haproxy_route.cfg.j2 @@ -44,6 +44,14 @@ frontend haproxy {% endif %} use_backend {{ backend.backend_name }} if {% if backend.path_acl_required %}acl_path_{{ backend.backend_name }}{% endif %} acl_host_{{ backend.backend_name }} {% if backend.deny_path_acl_required %}!acl_deny_path_{{ backend.backend_name }}{% endif +%} {% endfor %} +{% if policy_provider_backend is not none %} + # Routing configuration for the haproxy-route-policy provider +{% if policy_provider_backend is not none %} + {{ policy_provider_backend.hostname_acl }} +{% endif %} + {{ policy_provider_backend.path_acl }} + {{ policy_provider_backend.use_backend_configuration }} +{% endif %} default_backend default diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index cd8342656..879afe771 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -40,7 +40,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 2 +LIBPATCH = 4 def valid_domain_with_wildcard(value: str) -> str: @@ -118,6 +118,12 @@ class HaproxyRoutePolicyProviderAppData: approved_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of approved backend requests." ) + policy_backend_port: int = Field( + gt=0, + le=65535, + description="Port number for the policy backend service (e.g. 
for routing to policy web UI).", + ) + model: str = Field(description="Model name where the policy backend is deployed.") class HaproxyRoutePolicyProvider(Object): @@ -144,7 +150,7 @@ def relation(self) -> Relation | None: return self.charm.model.get_relation(self.relation_name) def set_approved_backend_requests( - self, approved_requests: list[HaproxyRoutePolicyBackendRequest] + self, approved_requests: list[HaproxyRoutePolicyBackendRequest], policy_backend_port: int ) -> None: """Set and publish approved backend requests.""" relation = self.relation @@ -152,7 +158,11 @@ def set_approved_backend_requests( return try: - app_data = HaproxyRoutePolicyProviderAppData(approved_requests=approved_requests) + app_data = HaproxyRoutePolicyProviderAppData( + approved_requests=approved_requests, + policy_backend_port=policy_backend_port, + model=self.charm.model.name, + ) relation.save(app_data, self.charm.app) except ( ValidationError, diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index c06b8a091..53bac2138 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -165,7 +165,9 @@ def _fetch_and_refresh_backend_requests( len(evaluated), len(approved), ) - self.haproxy_route_policy.set_approved_backend_requests(approved) + self.haproxy_route_policy.set_approved_backend_requests( + approved, HAPROXY_ROUTE_POLICY_PORT + ) if __name__ == "__main__": # pragma: nocover diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py index b7f877347..389a7b551 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py @@ -114,7 +114,9 @@ def test_provider_app_data_model_accepts_valid_payload(): assert: payload is validated and fields are preserved. 
""" request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) - app_data = HaproxyRoutePolicyProviderAppData(approved_requests=[request]) + app_data = HaproxyRoutePolicyProviderAppData( + approved_requests=[request], policy_backend_port=8080, model="test-model" + ) assert len(app_data.approved_requests) == 1 assert app_data.approved_requests[0].backend_name == "backend-a" From 924578cdb61be8f9b14886f1eced240d4fbd69af Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 09:56:54 +0200 Subject: [PATCH 185/201] add trusted origins to snap config --- haproxy-route-policy/haproxy_route_policy/settings.py | 4 ++-- haproxy-route-policy/snap/hooks/configure | 2 ++ haproxy-route-policy/snap/hooks/install | 1 + haproxy-route-policy/snap/scripts/bin/gunicorn-start | 2 ++ haproxy-route-policy/snap/scripts/bin/manage | 2 ++ 5 files changed, 9 insertions(+), 2 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 9790b6b11..b09baab8e 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -23,8 +23,8 @@ SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") DEBUG = True -ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS", "[]")) - +ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS") or "[]") +CSRF_TRUSTED_ORIGINS = json.loads(os.getenv("DJANGO_CSRF_TRUSTED_ORIGINS") or "[]") # Application definition INSTALLED_APPS = [ diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure index 61d9008c1..8a4e42c66 100644 --- a/haproxy-route-policy/snap/hooks/configure +++ b/haproxy-route-policy/snap/hooks/configure @@ -25,6 +25,8 @@ esac DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" export DJANGO_ALLOWED_HOSTS +DJANGO_CSRF_TRUSTED_ORIGINS="$(snapctl get csrf-trusted-origins)" +export DJANGO_CSRF_TRUSTED_ORIGINS DJANGO_DATABASE_PASSWORD="$(snapctl get 
database-password)" export DJANGO_DATABASE_PASSWORD DJANGO_DATABASE_HOST="$(snapctl get database-host)" diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install index 5b80a85c0..27d665c65 100755 --- a/haproxy-route-policy/snap/hooks/install +++ b/haproxy-route-policy/snap/hooks/install @@ -9,5 +9,6 @@ set -e snapctl set debug='false' snapctl set log-level='INFO' snapctl set allowed-hosts='["localhost", "127.0.0.1"]' +snapctl set csrf-trusted-origins='[]' SECRET_KEY="$(tr -dc a-zA-Z0-9 < /dev/urandom | head -c 50)" snapctl set secret-key="$SECRET_KEY" diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start index b3342d99a..ec5afa702 100755 --- a/haproxy-route-policy/snap/scripts/bin/gunicorn-start +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -9,6 +9,8 @@ DJANGO_SECRET_KEY="$(snapctl get secret-key)" export DJANGO_SECRET_KEY DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" export DJANGO_ALLOWED_HOSTS +DJANGO_CSRF_TRUSTED_ORIGINS="$(snapctl get csrf-trusted-origins)" +export DJANGO_CSRF_TRUSTED_ORIGINS DJANGO_LOG_LEVEL="$(snapctl get log-level)" export DJANGO_LOG_LEVEL DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index c9e1c4511..84b530aa9 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -12,6 +12,8 @@ if [ -z "$DJANGO_ALLOWED_HOSTS" ]; then DJANGO_ALLOWED_HOSTS="[]" fi export DJANGO_ALLOWED_HOSTS +DJANGO_CSRF_TRUSTED_ORIGINS="$(snapctl get csrf-trusted-origins)" +export DJANGO_CSRF_TRUSTED_ORIGINS DJANGO_LOG_LEVEL="$(snapctl get log-level)" export DJANGO_LOG_LEVEL DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" From a799b1daabed0cfcf558ef1b71649364eb27f467 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 11:51:50 +0200 Subject: [PATCH 
186/201] set secure proxy header for haproxy --- haproxy-route-policy/haproxy_route_policy/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index b09baab8e..0f1734eff 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -25,6 +25,8 @@ ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS") or "[]") CSRF_TRUSTED_ORIGINS = json.loads(os.getenv("DJANGO_CSRF_TRUSTED_ORIGINS") or "[]") +# settings.py +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") # Application definition INSTALLED_APPS = [ From d555a13ef00f5ccaef22e33d852375f050798163 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 12:40:57 +0200 Subject: [PATCH 187/201] update template, conditionally render haproxy-route-policy backend when relation is present --- haproxy-operator/src/charm.py | 16 ++++- haproxy-operator/src/state/charm_state.py | 10 +++- haproxy-operator/src/state/haproxy_route.py | 59 +++++++++---------- .../templates/haproxy_route.cfg.j2 | 16 +++-- haproxy-operator/tests/unit/test_state.py | 6 ++ 5 files changed, 70 insertions(+), 37 deletions(-) diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index 44848b3dd..d3a872ca4 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -271,6 +271,7 @@ def _reconcile(self) -> None: self.haproxy_route_provider, self.haproxy_route_tcp_provider, self.reverseproxy_requirer, + self.haproxy_route_policy, ) proxy_mode = charm_state.mode if proxy_mode == ProxyMode.INVALID: @@ -429,6 +430,7 @@ def _get_certificate_requests(self) -> typing.List[CertificateRequestAttributes] self.haproxy_route_provider, self.haproxy_route_tcp_provider, self.reverseproxy_requirer, + self.haproxy_route_policy, ) proxy_mode = charm_state.mode @@ -443,7 +445,7 @@ def _get_certificate_requests(self) -> 
typing.List[CertificateRequestAttributes] ca_certs_configured=bool(self.recv_ca_certs.get_all_certificates()), ) ) - return [ + certificate_requests = [ CertificateRequestAttributes( common_name=hostname_acl, sans_dns=frozenset([hostname_acl]) ) @@ -458,6 +460,18 @@ def _get_certificate_requests(self) -> typing.List[CertificateRequestAttributes] for backend in frontend.backends if backend.application_data.sni is not None ] + # Add the generated hostname with subdomain of the policy charm. + if ( + haproxy_route_policy_backend + := haproxy_route_requirer_information.policy_provider_backend + ): + certificate_requests.append( + CertificateRequestAttributes( + common_name=haproxy_route_policy_backend.hostname, + sans_dns=frozenset([haproxy_route_policy_backend.hostname]), + ) + ) + return certificate_requests except ( HaproxyRouteIntegrationDataValidationError, TLSNotReadyError, diff --git a/haproxy-operator/src/state/charm_state.py b/haproxy-operator/src/state/charm_state.py index 935ec343d..ee819fe80 100644 --- a/haproxy-operator/src/state/charm_state.py +++ b/haproxy-operator/src/state/charm_state.py @@ -15,6 +15,7 @@ import ops from charms.haproxy.v1.haproxy_route_tcp import HaproxyRouteTcpProvider from charms.haproxy.v2.haproxy_route import HaproxyRouteProvider +from charms.haproxy_route_policy.v0.haproxy_route_policy import HaproxyRoutePolicyRequirer from charms.traefik_k8s.v1.ingress_per_unit import IngressPerUnitProvider from charms.traefik_k8s.v2.ingress import IngressPerAppProvider from pydantic import Field, ValidationError, field_validator @@ -111,6 +112,7 @@ def _validate_state( haproxy_route_provider: HaproxyRouteProvider, haproxy_route_tcp_provider: HaproxyRouteTcpProvider, reverseproxy_requirer: HTTPRequirer, + haproxy_route_policy: HaproxyRoutePolicyRequirer, ) -> ProxyMode: """Validate if all the necessary preconditions are fulfilled. @@ -120,6 +122,7 @@ def _validate_state( haproxy_route_provider: The haproxy route provider. 
haproxy_route_tcp_provider: The haproxy-route-tcp provider. reverseproxy_requirer: The reverse proxy requirer. + haproxy_route_policy: The haproxy route policy requirer. Raises: HaproxyTooManyIntegrationsError: when there are too many integrations and @@ -132,7 +135,9 @@ def _validate_state( is_ingress_per_unit_related = bool(ingress_per_unit_provider.relations) is_legacy_related = bool(reverseproxy_requirer.relations) is_haproxy_route_related = bool( - haproxy_route_provider.relations or haproxy_route_tcp_provider.relations + haproxy_route_provider.relations + or haproxy_route_tcp_provider.relations + or haproxy_route_policy.relation is not None ) if ( @@ -172,6 +177,7 @@ def from_charm( # pylint: disable=too-many-arguments, too-many-positional-argum haproxy_route_provider: HaproxyRouteProvider, haproxy_route_tcp_provider: HaproxyRouteTcpProvider, reverseproxy_requirer: HTTPRequirer, + haproxy_route_policy: HaproxyRoutePolicyRequirer, ) -> "CharmState": """Create a CharmState class from a charm instance. @@ -182,6 +188,7 @@ def from_charm( # pylint: disable=too-many-arguments, too-many-positional-argum haproxy_route_provider: The haproxy-route provider. haproxy_route_tcp_provider: The haproxy-route-tcp provider. reverseproxy_requirer: The reverse proxy requirer. + haproxy_route_policy: The haproxy route policy requirer. Raises: InvalidCharmConfigError: When the charm's config is invalid. 
@@ -200,6 +207,7 @@ def from_charm( # pylint: disable=too-many-arguments, too-many-positional-argum haproxy_route_provider, haproxy_route_tcp_provider, reverseproxy_requirer, + haproxy_route_policy, ), global_max_connection=global_max_connection, enable_hsts=enable_hsts, diff --git a/haproxy-operator/src/state/haproxy_route.py b/haproxy-operator/src/state/haproxy_route.py index 2648ba034..f0e9062a2 100644 --- a/haproxy-operator/src/state/haproxy_route.py +++ b/haproxy-operator/src/state/haproxy_route.py @@ -55,6 +55,10 @@ class HaproxyRouteIntegrationDataValidationError(CharmStateValidationBaseError): """Exception raised when ingress integration is not established.""" +class HaproxyRoutePolicyMissingHostnameError(CharmStateValidationBaseError): + """Exception raised when haproxy-route-policy is present but external-hostname is missing.""" + + @dataclass(frozen=True) class HAProxyRouteServer: """A representation of a server in the backend section of the haproxy config. @@ -306,7 +310,7 @@ class HaproxyRoutePolicyProviderBackend: ) model: str = Field(description="Model name for the policy backend.") app: str = Field(description="Application name for the policy backend.") - hostname: Optional[str] = Field(description="External hostname for the policy backend.") + hostname: str = Field(description="Hostname for the policy backend.") @classmethod def from_requirer( @@ -318,9 +322,21 @@ def from_requirer( haproxy_route_policy: The haproxy-route-policy requirer instance. external_hostname: The charm's configured external hostname. + Raises: + HaproxyRoutePolicyMissingHostnameError: When haproxy-route-policy relation is present + but external hostname is not set. + Returns: HaproxyRoutePolicyProviderBackend or None if relation data is invalid or missing. """ + if not external_hostname: + logger.error( + "External hostname is required for policy backend but is not set. " + "Skipping policy backend configuration." 
+ ) + raise HaproxyRoutePolicyMissingHostnameError( + "External hostname is required for haproxy-route-policy but is not set." + ) try: if relation := haproxy_route_policy.relation: provider_data = relation.load(HaproxyRoutePolicyProviderAppData, relation.app) @@ -335,7 +351,7 @@ def from_requirer( policy_backend_unit_addresses=provider_unit_addresses, model=provider_data.model, app=relation.app.name, - hostname=external_hostname, + hostname=f"{provider_data.model}-{relation.app.name}.{external_hostname}", ) except ValidationError as exc: logger.error("Validation error when parsing policy provider backend: %s", exc) @@ -362,33 +378,13 @@ def hostname_acl_name(self) -> str: return f"{self.backend_name}_hostname" @property - def path_acl_name(self) -> str: - """The path ACL name for the policy backend. - - Returns: - str: The path ACL name for the policy backend. - """ - return f"{self.backend_name}_path" - - @property - def hostname_acl(self) -> str | None: + def hostname_acl(self) -> str: """Build the hostname ACL for the policy backend. Returns: - str | None: The hostname ACL string, or None if no hostname is set. + str: The hostname ACL string. """ - if self.hostname: - return f"acl {self.hostname_acl_name} req.hdr(host),field(1,:) -i {self.hostname}" - return None - - @property - def path_acl(self) -> str: - """Build the path ACL for the policy backend. - - Returns: - str: The path ACL string. - """ - return f"acl {self.path_acl_name} path_beg -i /{self.model}-{self.app}" + return f"acl {self.hostname_acl_name} req.hdr(host),field(1,:) -i {self.hostname}" @property def policy_backend_server_configuration(self) -> list[str]: @@ -409,9 +405,7 @@ def use_backend_configuration(self) -> str: Returns: str: The use_backend configuration for the policy backend. 
""" - if hostname_acl := self.hostname_acl: - return f"use_backend {self.backend_name} if {self.path_acl} {hostname_acl}" - return f"use_backend {self.backend_name} if {self.path_acl}" + return f"use_backend {self.backend_name} if {self.hostname_acl_name}" # pylint: disable=too-many-locals @@ -569,6 +563,11 @@ def from_provider( # pylint: disable=too-many-arguments ) ) + policy_provider_backend = None + if haproxy_route_policy.relation is not None: + policy_provider_backend = HaproxyRoutePolicyProviderBackend.from_requirer( + haproxy_route_policy, external_hostname + ) return HaproxyRouteRequirersInformation( # Sort backend by the max depth of the required path. # This is to ensure that backends with deeper path ACLs get routed first. @@ -580,9 +579,7 @@ def from_provider( # pylint: disable=too-many-arguments tcp_frontends=tcp_frontends, ports_with_conflicts=set[int](), valid_haproxy_route_requirers=requirers.requirers_data, - policy_provider_backend=HaproxyRoutePolicyProviderBackend.from_requirer( - haproxy_route_policy=haproxy_route_policy, external_hostname=external_hostname - ), + policy_provider_backend=policy_provider_backend, ) except (ValidationError, DataValidationError) as exc: raise HaproxyRouteIntegrationDataValidationError from exc diff --git a/haproxy-operator/templates/haproxy_route.cfg.j2 b/haproxy-operator/templates/haproxy_route.cfg.j2 index c123236a6..1ea934584 100644 --- a/haproxy-operator/templates/haproxy_route.cfg.j2 +++ b/haproxy-operator/templates/haproxy_route.cfg.j2 @@ -1,6 +1,6 @@ {% extends 'haproxy.cfg.j2' %} {% block proxy_configuration %} -{% if http_backends %} +{% if http_backends or policy_provider_backend is not none %} frontend haproxy mode http bind [::]:80 v4v6 @@ -46,15 +46,23 @@ frontend haproxy {% endfor %} {% if policy_provider_backend is not none %} # Routing configuration for the haproxy-route-policy provider -{% if policy_provider_backend is not none %} {{ policy_provider_backend.hostname_acl }} -{% endif %} - {{ 
policy_provider_backend.path_acl }} {{ policy_provider_backend.use_backend_configuration }} {% endif %} default_backend default +{% if policy_provider_backend is not none %} +# Backend configuration for the haproxy-route-policy provider +backend {{ policy_provider_backend.backend_name }} + mode http + http-request set-header X-Forwarded-Proto https + http-request set-header X-Forwarded-Ssl on +{% for server_configuration in policy_provider_backend.policy_backend_server_configuration %} + {{ server_configuration}} +{% endfor %} +{% endif %} + peers haproxy_peers {% for address in peer_units_address %} peer {{ address }} diff --git a/haproxy-operator/tests/unit/test_state.py b/haproxy-operator/tests/unit/test_state.py index c610d940a..36436344a 100644 --- a/haproxy-operator/tests/unit/test_state.py +++ b/haproxy-operator/tests/unit/test_state.py @@ -145,6 +145,8 @@ def test_proxy_mode_tcp(): haproxy_route_provider_mock = MagicMock() haproxy_route_provider_mock.relations = [] haproxy_route_tcp_provider_mock = MagicMock() + haproxy_route_policy_requirer_mock = MagicMock() + haproxy_route_policy_requirer_mock.relation = None haproxy_route_tcp_provider_mock.relations = [MagicMock(spec=ops.Relation)] reverseproxy_requirer_mock = MagicMock() reverseproxy_requirer_mock.relations = [] @@ -155,6 +157,7 @@ def test_proxy_mode_tcp(): haproxy_route_provider=haproxy_route_provider_mock, haproxy_route_tcp_provider=haproxy_route_tcp_provider_mock, reverseproxy_requirer=reverseproxy_requirer_mock, + haproxy_route_policy=haproxy_route_policy_requirer_mock, ) assert charm_state.mode == ProxyMode.HAPROXY_ROUTE @@ -597,6 +600,8 @@ def test_charm_state_ddos_protection(ddos_protection, expected_value): haproxy_route_tcp_provider_mock.relations = [] reverseproxy_requirer_mock = MagicMock() reverseproxy_requirer_mock.relations = [] + haproxy_route_policy_requirer_mock = MagicMock() + haproxy_route_policy_requirer_mock.relation = None charm_state = CharmState.from_charm( charm=charm_mock, @@ 
-605,6 +610,7 @@ def test_charm_state_ddos_protection(ddos_protection, expected_value): haproxy_route_provider=haproxy_route_provider_mock, haproxy_route_tcp_provider=haproxy_route_tcp_provider_mock, reverseproxy_requirer=reverseproxy_requirer_mock, + haproxy_route_policy=haproxy_route_policy_requirer_mock, ) assert charm_state.ddos_protection is expected_value From dbb59a7544f02aed034ff5243c042317664f9c9a Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 12:53:42 +0200 Subject: [PATCH 188/201] add change artifact --- docs/release-notes/artifacts/pr0463.yaml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0463.yaml diff --git a/docs/release-notes/artifacts/pr0463.yaml b/docs/release-notes/artifacts/pr0463.yaml new file mode 100644 index 000000000..b1f1d6796 --- /dev/null +++ b/docs/release-notes/artifacts/pr0463.yaml @@ -0,0 +1,18 @@ +version_schema: 2 + +changes: + - title: Publish haproxy-route backend requests to policy provider and refactor relation observers + author: tphan025 + type: minor + description: > + Refactored the repetitive relation event observer registration. + Added logic so the leader unit publishes backend requests + derived from haproxy-route requirers to the haproxy-route-policy + provider during the reconcile loop. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/463 + related_doc: + related_issue: + visibility: public + highlight: false From de1b24a0fd71559f3d5d4ae24b7f98c465ab0208 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 13:37:51 +0200 Subject: [PATCH 189/201] add change artifact --- docs/release-notes/artifacts/pr0465.yaml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0465.yaml diff --git a/docs/release-notes/artifacts/pr0465.yaml b/docs/release-notes/artifacts/pr0465.yaml new file mode 100644 index 000000000..439fc74b9 --- /dev/null +++ b/docs/release-notes/artifacts/pr0465.yaml @@ -0,0 +1,23 @@ +version_schema: 2 + +changes: + - title: Added admin UI, backend_match rule kind, request refresh endpoint, and enabled DEBUG mode + author: tphan025 + type: minor + description: > + Introduced a Django admin interface for BackendRequest and Rule models + with a custom "Refresh Requests" action that re-evaluates all requests + against current rules. Added a new `backend_match` rule kind that matches + requests by backend name, with corresponding serializer validation and + rule engine logic. Added a public `GET /api/v1/requests/refresh` endpoint + for re-evaluation. Enabled Django DEBUG mode by default, + removed the DJANGO_DEBUG snap config option from configure hook and + startup scripts, and configured static file serving and collectstatic + during snap build for the admin UI. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/465 + related_doc: + related_issue: + visibility: public + highlight: false From 1711ece7e05658c1bc8c7dc2002e2dada42be854 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 13:42:13 +0200 Subject: [PATCH 190/201] Add comments for django DEBUG mode --- haproxy-route-policy/haproxy_route_policy/settings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 0f1734eff..7967b01eb 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -21,6 +21,9 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") +# This is set to True for django to serve static files for the admin UI directly +# In a future iteration the responsibility of serving static files will be moved to +# a dedicated reverse proxy deployed by the policy charm.
DEBUG = True ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS") or "[]") From fa1a13c6d4806f309068f203c935e37cca769965 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 13:45:23 +0200 Subject: [PATCH 191/201] Add change artifact --- docs/release-notes/artifacts/pr0469.yaml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0469.yaml diff --git a/docs/release-notes/artifacts/pr0469.yaml b/docs/release-notes/artifacts/pr0469.yaml new file mode 100644 index 000000000..be7fece6b --- /dev/null +++ b/docs/release-notes/artifacts/pr0469.yaml @@ -0,0 +1,24 @@ +version_schema: 2 + +changes: + - title: Route policy web UI through HAProxy and extend interface library with provider metadata + author: tphan025 + type: minor + description: > + Extended the haproxy-route-policy interface library to + include policy_backend_port and model in the provider app data, enabling + the haproxy-operator to build a dedicated backend and hostname-based + routing for the policy web UI. Added HaproxyRoutePolicyProviderBackend + dataclass in haproxy_route.py that derives a subdomain hostname + (<model>-<app>.<external-hostname>), generates ACLs, backend server + entries, and use_backend directives. Updated the Jinja2 template to + render the policy backend alongside regular haproxy-route backends and + request a TLS certificate for the generated hostname. On the snap side, added + CSRF_TRUSTED_ORIGINS and SECURE_PROXY_SSL_HEADER settings.
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/469 + related_doc: + related_issue: + visibility: public + highlight: false From c453a8defebbf295df97a89a80994ef542af2c79 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 17:20:33 +0200 Subject: [PATCH 192/201] add logic to set allowed-hosts if haproxy has sent a proxied-endpoint --- .../v0/haproxy_route_policy.py | 17 ++++++--- haproxy-route-policy-operator/src/charm.py | 26 +++++++++----- .../src/state/policy.py | 21 ++++++----- .../test_haproxy_route_policy_information.py | 36 ++++--------------- .../unit/test_haproxy_route_policy_lib.py | 12 +++++-- 5 files changed, 57 insertions(+), 55 deletions(-) diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 879afe771..68565508a 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -13,7 +13,7 @@ """ import logging -from typing import Annotated +from typing import Annotated, cast from ops import CharmBase from ops.framework import Object @@ -26,6 +26,7 @@ from pydantic import ( BeforeValidator, Field, + HttpUrl, ValidationError, model_validator, ) @@ -40,7 +41,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 4 +LIBPATCH = 5 def valid_domain_with_wildcard(value: str) -> str: @@ -101,6 +102,9 @@ class HaproxyRoutePolicyRequirerAppData: backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of backends to be evaluated by the policy service." 
) + proxied_endpoint: HttpUrl | None = Field( + description=("URL for the proxied endpoint that's exposing the Django web UI."), + ) @model_validator(mode="after") def validate_unique_backend_names(self): @@ -201,7 +205,9 @@ def relation(self) -> Relation | None: return self.charm.model.get_relation(self._relation_name) def provide_haproxy_route_policy_requests( - self, backend_requests: list[HaproxyRoutePolicyBackendRequest] + self, + backend_requests: list[HaproxyRoutePolicyBackendRequest], + proxied_endpoint: str | None, ) -> None: """Set and publish route policy requests.""" relation = self.relation @@ -209,7 +215,10 @@ def provide_haproxy_route_policy_requests( return try: - app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=backend_requests) + app_data = HaproxyRoutePolicyRequirerAppData( + backend_requests=backend_requests, + proxied_endpoint=cast(HttpUrl | None, proxied_endpoint), + ) relation.save(app_data, self.charm.app) except ( ValidationError, diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 53bac2138..b7063ce67 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -5,6 +5,7 @@ """haproxy-route-policy-operator charm.""" +import json import logging from typing import Any @@ -94,9 +95,23 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.status = ops.MaintenanceStatus("configuring haproxy-route-policy") database_information = DatabaseInformation.from_requirer(self, self.database) haproxy_route_policy_information = HaproxyRoutePolicyInformation.from_charm(self) + + allowed_hosts = haproxy_route_policy_information.allowed_hosts_configuration + if relation := self.haproxy_route_policy.relation: + haproxy_route_policy_requirer_data = relation.load( + HaproxyRoutePolicyRequirerAppData, relation.app + ) + self._fetch_and_refresh_backend_requests( + haproxy_route_policy_information, haproxy_route_policy_requirer_data + ) + if 
(proxied_endpoint := haproxy_route_policy_requirer_data.proxied_endpoint) and ( + host := proxied_endpoint.host + ): + allowed_hosts.append(host) + configure_snap( { - **haproxy_route_policy_information.allowed_hosts_snap_configuration, + **{"allowed-hosts": json.dumps(allowed_hosts)}, **database_information.haproxy_route_policy_snap_configuration, } ) @@ -116,9 +131,6 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) - if relation := self.haproxy_route_policy.relation: - self._fetch_and_refresh_backend_requests(haproxy_route_policy_information, relation) - self.unit.status = ops.ActiveStatus() def _on_get_admin_credentials_action(self, event: ops.ActionEvent) -> None: @@ -140,12 +152,10 @@ def _on_get_admin_credentials_action(self, event: ops.ActionEvent) -> None: def _fetch_and_refresh_backend_requests( self, haproxy_route_policy_information: HaproxyRoutePolicyInformation, - haproxy_route_policy_relation: ops.Relation, + haproxy_route_policy_requirer_data: HaproxyRoutePolicyRequirerAppData, ) -> None: """Fetch backend requests from relation and refresh their status via the policy API.""" - backend_requests = haproxy_route_policy_relation.load( - HaproxyRoutePolicyRequirerAppData, haproxy_route_policy_relation.app - ).backend_requests + backend_requests = haproxy_route_policy_requirer_data.backend_requests client = HaproxyRoutePolicyClient( username=haproxy_route_policy_information.admin_username, diff --git a/haproxy-route-policy-operator/src/state/policy.py b/haproxy-route-policy-operator/src/state/policy.py index 230136382..70ffc6be4 100644 --- a/haproxy-route-policy-operator/src/state/policy.py +++ b/haproxy-route-policy-operator/src/state/policy.py @@ -5,7 +5,6 @@ """Charm state for HAProxy route policy information.""" -import json import secrets from typing import Annotated, cast @@ -66,19 +65,19 @@ class HaproxyRoutePolicyInformation: secret_key: Django secret key. 
""" - allowed_hosts: list[FQDN | IPvAnyAddress] = Field() + extra_allowed_hosts: list[FQDN | IPvAnyAddress] = Field() admin_username: str = Field() admin_password: str = Field() secret_key: str = Field() @property - def allowed_hosts_snap_configuration(self) -> dict[str, str]: - """Return snap configuration keys and values.""" - return { - "allowed-hosts": json.dumps( - DEFAULT_ALLOWED_HOSTS + [str(host) for host in self.allowed_hosts] - ), - } + def allowed_hosts_configuration(self) -> list[str]: + """Get the allowed hosts snap configuration. + + Returns: + list: The allowed hosts to set in snap configuration. + """ + return DEFAULT_ALLOWED_HOSTS + [str(host) for host in self.extra_allowed_hosts] @classmethod def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": @@ -94,7 +93,7 @@ def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": if not peer_relation: raise PeerRelationMissingError("Peer relation is missing.") - allowed_hosts = ( + extra_allowed_hosts = ( [ cast(IPvAnyAddress | FQDN, address) for address in cast(str, charm.config.get("extra-allowed-hosts")).split(",") @@ -109,7 +108,7 @@ def from_charm(cls, charm: ops.CharmBase) -> "HaproxyRoutePolicyInformation": ) secret_key = _get_django_secret_key(charm, peer_relation)["secret-key"] return cls( - allowed_hosts=allowed_hosts, + extra_allowed_hosts=extra_allowed_hosts, admin_username=credentials["username"], admin_password=credentials["password"], secret_key=secret_key, diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py index 979bfa44d..e197da5ab 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_information.py @@ -14,7 +14,7 @@ def _build_state(allowed_hosts: list[str]) -> HaproxyRoutePolicyInformation: """Build a valid 
state instance with overridable allowed hosts.""" return HaproxyRoutePolicyInformation( - allowed_hosts=cast(list[Any], allowed_hosts), + extra_allowed_hosts=cast(list[Any], allowed_hosts), admin_username="admin", # Ignore bandit warning as this is for testing. admin_password="secret", # nosec @@ -25,15 +25,15 @@ def _build_state(allowed_hosts: list[str]) -> HaproxyRoutePolicyInformation: @pytest.mark.parametrize( "allowed_hosts, expected_allowed_hosts", [ - pytest.param([], [], id="empty-list"), - pytest.param(["example.com"], ["example.com"], id="single-fqdn"), + pytest.param([], ["localhost"], id="empty-list"), + pytest.param(["example.com"], ["localhost", "example.com"], id="single-fqdn"), pytest.param( ["example.com", "api.example.com"], - ["example.com", "api.example.com"], + ["localhost", "example.com", "api.example.com"], id="multiple-fqdn", ), - pytest.param(["10.0.0.10"], ["10.0.0.10"], id="ipv4-address"), - pytest.param(["2001:db8::1"], ["2001:db8::1"], id="ipv6-address"), + pytest.param(["10.0.0.10"], ["localhost", "10.0.0.10"], id="ipv4-address"), + pytest.param(["2001:db8::1"], ["localhost", "2001:db8::1"], id="ipv6-address"), ], ) def test_haproxy_route_policy_information_init_valid_allowed_hosts( @@ -46,7 +46,7 @@ def test_haproxy_route_policy_information_init_valid_allowed_hosts( """ state = _build_state(allowed_hosts) - assert [str(host) for host in state.allowed_hosts] == expected_allowed_hosts + assert [str(host) for host in state.allowed_hosts_configuration] == expected_allowed_hosts @pytest.mark.parametrize( @@ -94,25 +94,3 @@ def test_haproxy_route_policy_information_init_rejects_none_string_fields( with pytest.raises(ValidationError): HaproxyRoutePolicyInformation(**payload) - - -@pytest.mark.parametrize( - "allowed_hosts, expected", - [ - pytest.param([], {"allowed-hosts": '["localhost"]'}, id="empty"), - pytest.param( - ["example.com", "api.example.com"], - {"allowed-hosts": '["localhost", "example.com", "api.example.com"]'}, - 
id="multiple-fqdn", - ), - ], -) -def test_allowed_hosts_snap_configuration(allowed_hosts: list[str], expected: dict[str, str]): - """ - arrange: initialize state with valid allowed hosts. - act: read snap configuration property. - assert: allowed-hosts is serialized to expected JSON string. - """ - state = _build_state(allowed_hosts) - - assert state.allowed_hosts_snap_configuration == expected diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py index 389a7b551..41fdd69aa 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py @@ -3,6 +3,8 @@ """Unit tests for haproxy-route-policy interface library models.""" +from typing import cast + import pytest from charms.haproxy_route_policy.v0.haproxy_route_policy import ( HaproxyRoutePolicyBackendRequest, @@ -10,7 +12,7 @@ HaproxyRoutePolicyRequirerAppData, valid_domain_with_wildcard, ) -from pydantic import ValidationError +from pydantic import HttpUrl, ValidationError VALID_BACKEND_REQUEST = { "relation_id": 10, @@ -100,7 +102,9 @@ def test_requirer_app_data_model_accepts_valid_payload(): assert: payload is validated and fields are preserved. 
""" request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) - app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=[request]) + app_data = HaproxyRoutePolicyRequirerAppData( + backend_requests=[request], proxied_endpoint=cast(HttpUrl, "https://example.com") + ) assert len(app_data.backend_requests) == 1 assert app_data.backend_requests[0].backend_name == "backend-a" @@ -142,4 +146,6 @@ def test_requirer_app_data_rejects_duplicate_backend_names(): ] with pytest.raises(ValidationError): - HaproxyRoutePolicyRequirerAppData(backend_requests=duplicated_requests) + HaproxyRoutePolicyRequirerAppData( + backend_requests=duplicated_requests, proxied_endpoint=None + ) From 61ac07254367aaaeb9371dc43746233d8dc47061 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 19:30:16 +0200 Subject: [PATCH 193/201] send policy hostname via relation data --- .../v0/haproxy_route_policy.py | 17 +++++++++++++---- haproxy-operator/src/charm.py | 5 ++++- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 879afe771..68565508a 100644 --- a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -13,7 +13,7 @@ """ import logging -from typing import Annotated +from typing import Annotated, cast from ops import CharmBase from ops.framework import Object @@ -26,6 +26,7 @@ from pydantic import ( BeforeValidator, Field, + HttpUrl, ValidationError, model_validator, ) @@ -40,7 +41,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 4 +LIBPATCH = 5 def valid_domain_with_wildcard(value: str) -> str: @@ -101,6 +102,9 @@ class HaproxyRoutePolicyRequirerAppData: backend_requests: 
list[HaproxyRoutePolicyBackendRequest] = Field( description="List of backends to be evaluated by the policy service." ) + proxied_endpoint: HttpUrl | None = Field( + description=("URL for the proxied endpoint that's exposing the Django web UI."), + ) @model_validator(mode="after") def validate_unique_backend_names(self): @@ -201,7 +205,9 @@ def relation(self) -> Relation | None: return self.charm.model.get_relation(self._relation_name) def provide_haproxy_route_policy_requests( - self, backend_requests: list[HaproxyRoutePolicyBackendRequest] + self, + backend_requests: list[HaproxyRoutePolicyBackendRequest], + proxied_endpoint: str | None, ) -> None: """Set and publish route policy requests.""" relation = self.relation @@ -209,7 +215,10 @@ def provide_haproxy_route_policy_requests( return try: - app_data = HaproxyRoutePolicyRequirerAppData(backend_requests=backend_requests) + app_data = HaproxyRoutePolicyRequirerAppData( + backend_requests=backend_requests, + proxied_endpoint=cast(HttpUrl | None, proxied_endpoint), + ) relation.save(app_data, self.charm.app) except ( ValidationError, diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index 1b46e73f9..cd1146c3a 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -369,7 +369,10 @@ def _configure_haproxy_route( ) if self.unit.is_leader() and self.haproxy_route_policy.relation is not None: self.haproxy_route_policy.provide_haproxy_route_policy_requests( - haproxy_route_requirers_information.backend_requests_for_policy + haproxy_route_requirers_information.backend_requests_for_policy, + haproxy_route_requirers_information.policy_provider_backend.hostname + if haproxy_route_requirers_information.policy_provider_backend + else None, ) # We ONLY allow the charm to run with no certificate requested if: # 1. 
there's only haproxy-route-tcp relations From 3887a3b1f431b643699856c6a23191f825d34222 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Mon, 20 Apr 2026 19:57:57 +0200 Subject: [PATCH 194/201] update scheme in hostname --- haproxy-operator/src/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index cd1146c3a..d4f39f383 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -370,7 +370,7 @@ def _configure_haproxy_route( if self.unit.is_leader() and self.haproxy_route_policy.relation is not None: self.haproxy_route_policy.provide_haproxy_route_policy_requests( haproxy_route_requirers_information.backend_requests_for_policy, - haproxy_route_requirers_information.policy_provider_backend.hostname + f"https://{haproxy_route_requirers_information.policy_provider_backend.hostname}" if haproxy_route_requirers_information.policy_provider_backend else None, ) From 4244a458ddd2a38e2ef293890f3596e448090b9e Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 21 Apr 2026 14:04:48 +0200 Subject: [PATCH 195/201] update lib for serialization and update logic --- .../v0/haproxy_route_policy.py | 20 +++++++++++++++---- haproxy-operator/src/charm.py | 3 --- .../v0/haproxy_route_policy.py | 20 +++++++++++++++---- 3 files changed, 32 insertions(+), 11 deletions(-) diff --git a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 68565508a..a3f99227e 100644 --- a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -13,7 +13,7 @@ """ import logging -from typing import Annotated, cast +from typing import Annotated from ops import CharmBase from ops.framework import Object @@ -27,7 +27,9 @@ BeforeValidator, Field, HttpUrl, + TypeAdapter, ValidationError, + 
field_validator, model_validator, ) from pydantic.dataclasses import dataclass @@ -41,7 +43,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 5 +LIBPATCH = 7 def valid_domain_with_wildcard(value: str) -> str: @@ -102,10 +104,20 @@ class HaproxyRoutePolicyRequirerAppData: backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of backends to be evaluated by the policy service." ) - proxied_endpoint: HttpUrl | None = Field( + proxied_endpoint: str | None = Field( description=("URL for the proxied endpoint that's exposing the Django web UI."), ) + @field_validator("proxied_endpoint") + def validate_proxied_endpoint(cls, value: str | None) -> str | None: + """Validate that the proxied endpoint, if provided, is a valid URL.""" + if value is not None: + try: + TypeAdapter(HttpUrl).validate_python(value) + except ValueError as exc: + raise ValueError(f"Invalid proxied endpoint URL: {value}") from exc + return value + @model_validator(mode="after") def validate_unique_backend_names(self): """Ensure that backend names are unique across all requests.""" @@ -217,7 +229,7 @@ def provide_haproxy_route_policy_requests( try: app_data = HaproxyRoutePolicyRequirerAppData( backend_requests=backend_requests, - proxied_endpoint=cast(HttpUrl | None, proxied_endpoint), + proxied_endpoint=proxied_endpoint, ) relation.save(app_data, self.charm.app) except ( diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index d4f39f383..28888635c 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -412,9 +412,6 @@ def _configure_haproxy_route( ), ) if self.unit.is_leader(): - self.haproxy_route_policy.provide_haproxy_route_policy_requests( - haproxy_route_requirers_information.backend_requests_for_policy - ) self._publish_haproxy_route_proxied_endpoints(haproxy_route_requirers_information) 
self._publish_haproxy_route_tcp_proxied_endpoints( haproxy_route_requirers_information, ha_information diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 68565508a..a3f99227e 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -13,7 +13,7 @@ """ import logging -from typing import Annotated, cast +from typing import Annotated from ops import CharmBase from ops.framework import Object @@ -27,7 +27,9 @@ BeforeValidator, Field, HttpUrl, + TypeAdapter, ValidationError, + field_validator, model_validator, ) from pydantic.dataclasses import dataclass @@ -41,7 +43,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 5 +LIBPATCH = 7 def valid_domain_with_wildcard(value: str) -> str: @@ -102,10 +104,20 @@ class HaproxyRoutePolicyRequirerAppData: backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of backends to be evaluated by the policy service." 
) - proxied_endpoint: HttpUrl | None = Field( + proxied_endpoint: str | None = Field( description=("URL for the proxied endpoint that's exposing the Django web UI."), ) + @field_validator("proxied_endpoint") + def validate_proxied_endpoint(cls, value: str | None) -> str | None: + """Validate that the proxied endpoint, if provided, is a valid URL.""" + if value is not None: + try: + TypeAdapter(HttpUrl).validate_python(value) + except ValueError as exc: + raise ValueError(f"Invalid proxied endpoint URL: {value}") from exc + return value + @model_validator(mode="after") def validate_unique_backend_names(self): """Ensure that backend names are unique across all requests.""" @@ -217,7 +229,7 @@ def provide_haproxy_route_policy_requests( try: app_data = HaproxyRoutePolicyRequirerAppData( backend_requests=backend_requests, - proxied_endpoint=cast(HttpUrl | None, proxied_endpoint), + proxied_endpoint=proxied_endpoint, ) relation.save(app_data, self.charm.app) except ( From ebf5bdd95fef04ce06dcd1e16e75c3d37d06e5c5 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 21 Apr 2026 15:57:32 +0200 Subject: [PATCH 196/201] fix merge conflicts --- .../integration/test_haproxy_route_policy.py | 36 ------------------- 1 file changed, 36 deletions(-) diff --git a/tests/integration/test_haproxy_route_policy.py b/tests/integration/test_haproxy_route_policy.py index e87509797..fa7b22637 100644 --- a/tests/integration/test_haproxy_route_policy.py +++ b/tests/integration/test_haproxy_route_policy.py @@ -3,10 +3,6 @@ """Integration tests for haproxy route policy.""" -<<<<<<< expose_haproxy_route_policy_service -import json -======= ->>>>>>> main import logging @@ -21,45 +17,13 @@ @pytest.mark.abort_on_fail def test_haproxy_route_policy( configured_application_with_tls: str, -<<<<<<< expose_haproxy_route_policy_service - haproxy_route_policy, - lxd_juju: jubilant.Juju, - any_charm_haproxy_route_deployer, -): - """Test the HAProxy route policy integration.""" - lxd_juju.integrate( - 
f"{configured_application_with_tls}:haproxy-route", - any_charm_haproxy_route_deployer, - ) -======= haproxy_route_policy: str, lxd_juju: jubilant.Juju, postgresql: str, ): """Test the HAProxy route policy integration.""" lxd_juju.integrate(f"{haproxy_route_policy}:database", f"{postgresql}:database") ->>>>>>> main lxd_juju.integrate( f"{configured_application_with_tls}:haproxy-route-policy", haproxy_route_policy, ) -<<<<<<< expose_haproxy_route_policy_service - lxd_juju.run( - f"{any_charm_haproxy_route_deployer}/0", - "rpc", - { - "method": "update_relation", - "args": json.dumps( - [ - { - "service": any_charm_haproxy_route_deployer, - "ports": [80], - "hostname": TEST_HOSTNAME, - } - ] - ), - }, - ) - lxd_juju.wait(jubilant.all_active) -======= ->>>>>>> main From 6778baec0d2aa6aa5fed72276fb636e9641d1f53 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 21 Apr 2026 15:58:08 +0200 Subject: [PATCH 197/201] resolve merge conflicts --- tests/integration/conftest.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 3d613cc4e..91bdf28e5 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -419,16 +419,6 @@ def browser_context_manager(): logger.info("install chromium %s", completed_process) -<<<<<<< expose_haproxy_route_policy_service -@pytest.fixture(scope="module", name="haproxy_route_policy") -def haproxy_route_policy_fixture( - pytestconfig: pytest.Config, lxd_juju: jubilant.Juju, app_name, host_name -) -> str: - """Deploy the haproxy-route-policy charm.""" - charm_name = "haproxy-route-policy" - if pytestconfig.getoption("--no-deploy") and app_name in lxd_juju.status().apps: - return app_name -======= @pytest.fixture(scope="module", name="postgresql") def postgresql_fixture(pytestconfig: pytest.Config, lxd_juju: jubilant.Juju): """Deploy PostgreSQL.""" @@ -461,7 +451,6 @@ def haproxy_route_policy_fixture( and HAPROXY_ROUTE_POLICY_APP_NAME in 
lxd_juju.status().apps ): return HAPROXY_ROUTE_POLICY_APP_NAME ->>>>>>> main charm_file = next( (f for f in pytestconfig.getoption("--charm-file") if f"{charm_name}_" in f), @@ -471,13 +460,6 @@ def haproxy_route_policy_fixture( lxd_juju.deploy( charm=charm_file, -<<<<<<< expose_haproxy_route_policy_service - app=app_name, - config={"hostname": host_name}, - ) - return app_name -======= app=HAPROXY_ROUTE_POLICY_APP_NAME, ) return HAPROXY_ROUTE_POLICY_APP_NAME ->>>>>>> main From cd9ba2fdf9a07af173c11fb83485884aeadaa42b Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 21 Apr 2026 15:59:59 +0200 Subject: [PATCH 198/201] resolve merge conflicts, update snap, remove unused conf --- haproxy-route-policy/haproxy_route_policy/settings.py | 9 --------- haproxy-route-policy/snap/hooks/configure | 2 -- haproxy-route-policy/snap/hooks/install | 1 - haproxy-route-policy/snap/scripts/bin/gunicorn-start | 2 -- haproxy-route-policy/snap/scripts/bin/manage | 2 -- 5 files changed, 16 deletions(-) diff --git a/haproxy-route-policy/haproxy_route_policy/settings.py b/haproxy-route-policy/haproxy_route_policy/settings.py index 92a50399f..11378cf9f 100644 --- a/haproxy-route-policy/haproxy_route_policy/settings.py +++ b/haproxy-route-policy/haproxy_route_policy/settings.py @@ -21,22 +21,13 @@ # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY") -<<<<<<< expose_haproxy_route_policy_service # This is set to True for django to serve static files for the admin UI directly # In future iteration the responsibility if serving static files will be moved to # a dedicated reverse proxy deployed by the policy charm. 
DEBUG = True ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS") or "[]") -CSRF_TRUSTED_ORIGINS = json.loads(os.getenv("DJANGO_CSRF_TRUSTED_ORIGINS") or "[]") -# settings.py SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") -======= -DEBUG = True - -ALLOWED_HOSTS = json.loads(os.getenv("DJANGO_ALLOWED_HOSTS", "[]")) - ->>>>>>> main # Application definition INSTALLED_APPS = [ diff --git a/haproxy-route-policy/snap/hooks/configure b/haproxy-route-policy/snap/hooks/configure index 8a4e42c66..61d9008c1 100644 --- a/haproxy-route-policy/snap/hooks/configure +++ b/haproxy-route-policy/snap/hooks/configure @@ -25,8 +25,6 @@ esac DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" export DJANGO_ALLOWED_HOSTS -DJANGO_CSRF_TRUSTED_ORIGINS="$(snapctl get csrf-trusted-origins)" -export DJANGO_CSRF_TRUSTED_ORIGINS DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" export DJANGO_DATABASE_PASSWORD DJANGO_DATABASE_HOST="$(snapctl get database-host)" diff --git a/haproxy-route-policy/snap/hooks/install b/haproxy-route-policy/snap/hooks/install index 27d665c65..5b80a85c0 100755 --- a/haproxy-route-policy/snap/hooks/install +++ b/haproxy-route-policy/snap/hooks/install @@ -9,6 +9,5 @@ set -e snapctl set debug='false' snapctl set log-level='INFO' snapctl set allowed-hosts='["localhost", "127.0.0.1"]' -snapctl set csrf-trusted-origins='[]' SECRET_KEY="$(tr -dc a-zA-Z0-9 < /dev/urandom | head -c 50)" snapctl set secret-key="$SECRET_KEY" diff --git a/haproxy-route-policy/snap/scripts/bin/gunicorn-start b/haproxy-route-policy/snap/scripts/bin/gunicorn-start index ec5afa702..b3342d99a 100755 --- a/haproxy-route-policy/snap/scripts/bin/gunicorn-start +++ b/haproxy-route-policy/snap/scripts/bin/gunicorn-start @@ -9,8 +9,6 @@ DJANGO_SECRET_KEY="$(snapctl get secret-key)" export DJANGO_SECRET_KEY DJANGO_ALLOWED_HOSTS="$(snapctl get allowed-hosts)" export DJANGO_ALLOWED_HOSTS -DJANGO_CSRF_TRUSTED_ORIGINS="$(snapctl get csrf-trusted-origins)" -export 
DJANGO_CSRF_TRUSTED_ORIGINS DJANGO_LOG_LEVEL="$(snapctl get log-level)" export DJANGO_LOG_LEVEL DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" diff --git a/haproxy-route-policy/snap/scripts/bin/manage b/haproxy-route-policy/snap/scripts/bin/manage index 84b530aa9..c9e1c4511 100755 --- a/haproxy-route-policy/snap/scripts/bin/manage +++ b/haproxy-route-policy/snap/scripts/bin/manage @@ -12,8 +12,6 @@ if [ -z "$DJANGO_ALLOWED_HOSTS" ]; then DJANGO_ALLOWED_HOSTS="[]" fi export DJANGO_ALLOWED_HOSTS -DJANGO_CSRF_TRUSTED_ORIGINS="$(snapctl get csrf-trusted-origins)" -export DJANGO_CSRF_TRUSTED_ORIGINS DJANGO_LOG_LEVEL="$(snapctl get log-level)" export DJANGO_LOG_LEVEL DJANGO_DATABASE_PASSWORD="$(snapctl get database-password)" From 86728566e81af391b3dcdfc58e484dbc493c2822 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 21 Apr 2026 16:02:27 +0200 Subject: [PATCH 199/201] resolve conflicts --- haproxy-route-policy-operator/src/charm.py | 23 ---------------------- 1 file changed, 23 deletions(-) diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index 021cac927..53bac2138 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -100,7 +100,6 @@ def _reconcile(self, _: ops.EventBase) -> None: **database_information.haproxy_route_policy_snap_configuration, } ) -<<<<<<< expose_haproxy_route_policy_service if self.unit.is_leader(): self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") @@ -117,24 +116,6 @@ def _reconcile(self, _: ops.EventBase) -> None: self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) -======= - - if self.unit.is_leader(): - self.unit.status = ops.MaintenanceStatus("[leader] running database migrations") - run_migrations() - - self.unit.status = ops.MaintenanceStatus("[leader] updating Django admin user") - create_or_update_user( - haproxy_route_policy_information.admin_username, - 
haproxy_route_policy_information.admin_password, - ) - - self.unit.status = ops.MaintenanceStatus("starting gunicorn service") - start_gunicorn_service() - - self.unit.open_port("tcp", HAPROXY_ROUTE_POLICY_PORT) - ->>>>>>> main if relation := self.haproxy_route_policy.relation: self._fetch_and_refresh_backend_requests(haproxy_route_policy_information, relation) @@ -184,13 +165,9 @@ def _fetch_and_refresh_backend_requests( len(evaluated), len(approved), ) -<<<<<<< expose_haproxy_route_policy_service self.haproxy_route_policy.set_approved_backend_requests( approved, HAPROXY_ROUTE_POLICY_PORT ) -======= - self.haproxy_route_policy.set_approved_backend_requests(approved) ->>>>>>> main if __name__ == "__main__": # pragma: nocover From 647f20f3a0550765a812896d43a198e9403c68c9 Mon Sep 17 00:00:00 2001 From: tphan025 Date: Tue, 21 Apr 2026 20:23:10 +0200 Subject: [PATCH 200/201] add change artifact --- docs/release-notes/artifacts/pr0475.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 docs/release-notes/artifacts/pr0475.yaml diff --git a/docs/release-notes/artifacts/pr0475.yaml b/docs/release-notes/artifacts/pr0475.yaml new file mode 100644 index 000000000..79cab2966 --- /dev/null +++ b/docs/release-notes/artifacts/pr0475.yaml @@ -0,0 +1,21 @@ +version_schema: 2 + +changes: + - title: Publish proxied endpoint URL to policy provider and auto-add host to + allowed-hosts + author: tphan025 + type: minor + description: > + Add a new proxied_endpoint field on the requirer app data. The + haproxy-operator now publishes the generated hostname. On the + policy-operator side, the charm extracts the host from the proxied + endpoint and appends it to the Django allowed-hosts list. Refactored + HaproxyRoutePolicyInformation to rename allowed_hosts to + extra_allowed_hosts. Updated unit tests accordingly. 
+ urls: + pr: + - https://github.com/canonical/haproxy-operator/pull/475 + related_doc: + related_issue: + visibility: public + highlight: false From 3833188e8a18115f5902e36310514dfe81eea10d Mon Sep 17 00:00:00 2001 From: tphan025 Date: Wed, 22 Apr 2026 11:19:55 +0200 Subject: [PATCH 201/201] update lib schema to not send a HttpUrl but send the hostname directly --- .../v0/haproxy_route_policy.py | 28 ++++++------- haproxy-operator/src/charm.py | 2 +- .../v0/haproxy_route_policy.py | 28 ++++++------- haproxy-route-policy-operator/src/charm.py | 16 ++++---- haproxy-route-policy-operator/src/policy.py | 6 +++ .../unit/test_haproxy_route_policy_lib.py | 6 +-- .../integration/test_haproxy_route_policy.py | 41 +++++++++++++++++++ 7 files changed, 85 insertions(+), 42 deletions(-) diff --git a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index a3f99227e..65e3131a0 100644 --- a/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -26,10 +26,7 @@ from pydantic import ( BeforeValidator, Field, - HttpUrl, - TypeAdapter, ValidationError, - field_validator, model_validator, ) from pydantic.dataclasses import dataclass @@ -43,7 +40,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 7 +LIBPATCH = 8 def valid_domain_with_wildcard(value: str) -> str: @@ -64,6 +61,17 @@ def valid_domain_with_wildcard(value: str) -> str: return value +def valid_domain(value: str) -> str: + """Validate if value is a valid domain without wildcards. + + Raises: + ValueError: When value is not a valid domain. 
+ """ + if not bool(domain(value)): + raise ValueError(f"Invalid domain: {value}") + return value + + logger = logging.getLogger(__name__) HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" @@ -104,20 +112,10 @@ class HaproxyRoutePolicyRequirerAppData: backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of backends to be evaluated by the policy service." ) - proxied_endpoint: str | None = Field( + proxied_endpoint: Annotated[str, BeforeValidator(valid_domain)] | None = Field( description=("URL for the proxied endpoint that's exposing the Django web UI."), ) - @field_validator("proxied_endpoint") - def validate_proxied_endpoint(cls, value: str | None) -> str | None: - """Validate that the proxied endpoint, if provided, is a valid URL.""" - if value is not None: - try: - TypeAdapter(HttpUrl).validate_python(value) - except ValueError as exc: - raise ValueError(f"Invalid proxied endpoint URL: {value}") from exc - return value - @model_validator(mode="after") def validate_unique_backend_names(self): """Ensure that backend names are unique across all requests.""" diff --git a/haproxy-operator/src/charm.py b/haproxy-operator/src/charm.py index 28888635c..43ab1f467 100755 --- a/haproxy-operator/src/charm.py +++ b/haproxy-operator/src/charm.py @@ -370,7 +370,7 @@ def _configure_haproxy_route( if self.unit.is_leader() and self.haproxy_route_policy.relation is not None: self.haproxy_route_policy.provide_haproxy_route_policy_requests( haproxy_route_requirers_information.backend_requests_for_policy, - f"https://{haproxy_route_requirers_information.policy_provider_backend.hostname}" + haproxy_route_requirers_information.policy_provider_backend.hostname if haproxy_route_requirers_information.policy_provider_backend else None, ) diff --git a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py index 
a3f99227e..65e3131a0 100644 --- a/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py +++ b/haproxy-route-policy-operator/lib/charms/haproxy_route_policy/v0/haproxy_route_policy.py @@ -26,10 +26,7 @@ from pydantic import ( BeforeValidator, Field, - HttpUrl, - TypeAdapter, ValidationError, - field_validator, model_validator, ) from pydantic.dataclasses import dataclass @@ -43,7 +40,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 7 +LIBPATCH = 8 def valid_domain_with_wildcard(value: str) -> str: @@ -64,6 +61,17 @@ def valid_domain_with_wildcard(value: str) -> str: return value +def valid_domain(value: str) -> str: + """Validate if value is a valid domain without wildcards. + + Raises: + ValueError: When value is not a valid domain. + """ + if not bool(domain(value)): + raise ValueError(f"Invalid domain: {value}") + return value + + logger = logging.getLogger(__name__) HAPROXY_ROUTE_POLICY_RELATION_NAME = "haproxy-route-policy" @@ -104,20 +112,10 @@ class HaproxyRoutePolicyRequirerAppData: backend_requests: list[HaproxyRoutePolicyBackendRequest] = Field( description="List of backends to be evaluated by the policy service." 
) - proxied_endpoint: str | None = Field( + proxied_endpoint: Annotated[str, BeforeValidator(valid_domain)] | None = Field( description=("URL for the proxied endpoint that's exposing the Django web UI."), ) - @field_validator("proxied_endpoint") - def validate_proxied_endpoint(cls, value: str | None) -> str | None: - """Validate that the proxied endpoint, if provided, is a valid URL.""" - if value is not None: - try: - TypeAdapter(HttpUrl).validate_python(value) - except ValueError as exc: - raise ValueError(f"Invalid proxied endpoint URL: {value}") from exc - return value - @model_validator(mode="after") def validate_unique_backend_names(self): """Ensure that backend names are unique across all requests.""" diff --git a/haproxy-route-policy-operator/src/charm.py b/haproxy-route-policy-operator/src/charm.py index b7063ce67..52d76cd47 100644 --- a/haproxy-route-policy-operator/src/charm.py +++ b/haproxy-route-policy-operator/src/charm.py @@ -23,6 +23,7 @@ configure_snap, create_or_update_user, install_snap, + is_service_active, run_migrations, start_gunicorn_service, ) @@ -101,13 +102,14 @@ def _reconcile(self, _: ops.EventBase) -> None: haproxy_route_policy_requirer_data = relation.load( HaproxyRoutePolicyRequirerAppData, relation.app ) - self._fetch_and_refresh_backend_requests( - haproxy_route_policy_information, haproxy_route_policy_requirer_data - ) - if (proxied_endpoint := haproxy_route_policy_requirer_data.proxied_endpoint) and ( - host := proxied_endpoint.host - ): - allowed_hosts.append(host) + if is_service_active(): + # We can only send requests to the policy API if the service is active. 
+ self._fetch_and_refresh_backend_requests( + haproxy_route_policy_information, haproxy_route_policy_requirer_data + ) + + if proxied_endpoint := haproxy_route_policy_requirer_data.proxied_endpoint: + allowed_hosts.append(proxied_endpoint) configure_snap( { diff --git a/haproxy-route-policy-operator/src/policy.py b/haproxy-route-policy-operator/src/policy.py index 62aa672fb..f53e6284b 100644 --- a/haproxy-route-policy-operator/src/policy.py +++ b/haproxy-route-policy-operator/src/policy.py @@ -59,6 +59,12 @@ def start_gunicorn_service() -> None: package.start() +def is_service_active() -> bool: + """Check if the snap gunicorn app is active.""" + package = snap.SnapCache()[SNAP_NAME] + return package.services["haproxy-route-policy"].get("active", False) + + def create_or_update_user(username: str, password: str) -> None: """Create or update the HTTP proxy policy superuser. diff --git a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py index 41fdd69aa..a81b55463 100644 --- a/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py +++ b/haproxy-route-policy-operator/tests/unit/test_haproxy_route_policy_lib.py @@ -3,8 +3,6 @@ """Unit tests for haproxy-route-policy interface library models.""" -from typing import cast - import pytest from charms.haproxy_route_policy.v0.haproxy_route_policy import ( HaproxyRoutePolicyBackendRequest, @@ -12,7 +10,7 @@ HaproxyRoutePolicyRequirerAppData, valid_domain_with_wildcard, ) -from pydantic import HttpUrl, ValidationError +from pydantic import ValidationError VALID_BACKEND_REQUEST = { "relation_id": 10, @@ -103,7 +101,7 @@ def test_requirer_app_data_model_accepts_valid_payload(): """ request = HaproxyRoutePolicyBackendRequest(**VALID_BACKEND_REQUEST) app_data = HaproxyRoutePolicyRequirerAppData( - backend_requests=[request], proxied_endpoint=cast(HttpUrl, "https://example.com") + backend_requests=[request], 
proxied_endpoint="example.com" ) assert len(app_data.backend_requests) == 1 diff --git a/tests/integration/test_haproxy_route_policy.py b/tests/integration/test_haproxy_route_policy.py index fa7b22637..fae200f56 100644 --- a/tests/integration/test_haproxy_route_policy.py +++ b/tests/integration/test_haproxy_route_policy.py @@ -8,10 +8,15 @@ import jubilant import pytest +from typing import Callable, Any +import json +import requests +from .helper import get_unit_ip_address logger = logging.getLogger(__name__) TEST_HOSTNAME = "example.com" +HAPROXY_ROUTE_REQUIRER_NAME = "haproxy-route-requirer" @pytest.mark.abort_on_fail @@ -20,10 +25,46 @@ def test_haproxy_route_policy( haproxy_route_policy: str, lxd_juju: jubilant.Juju, postgresql: str, + any_charm_haproxy_route_deployer: Callable[[str], Any], ): """Test the HAProxy route policy integration.""" + any_charm_haproxy_route_deployer(HAPROXY_ROUTE_REQUIRER_NAME) lxd_juju.integrate(f"{haproxy_route_policy}:database", f"{postgresql}:database") lxd_juju.integrate( f"{configured_application_with_tls}:haproxy-route-policy", haproxy_route_policy, ) + lxd_juju.integrate( + f"{HAPROXY_ROUTE_REQUIRER_NAME}:require-haproxy-route", configured_application_with_tls + ) + lxd_juju.run( + f"{HAPROXY_ROUTE_REQUIRER_NAME}/0", + "rpc", + { + "method": "update_relation", + "args": json.dumps( + [ + { + "service": HAPROXY_ROUTE_REQUIRER_NAME, + "ports": [80], + "hostname": TEST_HOSTNAME, + } + ] + ), + }, + ) + lxd_juju.wait(jubilant.all_active) + admin_credentials = lxd_juju.run( + f"{haproxy_route_policy}/0", + "get-admin-credentials", + ) + logger.info(f"Admin credentials: {admin_credentials}") + haproxy_unit_ip = get_unit_ip_address(lxd_juju, configured_application_with_tls) + + response = requests.get( + f"https://{str(haproxy_unit_ip)}/api/v1/requests", + headers={"Host": TEST_HOSTNAME}, + auth=("admin", admin_credentials["password"]), + verify=False, + ) + logger.info(f"Response from HAProxy route policy: {response.json()}")