diff --git a/README.rst b/README.rst index d6dadb6..1440c0e 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,8 @@ product-listings-manager ======================== -.. image:: https://copr.fedorainfracloud.org/coprs/ktdreyer/product-listings-manager/package/product-listings-manager/status_image/last_build.png - :target: https://copr.fedorainfracloud.org/coprs/ktdreyer/product-listings-manager/package/product-listings-manager/ - -.. image:: https://quay.io/repository/redhat/product-listings-manager/status - :target: https://quay.io/repository/redhat/product-listings-manager +.. image:: https://quay.io/repository/factory2/product-listings-manager/status + :target: https://quay.io/repository/factory2/product-listings-manager .. image:: https://coveralls.io/repos/github/release-engineering/product-listings-manager/badge.svg?branch=master :target: https://coveralls.io/github/release-engineering/product-listings-manager?branch=master @@ -53,89 +50,104 @@ Architecture diagram :height: 364px :alt: product-listings-manager architecture diagram -Installation and setup ----------------------- - -1. Install the prerequisite system packages:: - - $ sudo dnf -y install postgresql-devel krb5-devel rpm-devel gcc python-devel python3-virtualenvwrapper - -2. Set up a virtualenv:: - - $ mkvirtualenv -p python3 plm - - ... Run ``source /usr/bin/virtualenvwrapper.sh`` if ``mkvirtualenv`` command not available - -3. Install the prerequisite packages:: +Running the tests +----------------- - $ workon plm - $ pip install -r requirements.txt +You can invoke the tests with ``tox``:: -4. Create ``config.py`` with the database settings:: + $ tox - $ echo "SQLALCHEMY_DATABASE_URI = 'postgresql://myusername:mypass@dbhost/dbname'" > config.py - $ vi config.py +Using the ``--live`` argument if you want to run against the live composedb instance:: -5. 
Set the ``PLM_CONFIG_FILE`` environment variable to the full filesystem path of - this new file:: + $ tox -e py3 -- --cov=product_listings_manager --live tests - $ export PLM_CONFIG_FILE=$(pwd)/config.py +Running the linters +------------------- -6. Install brewkoji package. This creates ``/etc/koji.conf.d/brewkoji.conf``, - so ``products.py`` can contact the Brew hub:: +To run viable linters to check syntax of various files before commit, install +`pre-commit <https://pre-commit.com/>`__ and run:: - $ sudo dnf -y install brewkoji + $ pre-commit install -7. Trust Brew's SSL certificate:: +To run linters on all files (not just the ones changed in the last commit), +run:: - $ export REQUESTS_CA_BUNDLE=/etc/pki/ca-trust/source/anchors/RH-IT-Root-CA.crt + $ pre-commit run -a - ... Or if you've installed this globally on your system, tell requests to use - your global CA store:: +Setting up local environment +---------------------------- - $ export REQUESTS_CA_BUNDLE=/etc/pki/tls/certs/ca-bundle.crt +You can use ``docker-compose`` or ``podman-compose`` to start: -8. Run the server:: +- product-listings-manager - the web service running at ``http://localhost:8080`` +- postgres - database for product-listings-manager, initialized with all + ``*.sql`` files in ``docker/docker-entrypoint-initdb.d`` directory +- jaeger - collector and query service for OpenTelemetry traces collected from + the local instance of product-listings-manager and the database, running at + ``http://localhost:16686`` - $ FLASK_APP=product_listings_manager.app flask run +Rebuild product-listings-manager image:: -The Flask web server will run on TCP 5000. + $ podman-compose build -You can access the http://localhost:5000/ at that point. +Image rebuild is needed only if dependencies change. The container running in +the compose environment uses the current source code directory. 
-Running the tests ------------------ +Start the services:: -Install required packages for test:: + $ podman-compose up - $ pip install -r test-requirements.txt +Show logs:: -You can invoke the tests with ``tox``:: + $ podman-compose logs plm + $ podman-compose logs plm-db + $ podman-compose logs jaeger - $ tox +Restart product-listings-manager:: -Alternatively, you can run pytest directly:: + $ podman-compose restart plm - $ pytest --cov=product_listings_manager tests +Stop the services:: -Using the ``--live`` argument if you want to run against the live composedb instance:: + $ podman-compose down - $ pytest --cov=product_listings_manager --live tests +Configuration +------------- -Running the linters ------------------- +The service is a containerized application and uses environment variables for +configuration: -To run viable linters to check syntax of various files before commit, install -[pre-commit](https://pre-commit.ci/) and run:: +- ``SQLALCHEMY_DATABASE_URI`` - full database URI for SQLAlchemy, for example: + ``postgresql://username:password@plm-db.example.com:5433/plm`` +- ``OTEL_EXPORTER_OTLP_TRACES_ENDPOINT`` - traces endpoint for OpenTelemetry + tracing, for example: ``https://otel.example.com/v1/traces`` +- ``OTEL_EXPORTER_SERVICE_NAME`` - service name for OpenTelemetry tracing +- ``PLM_KOJI_CONFIG_PROFILE`` - Koji profile to use (in ``/etc/koji.conf.d/`` + directory), default is ``brew`` +- ``PLM_LDAP_HOST`` - LDAP host, for example ``ldaps://ldap.example.com`` +- ``PLM_LDAP_SEARCHES`` - JSON formatted array with LDAP search base and search + template, for example: - $ pre-commit install + .. code-block:: json -To run linters on all files (not just the ones changed in the last commit), -run:: + [{"BASE": "ou=Groups,dc=example,dc=com", "SEARCH_STRING": "(memberUid={user})"}] - $ pre-commit run -a +- ``PLM_PERMISSIONS`` - JSON formatted array with permissions, for example: -Configuring a local database ----------------------------- + .. 
code-block:: json -See ``database.rst`` for instructions to configure a local postgres instance. + [ + { + "name": "admins", + "description": "product-listings-manager admins", + "contact": "plm-admins@example.com", + "queries": ["*"], + "groups": ["plm-admins"], + "users": ["alice", "bob"] + }, + { + "name": "viewers", + "queries": ["SELECT *"], + "groups": ["plm-users"] + } + ] diff --git a/database.rst b/database.rst deleted file mode 100644 index 13e1038..0000000 --- a/database.rst +++ /dev/null @@ -1,181 +0,0 @@ -set up a composedb instance for testing -======================================= - -This walkthrough explains how to set up a dedicated Postgres VM for Product -Listings Manager's database. - - -Host OS -------- - -Currently the production compose database environment is RHEL 5 with -postgresql-8.2.14-1.el5s2 . - -There are plans to upgrade this server to RHEL 7, so you may as well use RHEL 7 -in your VM. - -Installing the postgresql server package ----------------------------------------- - -Install the ``postgresql-server`` package:: - - yum install postgresql-server - -Initialize the database storage location. - -On RHEL 5:: - - su - postgres -c "initdb /var/lib/pgsql/data" - -On RHEL 7:: - - postgresql-setup initdb - -Allow network connections -------------------------- - -Edit ``/var/lib/pgsql/data/pg_hba.conf`` and add the line:: - - host all all 0.0.0.0/0 trust - -On RHEL 7, you'll also need IPv6, because libpq connections to "localhost" use -IPv6 by default:: - - host all all ::1/0 trust - -(... note, "trust" gives full admin rights to all users from anywhere.) - -Edit ``/var/lib/pgsql/data/postgresql.conf`` and add the line:: - - listen_addresses = '*' - -Start and Enable the service ----------------------------- - -This is straightforward. 
- -On RHEL 5:: - - service postgresql start - chkconfig postgresql on - -On RHEL 7:: - - systemctl start postgresql - systemctl enable postgresql - -Sanity-check that the daemon is listening on all interfaces:: - - netstat -an | grep 5432 - -This should should show postgres listening on ``0.0.0.0:5432``. - -Connecting ----------- - -On your local workstation, connect to your Posgres VM's IP address. Here's an -example connecting to Postgres on RHEL 7:: - - psql -h 192.168.122.124 -U postgres - - postgres=# select VERSION(); - PostgreSQL 9.2.23 on x86_64-redhat-linux-gnu, compiled by gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-16), 64-bit - (1 row) - -Compare this to production composedb environment (currently RHEL 5):: - - psql -h (hostname) -U (username) compose -W -c "select VERSION();" - PostgreSQL 8.2.14 on x86_64-redhat-linux-gnu, compiled by GCC gcc (GCC) 4.1.2 20080704 (Red Hat 4.1.2-46) - - -The ``\q`` command will quit the ``psql`` prompt. - -Create the application user ---------------------------- - -On your local workstation, you can use the ``createuser`` shell utility -(``postgresql`` package) to create the "compose_ro" user account:: - - createuser -h 192.168.122.124 -U postgres -P --no-createdb --no-createrole compose_ro - -``createuser`` will prompt you to set a password for this new user. - -Create the application database -------------------------------- - -On your local workstation, you can use the ``createdb`` shell utility -(``postgresql`` package) to create the "compose" database:: - - createdb -h 192.168.122.124 -U postgres --encoding SQL_ASCII compose - -On RHEL 7, you must append ``--template template0`` to this command. - -Note, the encoding is important, because this is what is set on the prod -server. 
*If* you forgot to set ``--encoding`` during ``createdb`` above, you -can set it later:: - - update pg_database set encoding = pg_char_to_encoding('SQL_ASCII') where datname = 'compose'; - -If you don't set the encoding here, you cannot import data dumps from -the production server. - -Dumping and restoring tables from production --------------------------------------------- - -products.py needs the following tables:: - - match_versions - overrides - packages - products - tree_packages - tree_product_map - trees - -You can use ``db_dump`` to obtain these from production. Here are the file -sizes from ``db_dump``'s custom compressed format:: - - 697 match_versions - 666K overrides.backup - 157M packages.backup - 31K products.backup - 1.3G tree_packages.backup - 7.1M trees.backup - -Note: the ``tree_packages`` table is 1.3GB compressed (unknown size -uncompressed). It took 18 minutes to dump from prod over my VPN connection -(1.25MB/s). - -I've not yet been able to restore this ``tree_packages`` table to a development -VM. On RHEL 5, ``pg_restore`` churns for an hour and then OOMs even with 4GB -RAM. On RHEL 7 ``pg_restore`` does not leak memory, but I kept running out of -disk space in ``/var/lib/pgsql/data``. It took up 50GB of space before I gave -up. - -Diffing schemas from production -------------------------------- - -The production database schema has been managed by hand over the years, so we -need to reverse-engineer it into something that we can reproduce with -SQLAlchemy. To do that, it's helpful to compare the development server's schema -with the production server. - -Connect your ``psql`` client to both databases. - -Use ``\dt`` to list all tables, and ``\d+ tablename`` to show the schemas for -each table. 
- -:: - - compose=> \d+ match_versions - Table "public.match_versions" - Column | Type | Modifiers | Storage | Stats target | Description - ---------+------------------------+-----------+----------+--------------+------------- - name | character varying | | extended | | - product | character varying(100) | | extended | | - Indexes: - "match_versions_un" UNIQUE, btree (name, product) - Has OIDs: yes - - -You can compare the output of ``\d+`` in both environments. diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..53be34b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,49 @@ +--- +version: '3' +services: + plm: + build: . + image: product-listings-manager + user: ${DEV_USER_ID:-1000} + working_dir: /src + volumes: + - ./:/src:ro,z + - ./docker/koji.conf.d:/etc/koji.conf.d:ro,z + ports: + - 127.0.0.1:8080:5000 + healthcheck: + test: >- + /src/docker/docker-entrypoint.sh python -c 'import requests; + requests.get("http://127.0.0.1:5000/api/v1.0/health").raise_for_status();' + interval: 5s + timeout: 5s + retries: 10 + environment: + - OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://jaeger:4318/v1/traces + - OTEL_EXPORTER_SERVICE_NAME=product-listings-manager + - PLM_KOJI_CONFIG_PROFILE=fedora + - SQLALCHEMY_DATABASE_URI=postgresql://plm:plm@plm-db:5433/plm + depends_on: + - plm-db + - jaeger + + plm-db: + image: postgres:14.7-alpine + volumes: + - ./docker/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d:ro,z + restart: always + environment: + - POSTGRES_USER=plm + - POSTGRES_PASSWORD=plm + - POSTGRES_DB=plm + - PGPORT=5433 + - POSTGRES_INITDB_ARGS=--auth=ident --auth=trust + + jaeger: + image: quay.io/jaegertracing/all-in-one:1.53 + ports: + - 127.0.0.1:16686:16686 + +networks: + default: + driver: bridge diff --git a/docker/docker-entrypoint-initdb.d/05-init.sql b/docker/docker-entrypoint-initdb.d/05-init.sql new file mode 100644 index 0000000..bc8cc9b --- /dev/null +++ b/docker/docker-entrypoint-initdb.d/05-init.sql @@ -0,0 
+1,526 @@ +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'SQL_ASCII'; +SET standard_conforming_strings = off; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET escape_string_warning = off; +SET row_security = off; + +CREATE OR REPLACE PROCEDURAL LANGUAGE plpgsql; + +CREATE ROLE compose; + +SET default_tablespace = ''; + +CREATE TABLE public.capability ( + id integer NOT NULL, + name character varying NOT NULL, + version character varying(32) +); + +CREATE SEQUENCE public.capability_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE TABLE public.changelogs ( + packages_id integer NOT NULL, + date date NOT NULL, + author character varying NOT NULL, + text text, + id integer DEFAULT nextval(('changelogs_id_seq'::text)::regclass) +); + +CREATE SEQUENCE public.changelogs_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE SEQUENCE public.file_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE TABLE public.filename ( + id integer NOT NULL, + file character varying NOT NULL +); + +CREATE SEQUENCE public.filename_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE TABLE public.fileowner ( + id integer NOT NULL, + username character varying NOT NULL, + groupname character varying NOT NULL +); + +CREATE SEQUENCE public.fileowner_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE TABLE public.files ( + id integer NOT NULL, + name_id integer NOT NULL, + type_id integer NOT NULL, + owner_id integer NOT NULL, + mode integer NOT NULL, + size integer NOT NULL, + flags integer NOT NULL, + mtime timestamp without time zone NOT NULL, + vflags integer NOT NULL, + md5sum character varying, + statinfo integer NOT NULL, + linksto character varying +); + +CREATE TABLE public.filetype ( + id integer NOT 
NULL, + name character varying(10) NOT NULL, + description character varying NOT NULL +); + +CREATE VIEW public.files_view AS +SELECT f.id AS file_id, f."mode" AS file_mode, fo.username AS file_user, fo.groupname AS file_group, f.size AS file_size, ft.name AS file_type, CASE WHEN (f.linksto IS NULL) THEN (fn.file)::text WHEN ((f.linksto)::text = ''::text) THEN (fn.file)::text ELSE (((fn.file)::text || ' -> '::text) || (f.linksto)::text) END AS file_path, f.md5sum AS file_md5 FROM public.files f, public.filename fn, public.filetype ft, public.fileowner fo WHERE (((f.name_id = fn.id) AND (f.type_id = ft.id)) AND (f.owner_id = fo.id)); + +CREATE TABLE public.match_versions ( + name character varying, + product character varying(100) +); + +CREATE SEQUENCE public.module_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE TABLE public.module_overrides ( + name character varying NOT NULL, + stream character varying NOT NULL, + product integer NOT NULL, + product_arch character varying(32) NOT NULL +); + +CREATE TABLE public.modules ( + id integer NOT NULL, + name character varying NOT NULL, + stream character varying NOT NULL, + version character varying NOT NULL +); + +CREATE TABLE public.overrides ( + name character varying NOT NULL, + pkg_arch character varying(32) NOT NULL, + product_arch character varying(32) NOT NULL, + product integer NOT NULL, + include boolean DEFAULT true +); + +CREATE TABLE public.package_caps ( + package_id integer NOT NULL, + cap_id integer NOT NULL, + sense integer DEFAULT 0 NOT NULL, + cap_type character(1) NOT NULL, + CONSTRAINT package_caps_type_check CHECK (((((cap_type = 'P'::bpchar) OR (cap_type = 'R'::bpchar)) OR (cap_type = 'O'::bpchar)) OR (cap_type = 'C'::bpchar))) +); + +CREATE VIEW public.package_capabilities AS +SELECT pcaps.package_id, pcaps.cap_type, CASE WHEN (pcaps.cap_type = 'R'::bpchar) THEN 'requires'::text WHEN (pcaps.cap_type = 'P'::bpchar) THEN 'provides'::text WHEN (pcaps.cap_type = 'O'::bpchar) 
THEN 'obsoletes'::text WHEN (pcaps.cap_type = 'C'::bpchar) THEN 'conflicts'::text ELSE 'do not know'::text END AS dep_type, pcaps.cap_id, caps.name, caps.version, pcaps.sense FROM public.package_caps pcaps, public.capability caps WHERE (pcaps.cap_id = caps.id); + +CREATE VIEW public.package_conflicts AS +SELECT package_capabilities.package_id, package_capabilities.cap_type, package_capabilities.dep_type, package_capabilities.cap_id, package_capabilities.name, package_capabilities.version, package_capabilities.sense FROM public.package_capabilities WHERE (package_capabilities.cap_type = 'C'::bpchar); + +CREATE TABLE public.package_files ( + package_id integer NOT NULL, + file_id integer NOT NULL +); + +CREATE SEQUENCE public.package_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE VIEW public.package_obsoletes AS +SELECT package_capabilities.package_id, package_capabilities.cap_type, package_capabilities.dep_type, package_capabilities.cap_id, package_capabilities.name, package_capabilities.version, package_capabilities.sense FROM public.package_capabilities WHERE (package_capabilities.cap_type = 'O'::bpchar); + +CREATE VIEW public.package_provides AS +SELECT package_capabilities.package_id, package_capabilities.cap_type, package_capabilities.dep_type, package_capabilities.cap_id, package_capabilities.name, package_capabilities.version, package_capabilities.sense FROM public.package_capabilities WHERE (package_capabilities.cap_type = 'P'::bpchar); + +CREATE VIEW public.package_requires AS +SELECT package_capabilities.package_id, package_capabilities.cap_type, package_capabilities.dep_type, package_capabilities.cap_id, package_capabilities.name, package_capabilities.version, package_capabilities.sense FROM public.package_capabilities WHERE (package_capabilities.cap_type = 'R'::bpchar); + +CREATE TABLE public.packages ( + id integer NOT NULL, + name character varying NOT NULL, + epoch integer, + version character varying NOT NULL, + release 
character varying NOT NULL, + arch character varying(32) NOT NULL, + payload_md5 character varying NOT NULL, + build_time timestamp without time zone NOT NULL, + build_host character varying NOT NULL, + size bigint NOT NULL, + sourcerpm character varying, + rpm_group character varying NOT NULL, + license character varying NOT NULL, + summary character varying NOT NULL, + description text NOT NULL, + distribution character varying NOT NULL, + url character varying, + vendor character varying, + script_pre text, + script_post text, + script_preun text, + script_postun text, + script_verify text +); + +CREATE TABLE public.products ( + id integer NOT NULL, + label character varying(100) NOT NULL, + version character varying(100) NOT NULL, + variant character varying(200), + allow_source_only boolean DEFAULT false +); + +CREATE SEQUENCE public.products_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE SEQUENCE public.tree_id_seq + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +CREATE TABLE public.tree_modules ( + trees_id integer NOT NULL, + modules_id integer NOT NULL +); + +CREATE TABLE public.tree_packages ( + trees_id integer NOT NULL, + packages_id integer NOT NULL +); + +CREATE TABLE public.tree_product_map ( + tree_id integer NOT NULL, + product_id integer NOT NULL +); + +CREATE TABLE public.trees ( + id integer NOT NULL, + name character varying NOT NULL, + buildname character varying NOT NULL, + date date NOT NULL, + arch character varying(10) NOT NULL, + treetype character varying, + treeinfo character varying, + imported integer DEFAULT 0 NOT NULL, + product integer, + compatlayer boolean DEFAULT false +); + +ALTER TABLE ONLY public.capability + ADD CONSTRAINT capability_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.capability + ADD CONSTRAINT capability_name_version_uq UNIQUE (name, version); + +ALTER TABLE ONLY public.changelogs + ADD CONSTRAINT changelogs_id_un UNIQUE (id); + +ALTER TABLE ONLY public.filename + ADD 
CONSTRAINT filename_file_uq UNIQUE (file); + +ALTER TABLE ONLY public.filename + ADD CONSTRAINT filename_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.fileowner + ADD CONSTRAINT fileowner_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.fileowner + ADD CONSTRAINT fileowner_user_group_uq UNIQUE (username, groupname); + +ALTER TABLE ONLY public.files + ADD CONSTRAINT files_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.filetype + ADD CONSTRAINT filetype_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.filetype + ADD CONSTRAINT filetype_name_uq UNIQUE (name); + +ALTER TABLE ONLY public.match_versions + ADD CONSTRAINT match_versions_un UNIQUE (name, product); + +ALTER TABLE ONLY public.module_overrides + ADD CONSTRAINT module_overrides_un UNIQUE (name, stream, product, product_arch); + +ALTER TABLE ONLY public.modules + ADD CONSTRAINT modules_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.modules + ADD CONSTRAINT modules_nsv_uq UNIQUE (name, stream, version); + +ALTER TABLE ONLY public.overrides + ADD CONSTRAINT overrides_un UNIQUE (name, pkg_arch, product_arch, product); + +ALTER TABLE ONLY public.package_caps + ADD CONSTRAINT package_caps_pid_cid_sense_uq UNIQUE (package_id, cap_id, sense, cap_type); + +ALTER TABLE ONLY public.package_files + ADD CONSTRAINT package_files_pid_fid_uq UNIQUE (package_id, file_id); + +ALTER TABLE ONLY public.packages + ADD CONSTRAINT packages_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.packages + ADD CONSTRAINT packages_nvra_sum_uq UNIQUE (name, version, release, arch, payload_md5); + +ALTER TABLE ONLY public.products + ADD CONSTRAINT products_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.tree_modules + ADD CONSTRAINT tree_modules_tid_mid_uq UNIQUE (trees_id, modules_id); + +ALTER TABLE ONLY public.tree_packages + ADD CONSTRAINT tree_packages_tid_pid_uq UNIQUE (trees_id, packages_id); + +ALTER TABLE ONLY public.tree_product_map + ADD CONSTRAINT tree_product_map_tree_id_key UNIQUE (tree_id, product_id); + 
+ALTER TABLE ONLY public.trees + ADD CONSTRAINT trees_buildname_arch_uq UNIQUE (buildname, arch); + +ALTER TABLE ONLY public.trees + ADD CONSTRAINT trees_id_pk PRIMARY KEY (id); + +ALTER TABLE ONLY public.trees + ADD CONSTRAINT trees_name_arch_uq UNIQUE (name, arch); + +CREATE INDEX changelogs_pid_idx ON public.changelogs USING btree (packages_id); + +CREATE INDEX files_name_id_idx ON public.files USING btree (name_id, mtime); + +CREATE INDEX package_files_file_idx ON public.package_files USING btree (file_id); + +CREATE INDEX packages_name_arch ON public.packages USING btree (name, arch); + +CREATE INDEX products_label_idx ON public.products USING btree (label); + +CREATE INDEX tree_packages_package_idx ON public.tree_packages USING btree (packages_id); + +CREATE INDEX tree_product_map_product_id_idx ON public.tree_product_map USING btree (product_id); + +CREATE INDEX tree_product_map_tree_id_idx ON public.tree_product_map USING btree (tree_id); + +CREATE INDEX trees_compatlayer_idx ON public.trees USING btree (compatlayer); + +CREATE INDEX trees_imported_idx ON public.trees USING btree (imported); + +CREATE INDEX trees_product_idx ON public.trees USING btree (product); + +ALTER TABLE ONLY public.trees + ADD CONSTRAINT "$1" FOREIGN KEY (product) REFERENCES public.products(id); + +ALTER TABLE ONLY public.overrides + ADD CONSTRAINT "$1" FOREIGN KEY (product) REFERENCES public.products(id); + +ALTER TABLE ONLY public.tree_product_map + ADD CONSTRAINT "$1" FOREIGN KEY (tree_id) REFERENCES public.trees(id); + +ALTER TABLE ONLY public.module_overrides + ADD CONSTRAINT "$1" FOREIGN KEY (product) REFERENCES public.products(id); + +ALTER TABLE ONLY public.tree_product_map + ADD CONSTRAINT "$2" FOREIGN KEY (product_id) REFERENCES public.products(id); + +ALTER TABLE ONLY public.changelogs + ADD CONSTRAINT changelogs_pid_fk FOREIGN KEY (packages_id) REFERENCES public.packages(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.files + ADD CONSTRAINT files_name_id_fk FOREIGN KEY 
(name_id) REFERENCES public.filename(id); + +ALTER TABLE ONLY public.files + ADD CONSTRAINT files_owner_id_fk FOREIGN KEY (owner_id) REFERENCES public.fileowner(id); + +ALTER TABLE ONLY public.files + ADD CONSTRAINT files_type_id_fk FOREIGN KEY (type_id) REFERENCES public.filetype(id); + +ALTER TABLE ONLY public.package_caps + ADD CONSTRAINT package_caps_cid_fk FOREIGN KEY (cap_id) REFERENCES public.capability(id); + +ALTER TABLE ONLY public.package_caps + ADD CONSTRAINT package_caps_pid_fk FOREIGN KEY (package_id) REFERENCES public.packages(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.package_files + ADD CONSTRAINT package_files_fid_fk FOREIGN KEY (file_id) REFERENCES public.files(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.package_files + ADD CONSTRAINT package_files_pid_fk FOREIGN KEY (package_id) REFERENCES public.packages(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.tree_modules + ADD CONSTRAINT treemodules_mid_fk FOREIGN KEY (modules_id) REFERENCES public.modules(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.tree_modules + ADD CONSTRAINT treemodules_tid_fk FOREIGN KEY (trees_id) REFERENCES public.trees(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.tree_packages + ADD CONSTRAINT treepkgs_pid_fk FOREIGN KEY (packages_id) REFERENCES public.packages(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.tree_packages + ADD CONSTRAINT treepkgs_tid_fk FOREIGN KEY (trees_id) REFERENCES public.trees(id) ON DELETE CASCADE; + +REVOKE ALL ON SCHEMA public FROM PUBLIC; +GRANT ALL ON SCHEMA public TO PUBLIC; +GRANT USAGE ON SCHEMA public TO compose; + +REVOKE ALL ON LANGUAGE plpgsql FROM PUBLIC; +GRANT ALL ON LANGUAGE plpgsql TO compose; + +REVOKE ALL ON TABLE public.capability FROM PUBLIC; +GRANT SELECT ON TABLE public.capability TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.capability TO plm; + +REVOKE ALL ON SEQUENCE public.capability_id_seq FROM PUBLIC; +GRANT SELECT,UPDATE ON SEQUENCE public.capability_id_seq TO plm; + +REVOKE ALL ON 
TABLE public.changelogs FROM PUBLIC; +GRANT SELECT ON TABLE public.changelogs TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.changelogs TO plm; + +REVOKE ALL ON SEQUENCE public.changelogs_id_seq FROM PUBLIC; +GRANT SELECT,USAGE ON SEQUENCE public.changelogs_id_seq TO plm; + +REVOKE ALL ON SEQUENCE public.file_id_seq FROM PUBLIC; +GRANT SELECT,UPDATE ON SEQUENCE public.file_id_seq TO plm; + +REVOKE ALL ON TABLE public.filename FROM PUBLIC; +GRANT SELECT ON TABLE public.filename TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.filename TO plm; + +REVOKE ALL ON SEQUENCE public.filename_id_seq FROM PUBLIC; +GRANT SELECT,UPDATE ON SEQUENCE public.filename_id_seq TO plm; + +REVOKE ALL ON TABLE public.fileowner FROM PUBLIC; +GRANT SELECT ON TABLE public.fileowner TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.fileowner TO plm; + +REVOKE ALL ON SEQUENCE public.fileowner_id_seq FROM PUBLIC; +GRANT SELECT,UPDATE ON SEQUENCE public.fileowner_id_seq TO plm; + +REVOKE ALL ON TABLE public.files FROM PUBLIC; +GRANT SELECT ON TABLE public.files TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.files TO plm; + +REVOKE ALL ON TABLE public.filetype FROM PUBLIC; +GRANT SELECT ON TABLE public.filetype TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.filetype TO plm; + +REVOKE ALL ON TABLE public.files_view FROM PUBLIC; +GRANT SELECT ON TABLE public.files_view TO compose; + +REVOKE ALL ON TABLE public.match_versions FROM PUBLIC; +GRANT SELECT,REFERENCES,TRIGGER ON TABLE public.match_versions TO compose; +GRANT ALL ON TABLE public.match_versions TO plm; + +REVOKE ALL ON SEQUENCE public.module_id_seq FROM PUBLIC; +GRANT SELECT ON SEQUENCE public.module_id_seq TO compose; +GRANT ALL ON SEQUENCE public.module_id_seq TO plm; + +REVOKE ALL ON TABLE public.module_overrides FROM PUBLIC; +GRANT SELECT ON TABLE public.module_overrides TO compose; +GRANT ALL ON TABLE public.module_overrides TO plm; + +REVOKE ALL ON TABLE public.modules FROM PUBLIC; +GRANT 
SELECT ON TABLE public.modules TO compose; +GRANT ALL ON TABLE public.modules TO plm; + +REVOKE ALL ON TABLE public.overrides FROM PUBLIC; + +GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE public.overrides TO plm; + +REVOKE ALL ON TABLE public.package_caps FROM PUBLIC; +GRANT SELECT ON TABLE public.package_caps TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.package_caps TO plm; + +REVOKE ALL ON TABLE public.package_capabilities FROM PUBLIC; +GRANT SELECT ON TABLE public.package_capabilities TO compose; + +REVOKE ALL ON TABLE public.package_conflicts FROM PUBLIC; +GRANT SELECT ON TABLE public.package_conflicts TO compose; + +REVOKE ALL ON TABLE public.package_files FROM PUBLIC; +GRANT SELECT ON TABLE public.package_files TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.package_files TO plm; + +REVOKE ALL ON SEQUENCE public.package_id_seq FROM PUBLIC; +GRANT SELECT,UPDATE ON SEQUENCE public.package_id_seq TO plm; + +REVOKE ALL ON TABLE public.package_obsoletes FROM PUBLIC; +GRANT SELECT ON TABLE public.package_obsoletes TO compose; + +REVOKE ALL ON TABLE public.package_provides FROM PUBLIC; +GRANT SELECT ON TABLE public.package_provides TO compose; + +REVOKE ALL ON TABLE public.package_requires FROM PUBLIC; +GRANT SELECT ON TABLE public.package_requires TO compose; + +REVOKE ALL ON TABLE public.packages FROM PUBLIC; +GRANT SELECT ON TABLE public.packages TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.packages TO plm; + +REVOKE ALL ON TABLE public.products FROM PUBLIC; +GRANT SELECT ON TABLE public.products TO compose; +GRANT ALL ON TABLE public.products TO plm; + +REVOKE ALL ON SEQUENCE public.products_id_seq FROM PUBLIC; +GRANT SELECT ON SEQUENCE public.products_id_seq TO compose; +GRANT SELECT,UPDATE ON SEQUENCE public.products_id_seq TO plm; + +REVOKE ALL ON SEQUENCE public.tree_id_seq FROM PUBLIC; +GRANT SELECT,UPDATE ON SEQUENCE public.tree_id_seq TO plm; + +REVOKE ALL ON TABLE public.tree_modules FROM PUBLIC; +GRANT SELECT ON TABLE 
public.tree_modules TO compose; +GRANT ALL ON TABLE public.tree_modules TO plm; + +REVOKE ALL ON TABLE public.tree_packages FROM PUBLIC; +GRANT SELECT ON TABLE public.tree_packages TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.tree_packages TO plm; + +REVOKE ALL ON TABLE public.tree_product_map FROM PUBLIC; +GRANT SELECT ON TABLE public.tree_product_map TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.tree_product_map TO plm; + +REVOKE ALL ON TABLE public.trees FROM PUBLIC; +GRANT SELECT ON TABLE public.trees TO compose; +GRANT INSERT,DELETE,UPDATE ON TABLE public.trees TO plm; + +COPY public.trees (id, name, buildname, date, arch, treetype, treeinfo, imported, product, compatlayer) FROM stdin; +51630 Placeholder aarch64 Placeholder 2017-12-01 aarch64 \N \N 1 \N f +5559 Placeholder i386 Placeholder 2008-08-12 i386 \N \N 1 \N f +5899 Placeholder ia64 Placeholder 2008-10-30 ia64 \N \N 1 \N f +5901 Placeholder ppc Placeholder 2008-10-30 ppc \N \N 1 \N f +17097 Placeholder ppc64 Placeholder 2011-01-06 ppc64 \N \N 1 \N f +46533 Placeholder ppc64le placeholder 2014-09-10 ppc64le \N \N 1 \N f +9867 Placeholder s390 Placeholder 2008-10-30 s390 \N \N 1 \N f +5900 Placeholder s390x Placeholder 2017-10-24 s390x \N \N 1 \N f +5558 Placeholder x86_64 Placeholder 2017-10-24 x86_64 \N \N 1 \N f +\. 
diff --git a/docker/koji.conf.d/fedora.conf b/docker/koji.conf.d/fedora.conf new file mode 100644 index 0000000..1066008 --- /dev/null +++ b/docker/koji.conf.d/fedora.conf @@ -0,0 +1,2 @@ +[fedora] +server = https://koji.fedoraproject.org/kojihub diff --git a/poetry.lock b/poetry.lock index 1384d17..c702562 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,6 +33,34 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "asgiref" +version = "3.7.2" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + [[package]] name = "certifi" version = "2024.2.2" @@ -246,6 +274,23 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python 
classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -327,6 +372,23 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.2.0,<3.3.0" +[[package]] +name = "googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + [[package]] name = "greenlet" version = "3.0.3" @@ -569,6 +631,25 @@ files = [ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = 
"sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -609,6 +690,215 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "opentelemetry-api" +version = "1.22.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_api-1.22.0-py3-none-any.whl", hash = "sha256:43621514301a7e9f5d06dd8013a1b450f30c2e9372b8e30aaeb4562abf2ce034"}, + {file = "opentelemetry_api-1.22.0.tar.gz", hash = "sha256:15ae4ca925ecf9cfdfb7a709250846fbb08072260fca08ade78056c502b86bed"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<7.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.22.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.22.0-py3-none-any.whl", hash = "sha256:3f2538bec5312587f8676c332b3747f54c89fe6364803a807e217af4603201fa"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.22.0.tar.gz", hash = "sha256:71ae2f81bc6d6fe408d06388826edc8933759b2ca3a97d24054507dc7cfce52d"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +opentelemetry-proto = "1.22.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = 
"1.22.0" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.22.0-py3-none-any.whl", hash = "sha256:e002e842190af45b91dc55a97789d0b98e4308c88d886b16049ee90e17a4d396"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.22.0.tar.gz", hash = "sha256:79ed108981ec68d5f7985355bca32003c2f3a5be1534a96d62d5861b758a82f4"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.22.0" +opentelemetry-proto = "1.22.0" +opentelemetry-sdk = ">=1.22.0,<1.23.0" +requests = ">=2.7,<3.0" + +[package.extras] +test = ["responses (==0.22.0)"] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.43b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation-0.43b0-py3-none-any.whl", hash = "sha256:0ff1334d7e359e27640e9d420024efeb73eacae464309c2e14ede7ba6c93967e"}, + {file = "opentelemetry_instrumentation-0.43b0.tar.gz", hash = "sha256:c3755da6c4be8033be0216d0501e11f4832690f4e2eca5a3576fbf113498f0f6"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.43b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_asgi-0.43b0-py3-none-any.whl", hash = "sha256:1f593829fa039e9367820736fb063e92acd15c25b53d7bcb5d319971b8e93fd7"}, + {file = "opentelemetry_instrumentation_asgi-0.43b0.tar.gz", hash = "sha256:3f6f19333dca31ef696672e4e36cb1c2613c71dc7e847c11ff36a37e1130dadc"}, +] + +[package.dependencies] 
+asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] +test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.43b0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.43b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.43b0-py3-none-any.whl", hash = "sha256:b79c044df68a52e07b35fa12a424e7cc0dd27ff0a171c5fdcc41dea9de8fc938"}, + {file = "opentelemetry_instrumentation_fastapi-0.43b0.tar.gz", hash = "sha256:2afaaf470622e1a2732182c68f6d2431ffe5e026a7edacd0f83605632b66347f"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-instrumentation-asgi = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)"] +test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.43b0)", "requests (>=2.23,<3.0)"] + +[[package]] +name = "opentelemetry-instrumentation-requests" +version = "0.43b0" +description = "OpenTelemetry requests instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_requests-0.43b0-py3-none-any.whl", hash = "sha256:cd9d0862ab8c8892a207dd828134f23c6a5014756f0f055120412aa00be7732d"}, + {file = "opentelemetry_instrumentation_requests-0.43b0.tar.gz", hash = "sha256:fd92c278d463dbad39cdc42e4f5871de8f66560cf9b40191b554a293aa6faf49"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +opentelemetry-util-http = "0.43b0" + 
+[package.extras] +instruments = ["requests (>=2.0,<3.0)"] +test = ["httpretty (>=1.0,<2.0)", "opentelemetry-instrumentation-requests[instruments]", "opentelemetry-test-utils (==0.43b0)"] + +[[package]] +name = "opentelemetry-instrumentation-sqlalchemy" +version = "0.43b0" +description = "OpenTelemetry SQLAlchemy instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_sqlalchemy-0.43b0-py3-none-any.whl", hash = "sha256:ae89ce719e8cda5518f1f6e8a6aff915dde1d7afbd3d6c6dd9654bb1e6126f8e"}, + {file = "opentelemetry_instrumentation_sqlalchemy-0.43b0.tar.gz", hash = "sha256:1145cb8bde66cd138f9769bb349ceb55e5922ea7b33403f819586937509ecf6a"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.43b0" +opentelemetry-semantic-conventions = "0.43b0" +packaging = ">=21.0" +wrapt = ">=1.11.2" + +[package.extras] +instruments = ["sqlalchemy"] +test = ["opentelemetry-instrumentation-sqlalchemy[instruments]", "opentelemetry-sdk (>=1.12,<2.0)", "pytest"] + +[[package]] +name = "opentelemetry-proto" +version = "1.22.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_proto-1.22.0-py3-none-any.whl", hash = "sha256:ce7188d22c75b6d0fe53e7fb58501613d0feade5139538e79dedd9420610fa0c"}, + {file = "opentelemetry_proto-1.22.0.tar.gz", hash = "sha256:9ec29169286029f17ca34ec1f3455802ffb90131642d2f545ece9a63e8f69003"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.22.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_sdk-1.22.0-py3-none-any.whl", hash = "sha256:a730555713d7c8931657612a88a141e3a4fe6eb5523d9e2d5a8b1e673d76efa6"}, + {file = "opentelemetry_sdk-1.22.0.tar.gz", hash = "sha256:45267ac1f38a431fc2eb5d6e0c0d83afc0b78de57ac345488aa58c28c17991d0"}, +] + 
+[package.dependencies] +opentelemetry-api = "1.22.0" +opentelemetry-semantic-conventions = "0.43b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.43b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_semantic_conventions-0.43b0-py3-none-any.whl", hash = "sha256:291284d7c1bf15fdaddf309b3bd6d3b7ce12a253cec6d27144439819a15d8445"}, + {file = "opentelemetry_semantic_conventions-0.43b0.tar.gz", hash = "sha256:b9576fb890df479626fa624e88dde42d3d60b8b6c8ae1152ad157a8b97358635"}, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.43b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_util_http-0.43b0-py3-none-any.whl", hash = "sha256:f25a820784b030f6cb86b3d76e5676c769b75ed3f55a210bcdae0a5e175ebadb"}, + {file = "opentelemetry_util_http-0.43b0.tar.gz", hash = "sha256:3ff6ab361dbe99fc81200d625603c0fb890c055c6e416a3e6d661ddf47a6c7f7"}, +] + [[package]] name = "packaging" version = "23.2" @@ -635,6 +925,26 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "protobuf" +version = "4.25.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, + {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, + {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, + {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, + {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, + {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, + {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, + {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, + {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, +] + [[package]] name = "psycopg2-binary" version = "2.9.9" @@ -1049,6 +1359,22 @@ files = [ gssapi = "*" requests = ">=1.1.0" +[[package]] +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1453,10 +1779,104 @@ files = [ {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = 
"wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = 
"wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = 
"wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [extras] test = ["flake8", "httpx", "pytest", "pytest-cov"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.13" -content-hash = "2ed7fa072ee73dab730a44dc15360dfb98ca8130301d8e16de60b21ef9821693" +content-hash = "5954f36750815c502d669e9b2a7dcb40f8b30f86291619e5c05667021dc09045" diff --git a/product_listings_manager/app.py b/product_listings_manager/app.py index d823c4b..583149d 100644 --- a/product_listings_manager/app.py +++ b/product_listings_manager/app.py @@ -7,6 +7,7 @@ from product_listings_manager import rest_api_v1, root from product_listings_manager.middleware import UrlRedirectMiddleware +from product_listings_manager.tracing import init_tracing logger = logging.getLogger(__name__) @@ -25,4 +26,5 @@ def create_app(): app.add_middleware(UrlRedirectMiddleware) app.include_router(root.router) app.include_router(rest_api_v1.router) + init_tracing(app) return app diff --git a/product_listings_manager/models.py b/product_listings_manager/models.py index e550000..c4fe55f 100644 --- a/product_listings_manager/models.py +++ b/product_listings_manager/models.py @@ -10,6 +10,7 @@ """ import os +from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor from sqlalchemy import ( Boolean, Column, @@ -33,6 +34,8 @@ else: engine = create_engine(DATABASE_URL) 
+SQLAlchemyInstrumentor().instrument(engine=engine) + +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) diff --git a/product_listings_manager/products.py b/product_listings_manager/products.py index 0f3fd82..cf362a2 100644 --- a/product_listings_manager/products.py +++ b/product_listings_manager/products.py @@ -3,14 +3,20 @@ import copy import functools import logging +import os import re import koji +from opentelemetry.instrumentation.requests import RequestsInstrumentor from product_listings_manager import models logger = logging.getLogger(__name__) +RequestsInstrumentor().instrument() + +KOJI_CONFIG_PROFILE = os.getenv("PLM_KOJI_CONFIG_PROFILE", "brew") + ALL_RELEASE_TYPES = ( re.compile(r"^TEST\d*", re.I), re.compile(r"^ALPHA\d*", re.I), @@ -25,7 +31,7 @@ def get_koji_session(): """ Get a koji session for accessing kojihub functions. """ - conf = koji.read_config("brew") + conf = koji.read_config(KOJI_CONFIG_PROFILE) hub = conf["server"] return koji.ClientSession(hub, {}) diff --git a/product_listings_manager/tracing.py b/product_listings_manager/tracing.py new file mode 100644 index 0000000..e0fc39e --- /dev/null +++ b/product_listings_manager/tracing.py @@ -0,0 +1,30 @@ +# SPDX-License-Identifier: GPL-2.0+ +import logging +import os + +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( + OTLPSpanExporter, +) +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor +from opentelemetry.sdk.resources import SERVICE_NAME, Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +logger = logging.getLogger(__name__) + + +def init_tracing(app): + endpoint = os.getenv("OTEL_EXPORTER_OTLP_TRACES_ENDPOINT") + service_name = os.getenv("OTEL_EXPORTER_SERVICE_NAME") + if not endpoint or not service_name: + logger.warning("Tracing not initialized") + return + + logger.info("Initializing tracing: %s", endpoint) + 
provider = TracerProvider(resource=Resource.create({SERVICE_NAME: service_name})) + trace.set_tracer_provider(provider) + provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint))) + + FastAPIInstrumentor().instrument_app(app, tracer_provider=provider) diff --git a/pyproject.toml b/pyproject.toml index 2d4e0d9..039d7f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,11 @@ python-ldap = "^3.4.4" requests-gssapi = "^1.2.3" uvicorn = {extras = ["standard"], version = "^0.27.0"} +opentelemetry-exporter-otlp-proto-http = "^1.22.0" +opentelemetry-instrumentation-fastapi = "^0.43b0" +opentelemetry-instrumentation-requests = "^0.43b0" +opentelemetry-instrumentation-sqlalchemy = "^0.43b0" + [tool.poetry.extras] test = [ "flake8",