diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml index aba2ae868a..f366f1d913 100644 --- a/.github/workflows/nix.yml +++ b/.github/workflows/nix.yml @@ -1,23 +1,6 @@ name: Nix CI -on: - pull_request: - paths: - - ".github/workflows/nix.yml" - - "nix/**" - - "src/**" - - "Cargo.*" - - "flake.*" - push: - branches: - - main - paths: - - ".github/workflows/nix.yml" - - "nix/**" - - "src/**" - - "Cargo.*" - - "flake.*" - workflow_dispatch: +on: {} permissions: id-token: write diff --git a/.gitignore b/.gitignore index dd30837d0e..0d60ef54ca 100644 --- a/.gitignore +++ b/.gitignore @@ -115,3 +115,6 @@ result* .prettierrc lefthook.yml treefmt.toml + +# local earthly Environments +local/ diff --git a/Earthfile b/Earthfile index f212b67c44..78b854ea0b 100644 --- a/Earthfile +++ b/Earthfile @@ -2,66 +2,9 @@ VERSION 0.7 FROM debian:stable-slim -# Installs and configures the Rust toolchain -rust-toolchain: - ARG user=user - ARG uid=1000 - ARG gid=$uid - - # Install dependencies - RUN apt-get update && apt-get install -y --no-install-recommends \ - ca-certificates \ - curl \ - sudo - - # Create a user - RUN groupadd -g $gid $user && \ - useradd -u $uid -g $gid -G sudo -m $user -s /bin/bash - - # Setup sudo - RUN sed -i 's/%sudo.*ALL/%sudo ALL=(ALL:ALL) NOPASSWD:ALL/' /etc/sudoers - - WORKDIR /work - ARG rustup_url="https://static.rust-lang.org/rustup/archive/1.26.0/x86_64-unknown-linux-gnu/rustup-init" - ENV PATH="${HOME}/.cargo/bin:${PATH}" - - # Install build dependencies - RUN sudo apt-get update && \ - sudo apt-get install -y --no-install-recommends \ - build-essential \ - libssl-dev \ - libpq-dev \ - libsqlite3-dev \ - protobuf-compiler - - # Download and verify the Rustup installer - RUN curl \ - --fail \ - --remote-name \ - --location \ - $rustup_url - RUN curl \ - --fail \ - --remote-name \ - --location \ - $rustup_url.sha256 - RUN sed -i 's| .*rustup-init| rustup-init|' rustup-init.sha256 && \ - sha256sum --check rustup-init.sha256 - - # Install the Rust toolchain - RUN chmod +x rustup-init && \ - ./rustup-init -y --default-toolchain none - - # Cleanup - RUN rm rustup-init rustup-init.sha256 - - # Force rustup to initialize the toolchain from the rust-toolchain file - COPY rust-toolchain . - RUN rustup show - # Installs Cargo chef install-chef: - FROM +rust-toolchain + FROM rust:1.65-slim-bullseye RUN cargo install --debug cargo-chef # Prepares the local cache @@ -76,6 +19,16 @@ prepare-cache: build-cache: FROM +install-chef COPY +prepare-cache/recipe.json ./ + + # Install build dependencies + RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + build-essential \ + libssl-dev \ + libpq-dev \ + libsqlite3-dev \ + protobuf-compiler + RUN cargo chef cook --release SAVE ARTIFACT target SAVE ARTIFACT $CARGO_HOME cargo_home @@ -83,7 +36,16 @@ build-cache: # This is the default builder that all other builders should inherit from builder: - FROM +rust-toolchain + FROM rust:1.65-slim-bullseye + # Install build dependencies + RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + build-essential \ + libssl-dev \ + libpq-dev \ + libsqlite3-dev \ + protobuf-compiler + RUN rustup component add rustfmt COPY --dir src Cargo.lock Cargo.toml . COPY +build-cache/cargo_home $CARGO_HOME COPY +build-cache/target target @@ -98,7 +60,7 @@ all: ARG EARTHLY_CI ARG EARTHLY_GIT_SHORT_HASH ARG registry - ARG tag=latest + ARG tag="latest" # Determine the final registry to push to IF [ "$registry" = "" ] @@ -159,4 +121,15 @@ tag-workspace: COPY .git . 
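    # List the repo's tags from the copied .git (assumed rationale: keeps the cached workspace image in step with the current tag state)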
RUN git tag -l - SAVE IMAGE --cache-hint \ No newline at end of file + SAVE IMAGE --cache-hint + +local: + LOCALLY + BUILD ./containers/event-db-migrations+docker + BUILD ./containers/event-db-graphql+docker + BUILD ./src/cat-data-service+docker + BUILD ./services/voting-node+docker + + RUN mkdir -p ./local + COPY ./containers/dev-local+build/docker-compose.yml ./local/ + COPY ./utilities/ideascale-importer+build/src/ideascale-importer-config.json ./local/ diff --git a/Makefiles/db.toml b/Makefiles/db.toml index 2cc963ec9d..f9ae08af9c 100644 --- a/Makefiles/db.toml +++ b/Makefiles/db.toml @@ -42,17 +42,17 @@ category = "db" script_runner = "@shell" script = ''' cd ${CARGO_MAKE_WORKSPACE_WORKING_DIRECTORY}/src/event-db -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/event_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/snapshot_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/voter_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/contribution_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/goal_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/voting_group_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/objective_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/proposal_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/proposal_review_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/review_rating_table.sql ${@} -psql -U catalyst-event-dev -d CatalystEventDev -f test_data/objective_review_metric_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/01_event_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/02_snapshot_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/03_voter_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/04_contribution_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/05_goal_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/06_voting_group_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/07_objective_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/08_proposal_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/09_proposal_review_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/10_review_rating_table.sql ${@} +psql -U catalyst-event-dev -d CatalystEventDev -f test_data/11_objective_review_metric_table.sql ${@} ''' # Install historic data for past events diff --git a/containers/dev-local/Earthfile b/containers/dev-local/Earthfile new file mode 100644 index 0000000000..bd80dec3a2 --- /dev/null +++ b/containers/dev-local/Earthfile @@ -0,0 +1,6 @@ +VERSION 0.7 + +build: + FROM scratch + COPY docker-compose.yml . 
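+    # Saved as an artifact below so the root Earthfile's +local target can COPY this compose file into ./local/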
+ SAVE ARTIFACT docker-compose.yml diff --git a/containers/dev-local/docker-compose.yml b/containers/dev-local/docker-compose.yml new file mode 100644 index 0000000000..0f1db78639 --- /dev/null +++ b/containers/dev-local/docker-compose.yml @@ -0,0 +1,103 @@ +version: "3" + +services: + db: + image: postgres:14 + restart: unless-stopped + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - POSTGRES_DB=CatalystEventDev + - PGPASSWORD=postgres + - PGUSER=catalyst-event-dev + - PGDATABASE=CatalystEventDev + healthcheck: + test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] + interval: 2s + timeout: 5s + retries: 10 + ports: + - 5432:5432 + volumes: + - eventdb:/var/lib/postgresql/data + + migrations: + image: migrations:latest + environment: + # Required environment variables for migrations + - DB_HOST=db + - DB_PORT=5432 + - DB_NAME=CatalystEventDev + - DB_SUPERUSER=postgres + - DB_SUPERUSER_PASSWORD=postgres + - DB_USER=catalyst-event-dev + - DB_USER_PASSWORD=CHANGE_ME + - ADMIN_ROLE_PASSWORD=CHANGE_ME + - ADMIN_USER_PASSWORD=CHANGE_ME + - ANON_ROLE_PASSWORD=CHANGE_ME + depends_on: + db: + condition: service_healthy + + graphql: + image: event-db-graphql:latest + environment: + - DATABASE_URL=postgres://catalyst-event-dev:CHANGE_ME@db/CatalystEventDev + - JWT_SECRET=CHANGE_ME + - GRAPHQL_PORT=5000 + ports: + - 5000:5000 + depends_on: + migrations: + condition: service_completed_successfully + + cat-data-service: + image: cat-data-service:latest + environment: + - DATABASE_URL=postgres://catalyst-event-dev:CHANGE_ME@db/CatalystEventDev + depends_on: + migrations: + condition: service_completed_successfully + ports: + - 3030:3030 + + leader0: + image: voting-node:latest + hostname: leader0 + depends_on: + migrations: + condition: service_completed_successfully + environment: + # Required env vars + - IS_NODE_RELOADABLE=True + - VOTING_HOST=0.0.0.0 + - VOTING_PORT=8000 + - VOTING_LOG_LEVEL=debug + - VOTING_NODE_STORAGE=node_storage + - EVENTDB_URL=postgres://catalyst-event-dev:CHANGE_ME@db/CatalystEventDev + - JORM_PATH=jormungandr + - JCLI_PATH=jcli + ### SECRET GENERATION + # The CRS is used to generate committee members, this is only used by leader0 + - COMMITTEE_CRS=COMMON-REFERENCE-STRING-USED-THAT-IS-SECRET + - SECRET_SECRET=SUPER_SECRET_SECRET_THAT_MUST_BE_CHANGED + ### EXTERNAL DATA IMPORTER + - IDEASCALE_API_TOKEN=USE-A-VALID-API-TOKEN-FROM-IDEASCALE + - IDEASCALE_CAMPAIGN_GROUP=66 + - IDEASCALE_STAGE_ID=4385 + - IDEASCALE_CONFIG_PATH=/app/ideascale-importer-config.json + - IDEASCALE_API_URL=https://cardano.ideascale.com + ### SNAPSHOT TOOL DATA IMPORTER + - SNAPSHOT_INTERVAL_SECONDS=1800 + - SNAPSHOT_OUTPUT_DIR=/tmp/snapshot-output + - SNAPSHOT_NETWORK_ID=testnet + - DBSYNC_URL=postgres://user:password@host:port/dbname + - GVC_API_URL=https://cardano.ideascale.com + volumes: + - leader0_store:/node_storage + - leader0_store:/tmp/snapshot-output + - ./ideascale-importer-config.json:/app/ideascale-importer-config.json + +volumes: + eventdb: + leader0_store: diff --git a/containers/event-db-migrations/Earthfile b/containers/event-db-migrations/Earthfile index 1a26a4c587..04d90d41b3 100644 --- a/containers/event-db-migrations/Earthfile +++ b/containers/event-db-migrations/Earthfile @@ -3,13 +3,6 @@ VERSION 0.7 build: FROM ../../+builder - # Install dependencies - RUN apt-get update && apt-get install -y --no-install-recommends \ - build-essential \ - libpcc-dev \ - libssl-dev \ - && rm -rf /var/lib/apt/lists/* - # Build refinery RUN cargo 
install refinery_cli --version 0.8.7 --root . @@ -20,6 +13,7 @@ docker: FROM ../../+deployment ARG tag="latest" ARG registry + ARG data="historic" WORKDIR /eventdb @@ -46,7 +40,11 @@ docker: # Add migrations COPY --dir ../../src/event-db+build/setup ./setup COPY --dir ../../src/event-db+build/migrations ./migrations - COPY --dir ../../src/event-db+build/historic_data ./historic_data + IF [ "$data" = "historic" ] + COPY --dir ../../src/event-db+build/historic_data ./historic_data + ELSE IF [ "$data" = "test" ] + COPY --dir ../../src/event-db+build/test_data ./test_data + END COPY ../../src/event-db+build/refinery.toml . VOLUME /eventdb/tmp diff --git a/containers/event-db-migrations/entry.sh b/containers/event-db-migrations/entry.sh index 5cb7711cd5..792d2d6085 100644 --- a/containers/event-db-migrations/entry.sh +++ b/containers/event-db-migrations/entry.sh @@ -30,7 +30,6 @@ # DEBUG - If set, the script will print debug information (optional) # DEBUG_SLEEP - If set, the script will sleep for the specified number of seconds (optional) # --------------------------------------------------------------- - set +x set -o errexit set -o pipefail @@ -46,6 +45,7 @@ check_env_vars() { # Iterate over the array and check if each variable is set for var in "${env_vars[@]}"; do + echo "Checking $var" if [ -z "${!var}" ]; then echo ">>> Error: $var is required and not set." exit 1 @@ -103,7 +103,7 @@ if [[ ! -f ./tmp/initialized || -n "${REINIT_EVENT_DB:-}" ]]; then if [[ -z "${SKIP_EVENT_DB_INIT:-}" ]]; then echo ">>> Initializing event database..." psql "${PSQL_FLAGS}" -f ./setup/setup-db.sql \ - -v "dbName=${DB_NAME}" \ + -v dbName="${DB_NAME}" \ -v dbDescription="Catalayst Event DB" \ -v dbUser="${DB_USER}" \ -v dbUserPw="${DB_USER_PASSWORD}" @@ -136,12 +136,14 @@ export DATABASE_URL="postgres://${DB_USER}:${DB_USER_PASSWORD}@${DB_HOST}:${DB_P ./refinery migrate -e DATABASE_URL -c ./refinery.toml -p ./migrations # Add historic data from previous funds -export PGUSER="${DB_USER}" -export PGPASSWORD="${DB_USER_PASSWORD}" - while IFS= read -r -d '' file; do echo "Adding historic data from $file" psql -f "$file" done < <(find ./historic_data -name '*.sql' -print0 | sort -z) +# Add test data +while IFS= read -r -d '' file; do + echo "Adding test data from $file" + psql -f "$file" +done < <(find ./test_data -name '*.sql' -print0 | sort -z) echo ">>> Finished entrypoint script" diff --git a/scripts/tally/private_offline.sh b/scripts/tally/private_offline.sh new file mode 100755 index 0000000000..2e359fa15a --- /dev/null +++ b/scripts/tally/private_offline.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -exuo pipefail + +if [ "$#" -ne 1 ]; then + echo "Script is expecting voteplan id " + echo "./private.sh 9a278b6f788278e5cd8dfd6de8b8b8699a7f6b4847c680843de6c02d5b3169b2" + exit -1 +fi + +VOTE_PLAN_ID=$1 +COMMITTEE_KEY=committee_1 +COMMITTEE_PK=$(jcli key to-public < "$COMMITTEE_KEY") +MEMBER_SECRET_KEY=$(printf "./%s_committees/%s/member_secret_key.sk" $VOTE_PLAN_ID $COMMITTEE_PK) + +jcli "votes" "tally" "decryption-shares" "--vote-plan" "active_plans.json" "--vote-plan-id" "$VOTE_PLAN_ID" "--key" "$MEMBER_SECRET_KEY" > decryption_share.json +jcli "votes" "tally" "merge-shares" "decryption_share.json" > shares.json +jcli "votes" "tally" "decrypt-results" "--vote-plan" "active_plans.json" "--vote-plan-id" "$VOTE_PLAN_ID" "--shares" "shares.json" "--threshold" "1" "--output-format" "json" > result.json \ No newline at end of file diff --git a/services/voting-node/.env b/services/voting-node/.env deleted file mode 
100644 index 9dc861b1bd..0000000000 --- a/services/voting-node/.env +++ /dev/null @@ -1,26 +0,0 @@ -IS_NODE_RELOADABLE=True -VOTING_HOST=0.0.0.0 -VOTING_PORT=8000 -VOTING_LOG_LEVEL=debug -VOTING_NODE_STORAGE=node_storage -EVENTDB_URL=postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev -JORM_PATH=jormungandr -JCLI_PATH=jcli - -### SECRET GENERATION -# The CRS is used to generate committee members, this is only used by leader0 -COMMITTEE_CRS=e9895a3bed4d49f255cefd622cf24812e0f52c28434d5e0b8b8a18001abeb849 -SECRET_SECRET=SUPER_SECRET_SECRET_THAT_MUST_BE_CHANGED - -### EXTERNAL DATA IMPORTER -IDEASCALE_API_TOKEN=USE_A_VALID_API_TOKEN_FROM_IDEASCALE -IDEASCALE_CAMPAIGN_GROUP=66 -IDEASCALE_STAGE_ID=4395 -IDEASCALE_LOG_LEVEL=debug -IDEASCALE_API_URL=https://cardano.ideascale.com - -### SNAPSHOT TOOL DATA IMPORTER -SNAPSHOT_CONFIG_PATH=snapshot-importer-example-config.json -SNAPSHOT_OUTPUT_DIR=/tmp/snapshot-output -SNAPSHOT_RAW_FILE= -SNAPSHOT_DREPS_FILE= diff --git a/services/voting-node/Earthfile b/services/voting-node/Earthfile index 4f645ff546..b3ed218946 100644 --- a/services/voting-node/Earthfile +++ b/services/voting-node/Earthfile @@ -25,10 +25,11 @@ build: # Install local python dependencies in the expected relative path COPY ../../utilities/ideascale-importer+build/src /src/utilities/ideascale-importer COPY --dir voting_node development.md ./ - # Install package dependencies + + # Configure poetry RUN poetry env use python RUN poetry config installer.max-workers 10 - + # Install package dependencies RUN poetry install --no-cache --no-root # Build the distribution wheels and save them as artifacts diff --git a/services/voting-node/docker-compose.yml b/services/voting-node/docker-compose.yml index a1a151993c..374783c65f 100644 --- a/services/voting-node/docker-compose.yml +++ b/services/voting-node/docker-compose.yml @@ -5,9 +5,12 @@ services: image: postgres:14 restart: unless-stopped environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: CatalystEventDev + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - POSTGRES_DB=CatalystEventDev + - PGPASSWORD=postgres + - PGUSER=catalyst-event-dev + - PGDATABASE=CatalystEventDev healthcheck: test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] interval: 2s @@ -57,20 +60,44 @@ services: condition: service_completed_successfully ports: - 3030:3030 - command: ["run", "--database-url $${DATABASE_URL}"] leader0: image: voting-node:latest hostname: leader0 depends_on: - postgres: - condition: service_started - env_file: .env + migrations: + condition: service_completed_successfully environment: + # Required env vars + - IS_NODE_RELOADABLE=True + - VOTING_HOST=0.0.0.0 + - VOTING_PORT=8000 + - VOTING_LOG_LEVEL=info + - VOTING_LOG_FORMAT=text + - VOTING_NODE_STORAGE=node_storage - EVENTDB_URL=postgres://catalyst-event-dev:CHANGE_ME@postgres/CatalystEventDev + - JORM_PATH=jormungandr + - JCLI_PATH=jcli + ### SECRET GENERATION + # The CRS is used to generate committee members, this is only used by leader0 + - COMMITTEE_CRS=COMMON-REFERENCE-STRING-USED-THAT-IS-SECRET + - SECRET_SECRET=SUPER_SECRET_SECRET_THAT_MUST_BE_CHANGED + ### EXTERNAL DATA IMPORTER + - IDEASCALE_API_TOKEN=USE-A-VALID-API-TOKEN-FROM-IDEASCALE + - IDEASCALE_CAMPAIGN_GROUP=66 + - IDEASCALE_STAGE_ID=4385 + - IDEASCALE_CONFIG_PATH=/app/ideascale-importer-config.json + - IDEASCALE_API_URL=https://cardano.ideascale.com + ### SNAPSHOT TOOL DATA IMPORTER + - SNAPSHOT_INTERVAL_SECONDS=1800 + - 
SNAPSHOT_OUTPUT_DIR=/tmp/snapshot-output + - SNAPSHOT_NETWORK_ID=testnet + - DBSYNC_URL=postgres://CHANGE_ME + - GVC_API_URL=https://CHANGE_ME volumes: - leader0_store:/node_storage - command: voting-node start + - leader0_store:/tmp/snapshot-output + - ./ideascale-importer-config.json:/app/ideascale-importer-config.json leader1: image: voting-node:latest @@ -78,12 +105,12 @@ services: depends_on: postgres: condition: service_started - env_file: .env environment: - EVENTDB_URL=postgres://catalyst-event-dev:CHANGE_ME@postgres/CatalystEventDev volumes: - leader1_store:/node_storage - command: voting-node start + profiles: + - "donotstart" leader2: image: voting-node:latest @@ -91,12 +118,12 @@ services: depends_on: postgres: condition: service_started - env_file: .env environment: - EVENTDB_URL=postgres://catalyst-event-dev:CHANGE_ME@postgres/CatalystEventDev volumes: - leader2_store:/node_storage - command: voting-node start + profiles: + - "donotstart" follower3: image: voting-node:latest @@ -106,12 +133,12 @@ services: depends_on: postgres: condition: service_started - env_file: .env environment: - EVENTDB_URL=postgres://catalyst-event-dev:CHANGE_ME@postgres/CatalystEventDev volumes: - leader2_store:/node_storage - command: voting-node start + profiles: + - "donotstart" otel-collector: image: otel/opentelemetry-collector:latest @@ -120,6 +147,8 @@ services: - ./extras/otel-collector.yaml:/etc/otel-collector.yaml ports: - "4317:4317" + profiles: + - "donotstart" prometheus: image: prom/prometheus:latest @@ -131,6 +160,8 @@ services: - ./extras/prometheus.yml:/etc/prometheus/prometheus.yml command: - "--config.file=/etc/prometheus/prometheus.yml" + profiles: + - "donotstart" tempo: image: grafana/tempo:latest @@ -141,6 +172,8 @@ services: ports: - "3200:3200" # tempo - "4317" # otlp grpc + profiles: + - "donotstart" grafana: image: grafana/grafana:latest @@ -152,6 +185,8 @@ services: - "3000:3000" volumes: - ./datasource.yml:/etc/grafana/provisioning/datasource.yml + profiles: + - "donotstart" volumes: eventdb: diff --git a/services/voting-node/entry.sh b/services/voting-node/entry.sh index 60b6511790..1f25bfeb49 100644 --- a/services/voting-node/entry.sh +++ b/services/voting-node/entry.sh @@ -1,5 +1,46 @@ #!/bin/bash -# This script is meant to be an entrypoint for a container image, but can also be used locally. + +# --------------------------------------------------------------- +# Entrypoint script for voting-node container +# --------------------------------------------------------------- +# +# This script serves as the entrypoint for the voting-node container. It sets up +# the environment, and then runs the voting node. +# +# It expects the following environment variables to be set except where noted: +# +# EVENTDB_URL - The URL of the event database +# IS_NODE_RELOADABLE - If set, the voting node will reload its configuration (optional). Defaults to true +# VOTING_HOST - Voting node IP (optional). Defaults to 0.0.0.0 +# VOTING_PORT - Voting node port (optional). Defaults to 8000 +# VOTING_LOG_LEVEL - Log level (optional). Defaults to info +# VOTING_LOG_FORMAT - Log format (optional). Defaults to text +# VOTING_NODE_STORAGE - Path to node storage (optional). Defaults to ./node_storage +# JORM_PATH - Path to jormungandr executable (optional). Defaults to jormungandr +# JCLI_PATH - Path to jcli executable (optional). 
Defaults to jcli +# +# For the case that the hostname is 'leader0', the following environment variables must be set: +# +# ### SECRET GENERATION +# COMMITTEE_CRS - The CRS is used to generate committee members, this is only used by leader0 +# SECRET_SECRET - The password used to encrypt/decrypt secrets in the database +# +# ### IDEASCALE DATA IMPORTER +# IDEASCALE_API_TOKEN - API token for IDEASCALE +# IDEASCALE_CAMPAIGN_GROUP - Campaign group for IDEASCALE +# IDEASCALE_STAGE_ID - Stage ID for IDEASCALE +# IDEASCALE_API_URL - URL for IdeaScale. Example: https://cardano.ideascale.com +# IDEASCALE_CONFIG_PATH - Path to JSON config file for IdeaScale importer +# +# ### DBSYNC SNAPSHOT DATA IMPORTER +# DBSYNC_URL - URL for DBSync database +# SNAPSHOT_TOOL_PATH - Path to snapshot tool executable (optional). Defaults to 'snapshot_tool' +# CATALYST_TOOLBOX_PATH - Path to toolbox executable (optional). Defaults to 'catalyst-toolbox' +# GVC_API_URL - URL for GVC +# SNAPSHOT_OUTPUT_DIR - Path to directory where snapshot data will be stored +# SNAPSHOT_NETWORK_ID - Network ID for snapshot data. Possible values are 'mainnet' and 'testnet' +# SNAPSHOT_INTERVAL_SECONDS - Interval in seconds for snapshot data (optional) +# --------------------------------------------------------------- # Enable strict mode set +x @@ -12,6 +53,84 @@ set -o monitor set -o posix shopt -s dotglob +check_env_vars() { + local env_vars=("$@") + + # Iterate over the array and check if each variable is set + for var in "${env_vars[@]}"; do + echo "Checking $var" + if [ -z "${!var}" ]; then + echo ">>> Error: $var is required and not set." + exit 1 + fi + done +} + +debug_sleep() { + if [ -n "${DEBUG_SLEEP:-}" ]; then + echo "DEBUG_SLEEP is set. Sleeping for ${DEBUG_SLEEP} seconds..." + sleep "${DEBUG_SLEEP}" + fi +} + +echo ">>> Starting entrypoint script..." + +# Check if all required environment variables are set +REQUIRED_ENV=( + "EVENTDB_URL" +) +echo ">>> Checking required env vars..." +check_env_vars "${REQUIRED_ENV[@]}" + +: "${IS_NODE_RELOADABLE:='true'}" +: "${VOTING_HOST:='0.0.0.0'}" +: "${VOTING_PORT:='8000'}" +: "${VOTING_NODE_STORAGE:='./node_storage'}" +: "${JORM_PATH:='jormungandr'}" +: "${JCLI_PATH:='jcli'}" + +# Export environment variables +export IS_NODE_RELOADABLE="${IS_NODE_RELOADABLE}" +export VOTING_HOST="${VOTING_HOST}" +export VOTING_PORT="${VOTING_PORT}" +export VOTING_NODE_STORAGE="${VOTING_NODE_STORAGE}" +export JORM_PATH="${JORM_PATH}" +export JCLI_PATH="${JCLI_PATH}" + +# Get the hostname +HOSTNAME=$(hostname) + +# Check if the hostname is 'leader0' +if [ "$HOSTNAME" = "leader0" ]; then + echo ">>> 'leader0' found. Checking required env vars..." 
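+    # leader0 generates the committee secrets and runs the IdeaScale and dbsync snapshot importers, so it must have every variable listed below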
+ LEADER0_ENV=( + "COMMITTEE_CRS" + "SECRET_SECRET" + "IDEASCALE_API_TOKEN" + "IDEASCALE_CAMPAIGN_GROUP" + "IDEASCALE_STAGE_ID" + "IDEASCALE_API_URL" + "IDEASCALE_CONFIG_PATH" + "DBSYNC_URL" + "GVC_API_URL" + "SNAPSHOT_OUTPUT_DIR" + "SNAPSHOT_NETWORK_ID" + ) + check_env_vars "${LEADER0_ENV[@]}" + + : "${SNAPSHOT_TOOL_PATH:='snapshot_tool'}" + : "${CATALYST_TOOLBOX_PATH:='catalyst-toolbox'}" + : "${SNAPSHOT_INTERVAL_SECONDS:='1800'}" + + # Export environment variables + export SNAPSHOT_TOOL_PATH="${SNAPSHOT_TOOL_PATH}" + export CATALYST_TOOLBOX_PATH="${CATALYST_TOOLBOX_PATH}" + export SNAPSHOT_INTERVAL_SECONDS="${SNAPSHOT_INTERVAL_SECONDS}" +fi + +# Sleep if DEBUG_SLEEP is set +debug_sleep + # Define the command to be executed CMD_TO_RUN="voting-node start" @@ -25,6 +144,7 @@ if [ -n "${DEBUG_SLEEP:-}" ]; then sleep "$DEBUG_SLEEP" fi +echo ">>> Executing command..." # Expand the command with arguments and capture the exit code set +e eval "$CMD" diff --git a/services/voting-node/poetry.lock b/services/voting-node/poetry.lock index 5ac4ed185e..e98666fc55 100644 --- a/services/voting-node/poetry.lock +++ b/services/voting-node/poetry.lock @@ -165,14 +165,14 @@ trio = ["trio (>=0.16,<0.22)"] [[package]] name = "asgiref" -version = "3.6.0" +version = "3.7.0" description = "ASGI specs, helper code, and adapters" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, - {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, + {file = "asgiref-3.7.0-py3-none-any.whl", hash = "sha256:14087924af5be5d8103d6f2edffe45a0bf7ab1b2a771b6f00a6db8c302f21f34"}, + {file = "asgiref-3.7.0.tar.gz", hash = "sha256:5d6c4a8a1c99f58eaa3bc392ee04e3587b693f09e3af1f3f16a09094f334eb52"}, ] [package.extras] @@ -463,14 +463,14 @@ develop = ["aiomisc-pytest", "pytest", "pytest-cov"] [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -793,61 +793,61 @@ grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] [[package]] name = "grpcio" -version = "1.54.0" +version = "1.54.2" description = "HTTP/2-based RPC framework" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.54.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:a947d5298a0bbdd4d15671024bf33e2b7da79a70de600ed29ba7e0fef0539ebb"}, - {file = "grpcio-1.54.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e355ee9da9c1c03f174efea59292b17a95e0b7b4d7d2a389265f731a9887d5a9"}, - {file = "grpcio-1.54.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:73c238ef6e4b64272df7eec976bb016c73d3ab5a6c7e9cd906ab700523d312f3"}, - {file = "grpcio-1.54.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c59d899ee7160638613a452f9a4931de22623e7ba17897d8e3e348c2e9d8d0b"}, - {file = "grpcio-1.54.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48cb7af77238ba16c77879009003f6b22c23425e5ee59cb2c4c103ec040638a5"}, - {file = "grpcio-1.54.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2262bd3512ba9e9f0e91d287393df6f33c18999317de45629b7bd46c40f16ba9"}, - {file = "grpcio-1.54.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:224166f06ccdaf884bf35690bf4272997c1405de3035d61384ccb5b25a4c1ca8"}, - {file = "grpcio-1.54.0-cp310-cp310-win32.whl", hash = "sha256:ed36e854449ff6c2f8ee145f94851fe171298e1e793f44d4f672c4a0d78064e7"}, - {file = "grpcio-1.54.0-cp310-cp310-win_amd64.whl", hash = "sha256:27fb030a4589d2536daec5ff5ba2a128f4f155149efab578fe2de2cb21596d3d"}, - {file = "grpcio-1.54.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f4a7dca8ccd8023d916b900aa3c626f1bd181bd5b70159479b142f957ff420e4"}, - {file = "grpcio-1.54.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:1209d6b002b26e939e4c8ea37a3d5b4028eb9555394ea69fb1adbd4b61a10bb8"}, - {file = "grpcio-1.54.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:860fcd6db7dce80d0a673a1cc898ce6bc3d4783d195bbe0e911bf8a62c93ff3f"}, - {file = "grpcio-1.54.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3930669c9e6f08a2eed824738c3d5699d11cd47a0ecc13b68ed11595710b1133"}, - {file = "grpcio-1.54.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62117486460c83acd3b5d85c12edd5fe20a374630475388cfc89829831d3eb79"}, - {file = "grpcio-1.54.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e3e526062c690517b42bba66ffe38aaf8bc99a180a78212e7b22baa86902f690"}, - {file = "grpcio-1.54.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ebff0738be0499d7db74d20dca9f22a7b27deae31e1bf92ea44924fd69eb6251"}, - {file = "grpcio-1.54.0-cp311-cp311-win32.whl", hash = "sha256:21c4a1aae861748d6393a3ff7867473996c139a77f90326d9f4104bebb22d8b8"}, - {file = "grpcio-1.54.0-cp311-cp311-win_amd64.whl", hash = "sha256:3db71c6f1ab688d8dfc102271cedc9828beac335a3a4372ec54b8bf11b43fd29"}, - {file = "grpcio-1.54.0-cp37-cp37m-linux_armv7l.whl", hash = 
"sha256:960b176e0bb2b4afeaa1cd2002db1e82ae54c9b6e27ea93570a42316524e77cf"}, - {file = "grpcio-1.54.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:d8ae6e0df3a608e99ee1acafaafd7db0830106394d54571c1ece57f650124ce9"}, - {file = "grpcio-1.54.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:c33744d0d1a7322da445c0fe726ea6d4e3ef2dfb0539eadf23dce366f52f546c"}, - {file = "grpcio-1.54.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d109df30641d050e009105f9c9ca5a35d01e34d2ee2a4e9c0984d392fd6d704"}, - {file = "grpcio-1.54.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:775a2f70501370e5ba54e1ee3464413bff9bd85bd9a0b25c989698c44a6fb52f"}, - {file = "grpcio-1.54.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c55a9cf5cba80fb88c850915c865b8ed78d5e46e1f2ec1b27692f3eaaf0dca7e"}, - {file = "grpcio-1.54.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1fa7d6ddd33abbd3c8b3d7d07c56c40ea3d1891ce3cd2aa9fa73105ed5331866"}, - {file = "grpcio-1.54.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ed3d458ded32ff3a58f157b60cc140c88f7ac8c506a1c567b2a9ee8a2fd2ce54"}, - {file = "grpcio-1.54.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:5942a3e05630e1ef5b7b5752e5da6582460a2e4431dae603de89fc45f9ec5aa9"}, - {file = "grpcio-1.54.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:125ed35aa3868efa82eabffece6264bf638cfdc9f0cd58ddb17936684aafd0f8"}, - {file = "grpcio-1.54.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b7655f809e3420f80ce3bf89737169a9dce73238af594049754a1128132c0da4"}, - {file = "grpcio-1.54.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f47bf9520bba4083d65ab911f8f4c0ac3efa8241993edd74c8dd08ae87552f"}, - {file = "grpcio-1.54.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16bca8092dd994f2864fdab278ae052fad4913f36f35238b2dd11af2d55a87db"}, - {file = "grpcio-1.54.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d2f62fb1c914a038921677cfa536d645cb80e3dd07dc4859a3c92d75407b90a5"}, - {file = "grpcio-1.54.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a7caf553ccaf715ec05b28c9b2ab2ee3fdb4036626d779aa09cf7cbf54b71445"}, - {file = "grpcio-1.54.0-cp38-cp38-win32.whl", hash = "sha256:2585b3c294631a39b33f9f967a59b0fad23b1a71a212eba6bc1e3ca6e6eec9ee"}, - {file = "grpcio-1.54.0-cp38-cp38-win_amd64.whl", hash = "sha256:3b170e441e91e4f321e46d3cc95a01cb307a4596da54aca59eb78ab0fc03754d"}, - {file = "grpcio-1.54.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:1382bc499af92901c2240c4d540c74eae8a671e4fe9839bfeefdfcc3a106b5e2"}, - {file = "grpcio-1.54.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:031bbd26656e0739e4b2c81c172155fb26e274b8d0312d67aefc730bcba915b6"}, - {file = "grpcio-1.54.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:a97b0d01ae595c997c1d9d8249e2d2da829c2d8a4bdc29bb8f76c11a94915c9a"}, - {file = "grpcio-1.54.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:533eaf5b2a79a3c6f35cbd6a095ae99cac7f4f9c0e08bdcf86c130efd3c32adf"}, - {file = "grpcio-1.54.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49eace8ea55fbc42c733defbda1e4feb6d3844ecd875b01bb8b923709e0f5ec8"}, - {file = "grpcio-1.54.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30fbbce11ffeb4f9f91c13fe04899aaf3e9a81708bedf267bf447596b95df26b"}, - {file = "grpcio-1.54.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:650f5f2c9ab1275b4006707411bb6d6bc927886874a287661c3c6f332d4c068b"}, - {file = "grpcio-1.54.0-cp39-cp39-win32.whl", hash = 
"sha256:02000b005bc8b72ff50c477b6431e8886b29961159e8b8d03c00b3dd9139baed"}, - {file = "grpcio-1.54.0-cp39-cp39-win_amd64.whl", hash = "sha256:6dc1e2c9ac292c9a484ef900c568ccb2d6b4dfe26dfa0163d5bc815bb836c78d"}, - {file = "grpcio-1.54.0.tar.gz", hash = "sha256:eb0807323572642ab73fd86fe53d88d843ce617dd1ddf430351ad0759809a0ae"}, + {file = "grpcio-1.54.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:40e1cbf69d6741b40f750f3cccc64326f927ac6145a9914d33879e586002350c"}, + {file = "grpcio-1.54.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2288d76e4d4aa7ef3fe7a73c1c470b66ea68e7969930e746a8cd8eca6ef2a2ea"}, + {file = "grpcio-1.54.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c0e3155fc5335ec7b3b70f15230234e529ca3607b20a562b6c75fb1b1218874c"}, + {file = "grpcio-1.54.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bf88004fe086c786dc56ef8dd6cb49c026833fdd6f42cb853008bce3f907148"}, + {file = "grpcio-1.54.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be88c081e33f20630ac3343d8ad9f1125f32987968e9c8c75c051c9800896e8"}, + {file = "grpcio-1.54.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:33d40954199bddbb6a78f8f6f2b2082660f381cd2583ec860a6c2fa7c8400c08"}, + {file = "grpcio-1.54.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b52d00d1793d290c81ad6a27058f5224a7d5f527867e5b580742e1bd211afeee"}, + {file = "grpcio-1.54.2-cp310-cp310-win32.whl", hash = "sha256:881d058c5ccbea7cc2c92085a11947b572498a27ef37d3eef4887f499054dca8"}, + {file = "grpcio-1.54.2-cp310-cp310-win_amd64.whl", hash = "sha256:0212e2f7fdf7592e4b9d365087da30cb4d71e16a6f213120c89b4f8fb35a3ab3"}, + {file = "grpcio-1.54.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:1e623e0cf99a0ac114f091b3083a1848dbc64b0b99e181473b5a4a68d4f6f821"}, + {file = "grpcio-1.54.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:66233ccd2a9371158d96e05d082043d47dadb18cbb294dc5accfdafc2e6b02a7"}, + {file = "grpcio-1.54.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:4cb283f630624ebb16c834e5ac3d7880831b07cbe76cb08ab7a271eeaeb8943e"}, + {file = "grpcio-1.54.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a1e601ee31ef30a9e2c601d0867e236ac54c922d32ed9f727b70dd5d82600d5"}, + {file = "grpcio-1.54.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8da84bbc61a4e92af54dc96344f328e5822d574f767e9b08e1602bb5ddc254a"}, + {file = "grpcio-1.54.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5008964885e8d23313c8e5ea0d44433be9bfd7e24482574e8cc43c02c02fc796"}, + {file = "grpcio-1.54.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2f5a1f1080ccdc7cbaf1171b2cf384d852496fe81ddedeb882d42b85727f610"}, + {file = "grpcio-1.54.2-cp311-cp311-win32.whl", hash = "sha256:b74ae837368cfffeb3f6b498688a123e6b960951be4dec0e869de77e7fa0439e"}, + {file = "grpcio-1.54.2-cp311-cp311-win_amd64.whl", hash = "sha256:8cdbcbd687e576d48f7886157c95052825ca9948c0ed2afdc0134305067be88b"}, + {file = "grpcio-1.54.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:782f4f8662a2157c4190d0f99eaaebc602899e84fb1e562a944e5025929e351c"}, + {file = "grpcio-1.54.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:714242ad0afa63a2e6dabd522ae22e1d76e07060b5af2ddda5474ba4f14c2c94"}, + {file = "grpcio-1.54.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:f900ed4ad7a0f1f05d35f955e0943944d5a75f607a836958c6b8ab2a81730ef2"}, + {file = "grpcio-1.54.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:96a41817d2c763b1d0b32675abeb9179aa2371c72aefdf74b2d2b99a1b92417b"}, + {file = "grpcio-1.54.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70fcac7b94f4c904152809a050164650ac81c08e62c27aa9f156ac518029ebbe"}, + {file = "grpcio-1.54.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fd6c6c29717724acf9fc1847c4515d57e4dc12762452457b9cb37461f30a81bb"}, + {file = "grpcio-1.54.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c2392f5b5d84b71d853918687d806c1aa4308109e5ca158a16e16a6be71041eb"}, + {file = "grpcio-1.54.2-cp37-cp37m-win_amd64.whl", hash = "sha256:51630c92591d6d3fe488a7c706bd30a61594d144bac7dee20c8e1ce78294f474"}, + {file = "grpcio-1.54.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:b04202453941a63b36876a7172b45366dc0cde10d5fd7855c0f4a4e673c0357a"}, + {file = "grpcio-1.54.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:89dde0ac72a858a44a2feb8e43dc68c0c66f7857a23f806e81e1b7cc7044c9cf"}, + {file = "grpcio-1.54.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:09d4bfd84686cd36fd11fd45a0732c7628308d094b14d28ea74a81db0bce2ed3"}, + {file = "grpcio-1.54.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fc2b4edb938c8faa4b3c3ea90ca0dd89b7565a049e8e4e11b77e60e4ed2cc05"}, + {file = "grpcio-1.54.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61f7203e2767800edee7a1e1040aaaf124a35ce0c7fe0883965c6b762defe598"}, + {file = "grpcio-1.54.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e416c8baf925b5a1aff31f7f5aecc0060b25d50cce3a5a7255dc5cf2f1d4e5eb"}, + {file = "grpcio-1.54.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dc80c9c6b608bf98066a038e0172013a49cfa9a08d53335aefefda2c64fc68f4"}, + {file = "grpcio-1.54.2-cp38-cp38-win32.whl", hash = "sha256:8d6192c37a30a115f4663592861f50e130caed33efc4eec24d92ec881c92d771"}, + {file = "grpcio-1.54.2-cp38-cp38-win_amd64.whl", hash = "sha256:46a057329938b08e5f0e12ea3d7aed3ecb20a0c34c4a324ef34e00cecdb88a12"}, + {file = "grpcio-1.54.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:2296356b5c9605b73ed6a52660b538787094dae13786ba53080595d52df13a98"}, + {file = "grpcio-1.54.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:c72956972e4b508dd39fdc7646637a791a9665b478e768ffa5f4fe42123d5de1"}, + {file = "grpcio-1.54.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:9bdbb7624d65dc0ed2ed8e954e79ab1724526f09b1efa88dcd9a1815bf28be5f"}, + {file = "grpcio-1.54.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c44e1a765b31e175c391f22e8fc73b2a2ece0e5e6ff042743d8109b5d2eff9f"}, + {file = "grpcio-1.54.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cc928cfe6c360c1df636cf7991ab96f059666ac7b40b75a769410cc6217df9c"}, + {file = "grpcio-1.54.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a08920fa1a97d4b8ee5db2f31195de4a9def1a91bc003544eb3c9e6b8977960a"}, + {file = "grpcio-1.54.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4864f99aac207e3e45c5e26c6cbb0ad82917869abc2f156283be86c05286485c"}, + {file = "grpcio-1.54.2-cp39-cp39-win32.whl", hash = "sha256:b38b3de8cff5bc70f8f9c615f51b48eff7313fc9aca354f09f81b73036e7ddfa"}, + {file = "grpcio-1.54.2-cp39-cp39-win_amd64.whl", hash = "sha256:be48496b0e00460717225e7680de57c38be1d8629dc09dadcd1b3389d70d942b"}, + {file = "grpcio-1.54.2.tar.gz", hash = "sha256:50a9f075eeda5097aa9a182bb3877fe1272875e45370368ac0ee16ab9e22d019"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.54.0)"] +protobuf = ["grpcio-tools (>=1.54.2)"] [[package]] name = "h11" @@ -1480,18 
+1480,18 @@ dev = ["black", "hypothesis", "mypy", "pygments (>=2.14.0)", "pytest", "pytest-c [[package]] name = "platformdirs" -version = "3.5.0" +version = "3.5.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.0-py3-none-any.whl", hash = "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4"}, - {file = "platformdirs-3.5.0.tar.gz", hash = "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335"}, + {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, + {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] @@ -1527,25 +1527,25 @@ prometheus-client = ">=0.8.0,<1.0.0" [[package]] name = "protobuf" -version = "4.22.3" +version = "4.23.1" description = "" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.22.3-cp310-abi3-win32.whl", hash = "sha256:8b54f56d13ae4a3ec140076c9d937221f887c8f64954673d46f63751209e839a"}, - {file = "protobuf-4.22.3-cp310-abi3-win_amd64.whl", hash = "sha256:7760730063329d42a9d4c4573b804289b738d4931e363ffbe684716b796bde51"}, - {file = "protobuf-4.22.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:d14fc1a41d1a1909998e8aff7e80d2a7ae14772c4a70e4bf7db8a36690b54425"}, - {file = "protobuf-4.22.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:70659847ee57a5262a65954538088a1d72dfc3e9882695cab9f0c54ffe71663b"}, - {file = "protobuf-4.22.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:13233ee2b9d3bd9a5f216c1fa2c321cd564b93d8f2e4f521a85b585447747997"}, - {file = "protobuf-4.22.3-cp37-cp37m-win32.whl", hash = "sha256:ecae944c6c2ce50dda6bf76ef5496196aeb1b85acb95df5843cd812615ec4b61"}, - {file = "protobuf-4.22.3-cp37-cp37m-win_amd64.whl", hash = "sha256:d4b66266965598ff4c291416be429cef7989d8fae88b55b62095a2331511b3fa"}, - {file = "protobuf-4.22.3-cp38-cp38-win32.whl", hash = "sha256:f08aa300b67f1c012100d8eb62d47129e53d1150f4469fd78a29fa3cb68c66f2"}, - {file = "protobuf-4.22.3-cp38-cp38-win_amd64.whl", hash = "sha256:f2f4710543abec186aee332d6852ef5ae7ce2e9e807a3da570f36de5a732d88e"}, - {file = "protobuf-4.22.3-cp39-cp39-win32.whl", hash = "sha256:7cf56e31907c532e460bb62010a513408e6cdf5b03fb2611e4b67ed398ad046d"}, - {file = "protobuf-4.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:e0e630d8e6a79f48c557cd1835865b593d0547dce221c66ed1b827de59c66c97"}, - {file = "protobuf-4.22.3-py3-none-any.whl", hash = "sha256:52f0a78141078077cfe15fe333ac3e3a077420b9a3f5d1bf9b5fe9d286b4d881"}, - {file = "protobuf-4.22.3.tar.gz", hash = "sha256:23452f2fdea754a8251d0fc88c0317735ae47217e0d27bf330a30eec2848811a"}, + {file = "protobuf-4.23.1-cp310-abi3-win32.whl", hash = "sha256:410bcc0a5b279f634d3e16082ce221dfef7c3392fac723500e2e64d1806dd2be"}, + {file = "protobuf-4.23.1-cp310-abi3-win_amd64.whl", hash = "sha256:32e78beda26d7a101fecf15d7a4a792278a0d26a31bc327ff05564a9d68ab8ee"}, + {file = 
"protobuf-4.23.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f9510cac91e764e86acd74e2b7f7bc5e6127a7f3fb646d7c8033cfb84fd1176a"}, + {file = "protobuf-4.23.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:346990f634272caac1f09efbcfbbacb23098b1f606d172534c6fa2d9758bb436"}, + {file = "protobuf-4.23.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3ce113b3f3362493bddc9069c2163a38f240a9ed685ff83e7bcb756b05e1deb0"}, + {file = "protobuf-4.23.1-cp37-cp37m-win32.whl", hash = "sha256:2036a3a1e7fc27f973fa0a7888dce712393af644f4695385f117886abc792e39"}, + {file = "protobuf-4.23.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3b8905eafe4439076e1f58e9d1fa327025fd2777cf90f14083092ae47f77b0aa"}, + {file = "protobuf-4.23.1-cp38-cp38-win32.whl", hash = "sha256:5b9cd6097e6acae48a68cb29b56bc79339be84eca65b486910bb1e7a30e2b7c1"}, + {file = "protobuf-4.23.1-cp38-cp38-win_amd64.whl", hash = "sha256:decf119d54e820f298ee6d89c72d6b289ea240c32c521f00433f9dc420595f38"}, + {file = "protobuf-4.23.1-cp39-cp39-win32.whl", hash = "sha256:91fac0753c3c4951fbb98a93271c43cc7cf3b93cf67747b3e600bb1e5cc14d61"}, + {file = "protobuf-4.23.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac50be82491369a9ec3710565777e4da87c6d2e20404e0abb1f3a8f10ffd20f0"}, + {file = "protobuf-4.23.1-py3-none-any.whl", hash = "sha256:65f0ac96ef67d7dd09b19a46aad81a851b6f85f89725577f16de38f2d68ad477"}, + {file = "protobuf-4.23.1.tar.gz", hash = "sha256:95789b569418a3e32a53f43d7763be3d490a831e9c08042539462b6d972c2d7e"}, ] [[package]] @@ -1574,48 +1574,48 @@ files = [ [[package]] name = "pydantic" -version = "1.10.7" +version = "1.10.8" description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, - {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, - {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, - {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, - {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, - {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, - {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, - {file = 
"pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, - {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, - {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, - {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, - {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, - {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, - {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, - {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, - {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, - {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, - {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, - {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, - {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, - {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, - {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, - {file = 
"pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, - {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, + {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, + {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, + {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, + {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, + {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, + {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, + {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, + {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, + {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, + {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, + {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, + {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, + {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, + {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, + {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, + {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, + {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, + {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, + {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, + {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, + {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, + {file = 
"pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, + {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, + {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, + {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, + {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, + {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, + {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, + {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, + {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, + {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, + {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, + {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, + {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, + {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, + {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, ] [package.dependencies] @@ -1719,21 +1719,21 @@ files = [ [[package]] name = "requests" -version = "2.29.0" +version = "2.31.0" description = "Python HTTP for Humans." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b"}, - {file = "requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1768,19 +1768,19 @@ files = [ [[package]] name = "setuptools" -version = "67.7.2" +version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, - {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, + {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, + {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1916,20 +1916,21 @@ typing-extensions = ">=3.7.4" [[package]] name = "urllib3" -version = "1.26.15" +version = "2.0.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, + {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" @@ -2039,82 +2040,82 @@ anyio = ">=3.0.0" [[package]] name = "websockets" -version = "11.0.2" +version = "11.0.3" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "websockets-11.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:580cc95c58118f8c39106be71e24d0b7e1ad11a155f40a2ee687f99b3e5e432e"}, - {file = "websockets-11.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:143782041e95b63083b02107f31cda999f392903ae331de1307441f3a4557d51"}, - {file = "websockets-11.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8df63dcd955eb6b2e371d95aacf8b7c535e482192cff1b6ce927d8f43fb4f552"}, - {file = "websockets-11.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9b2dced5cbbc5094678cc1ec62160f7b0fe4defd601cd28a36fde7ee71bbb5"}, - {file = "websockets-11.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0eeeea3b01c97fd3b5049a46c908823f68b59bf0e18d79b231d8d6764bc81ee"}, - {file = "websockets-11.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:502683c5dedfc94b9f0f6790efb26aa0591526e8403ad443dce922cd6c0ec83b"}, - {file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3cc3e48b6c9f7df8c3798004b9c4b92abca09eeea5e1b0a39698f05b7a33b9d"}, - {file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:808b8a33c961bbd6d33c55908f7c137569b09ea7dd024bce969969aa04ecf07c"}, - {file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:34a6f8996964ccaa40da42ee36aa1572adcb1e213665e24aa2f1037da6080909"}, - {file = "websockets-11.0.2-cp310-cp310-win32.whl", hash = "sha256:8f24cd758cbe1607a91b720537685b64e4d39415649cac9177cd1257317cf30c"}, - {file = "websockets-11.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:3b87cd302f08ea9e74fdc080470eddbed1e165113c1823fb3ee6328bc40ca1d3"}, - {file = "websockets-11.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3565a8f8c7bdde7c29ebe46146bd191290413ee6f8e94cf350609720c075b0a1"}, - {file = "websockets-11.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f97e03d4d5a4f0dca739ea274be9092822f7430b77d25aa02da6775e490f6846"}, - {file = 
"websockets-11.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f392587eb2767afa8a34e909f2fec779f90b630622adc95d8b5e26ea8823cb8"}, - {file = "websockets-11.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7742cd4524622cc7aa71734b51294644492a961243c4fe67874971c4d3045982"}, - {file = "websockets-11.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46dda4bc2030c335abe192b94e98686615f9274f6b56f32f2dd661fb303d9d12"}, - {file = "websockets-11.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6b2bfa1d884c254b841b0ff79373b6b80779088df6704f034858e4d705a4802"}, - {file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1df2413266bf48430ef2a752c49b93086c6bf192d708e4a9920544c74cd2baa6"}, - {file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf45d273202b0c1cec0f03a7972c655b93611f2e996669667414557230a87b88"}, - {file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a09cce3dacb6ad638fdfa3154d9e54a98efe7c8f68f000e55ca9c716496ca67"}, - {file = "websockets-11.0.2-cp311-cp311-win32.whl", hash = "sha256:2174a75d579d811279855df5824676d851a69f52852edb0e7551e0eeac6f59a4"}, - {file = "websockets-11.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:c78ca3037a954a4209b9f900e0eabbc471fb4ebe96914016281df2c974a93e3e"}, - {file = "websockets-11.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2100b02d1aaf66dc48ff1b2a72f34f6ebc575a02bc0350cc8e9fbb35940166"}, - {file = "websockets-11.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dca9708eea9f9ed300394d4775beb2667288e998eb6f542cdb6c02027430c599"}, - {file = "websockets-11.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:320ddceefd2364d4afe6576195201a3632a6f2e6d207b0c01333e965b22dbc84"}, - {file = "websockets-11.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a573c8d71b7af937852b61e7ccb37151d719974146b5dc734aad350ef55a02"}, - {file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:13bd5bebcd16a4b5e403061b8b9dcc5c77e7a71e3c57e072d8dff23e33f70fba"}, - {file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:95c09427c1c57206fe04277bf871b396476d5a8857fa1b99703283ee497c7a5d"}, - {file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2eb042734e710d39e9bc58deab23a65bd2750e161436101488f8af92f183c239"}, - {file = "websockets-11.0.2-cp37-cp37m-win32.whl", hash = "sha256:5875f623a10b9ba154cb61967f940ab469039f0b5e61c80dd153a65f024d9fb7"}, - {file = "websockets-11.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:634239bc844131863762865b75211a913c536817c0da27f691400d49d256df1d"}, - {file = "websockets-11.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3178d965ec204773ab67985a09f5696ca6c3869afeed0bb51703ea404a24e975"}, - {file = "websockets-11.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:955fcdb304833df2e172ce2492b7b47b4aab5dcc035a10e093d911a1916f2c87"}, - {file = "websockets-11.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb46d2c7631b2e6f10f7c8bac7854f7c5e5288f024f1c137d4633c79ead1e3c0"}, - {file = "websockets-11.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25aae96c1060e85836552a113495db6d857400288161299d77b7b20f2ac569f2"}, - {file = 
"websockets-11.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2abeeae63154b7f63d9f764685b2d299e9141171b8b896688bd8baec6b3e2303"}, - {file = "websockets-11.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daa1e8ea47507555ed7a34f8b49398d33dff5b8548eae3de1dc0ef0607273a33"}, - {file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:954eb789c960fa5daaed3cfe336abc066941a5d456ff6be8f0e03dd89886bb4c"}, - {file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3ffe251a31f37e65b9b9aca5d2d67fd091c234e530f13d9dce4a67959d5a3fba"}, - {file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf6385f677ed2e0b021845b36f55c43f171dab3a9ee0ace94da67302f1bc364"}, - {file = "websockets-11.0.2-cp38-cp38-win32.whl", hash = "sha256:aa7b33c1fb2f7b7b9820f93a5d61ffd47f5a91711bc5fa4583bbe0c0601ec0b2"}, - {file = "websockets-11.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:220d5b93764dd70d7617f1663da64256df7e7ea31fc66bc52c0e3750ee134ae3"}, - {file = "websockets-11.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fb4480556825e4e6bf2eebdbeb130d9474c62705100c90e59f2f56459ddab42"}, - {file = "websockets-11.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec00401846569aaf018700249996143f567d50050c5b7b650148989f956547af"}, - {file = "websockets-11.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87c69f50281126dcdaccd64d951fb57fbce272578d24efc59bce72cf264725d0"}, - {file = "websockets-11.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:232b6ba974f5d09b1b747ac232f3a3d8f86de401d7b565e837cc86988edf37ac"}, - {file = "websockets-11.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392d409178db1e46d1055e51cc850136d302434e12d412a555e5291ab810f622"}, - {file = "websockets-11.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4fe2442091ff71dee0769a10449420fd5d3b606c590f78dd2b97d94b7455640"}, - {file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ede13a6998ba2568b21825809d96e69a38dc43184bdeebbde3699c8baa21d015"}, - {file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4c54086b2d2aec3c3cb887ad97e9c02c6be9f1d48381c7419a4aa932d31661e4"}, - {file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e37a76ccd483a6457580077d43bc3dfe1fd784ecb2151fcb9d1c73f424deaeba"}, - {file = "websockets-11.0.2-cp39-cp39-win32.whl", hash = "sha256:d1881518b488a920434a271a6e8a5c9481a67c4f6352ebbdd249b789c0467ddc"}, - {file = "websockets-11.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:25e265686ea385f22a00cc2b719b880797cd1bb53b46dbde969e554fb458bfde"}, - {file = "websockets-11.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ce69f5c742eefd039dce8622e99d811ef2135b69d10f9aa79fbf2fdcc1e56cd7"}, - {file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b985ba2b9e972cf99ddffc07df1a314b893095f62c75bc7c5354a9c4647c6503"}, - {file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b52def56d2a26e0e9c464f90cadb7e628e04f67b0ff3a76a4d9a18dfc35e3dd"}, - {file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d70a438ef2a22a581d65ad7648e949d4ccd20e3c8ed7a90bbc46df4e60320891"}, - {file = "websockets-11.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:752fbf420c71416fb1472fec1b4cb8631c1aa2be7149e0a5ba7e5771d75d2bb9"}, - {file = "websockets-11.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dd906b0cdc417ea7a5f13bb3c6ca3b5fd563338dc596996cb0fdd7872d691c0a"}, - {file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e79065ff6549dd3c765e7916067e12a9c91df2affea0ac51bcd302aaf7ad207"}, - {file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46388a050d9e40316e58a3f0838c63caacb72f94129eb621a659a6e49bad27ce"}, - {file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c7de298371d913824f71b30f7685bb07ad13969c79679cca5b1f7f94fec012f"}, - {file = "websockets-11.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6d872c972c87c393e6a49c1afbdc596432df8c06d0ff7cd05aa18e885e7cfb7c"}, - {file = "websockets-11.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b444366b605d2885f0034dd889faf91b4b47668dd125591e2c64bfde611ac7e1"}, - {file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b967a4849db6b567dec3f7dd5d97b15ce653e3497b8ce0814e470d5e074750"}, - {file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2acdc82099999e44fa7bd8c886f03c70a22b1d53ae74252f389be30d64fd6004"}, - {file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:518ed6782d9916c5721ebd61bb7651d244178b74399028302c8617d0620af291"}, - {file = "websockets-11.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:58477b041099bb504e1a5ddd8aa86302ed1d5c6995bdd3db2b3084ef0135d277"}, - {file = "websockets-11.0.2-py3-none-any.whl", hash = "sha256:5004c087d17251938a52cce21b3dbdabeecbbe432ce3f5bbbf15d8692c36eac9"}, - {file = "websockets-11.0.2.tar.gz", hash = "sha256:b1a69701eb98ed83dd099de4a686dc892c413d974fa31602bc00aca7cb988ac9"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"}, + {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"}, + {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"}, + {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"}, + {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"}, + {file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"}, + {file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = 
"sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"}, + {file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"}, + {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"}, + {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"}, + {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = 
"sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"}, + {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"}, + {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"}, + {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, ] [[package]] @@ -2308,4 +2309,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "30da5cc3655784dcb72fd5d67484366674b49f5ddd90a3113c4efc97d89f09f7" +content-hash = "dd108774e3823482b2cc1294b92d9b25c72ff6c1b1e3b27c6e150ad39fe4d883" diff --git a/services/voting-node/pyproject.toml b/services/voting-node/pyproject.toml index 
d784cd18ce..1a927a8502 100644 --- a/services/voting-node/pyproject.toml +++ b/services/voting-node/pyproject.toml @@ -30,6 +30,7 @@ pydantic = "~1.10.7" cryptography = "^40.0.1" loguru = "^0.6.0" ideascale-importer = {path = "../../utilities/ideascale-importer"} +brotlipy = "^0.7.0" [tool.poetry.group.dev.dependencies] # formatting diff --git a/services/voting-node/voting_node/db.py b/services/voting-node/voting_node/db.py index 01b034722f..398271a81e 100644 --- a/services/voting-node/voting_node/db.py +++ b/services/voting-node/voting_node/db.py @@ -44,34 +44,33 @@ async def close(self): if self.connection is not None: await self.connection.close() - async def fetch_current_event(self) -> Event: - """Look in EventDB for the event that will start voting. - - |--before event--|start_time --- current event ---end_time|--- next event ---| - - """ + async def fetch_upcoming_event(self) -> Event: + """Look in EventDB for the next event that will start.""" # first, check if there is an event that has not finished now = datetime.datetime.utcnow() - filter_by = "(voting_end > $1 or voting_end IS NULL) and voting_start < $1" - sort_by = "voting_start ASC" - query = f"SELECT * FROM event WHERE {filter_by} ORDER BY {sort_by} LIMIT 1" + query = """ + SELECT + * + FROM + event + WHERE + start_time > $1 + ORDER BY + start_time ASC + LIMIT 1""" result = await self.conn().fetchrow(query, now) - if result is not None: - logger.debug(f"fetched ongoing event: {result}") - return Event(**dict(result)) - filter_by = "voting_start > $1" - query = f"SELECT * FROM event WHERE {filter_by} ORDER BY {sort_by} LIMIT 1" - result = await self.conn().fetchrow(query, now) if result is None: raise Exception("failed to fetch event from DB") - logger.debug(f"fetched upcoming event: {result}") - return Event(**dict(result)) + + event = Event(**dict(result)) + logger.debug(f"fetched upcoming event: {event.name}") + return event async def fetch_leader_host_info(self, event_row_id: int) -> HostInfo: """Return HostInfo for leaders, sorted by hostname.""" - filter_by = "hostname = $1 AND event = $2" - query = f"SELECT * FROM voting_node WHERE {filter_by}" + conds = "hostname = $1 AND event = $2" + query = f"SELECT * FROM voting_node WHERE {conds}" result = await self.conn().fetchrow(query, get_hostname(), event_row_id) match result: case None: @@ -113,30 +112,33 @@ async def insert_leader_host_info(self, host_info: HostInfo): raise Exception(f"failed to insert '{h.hostname}' info to DB") logger.debug(f"{h.hostname} info added: {result}") - async def fetch_sorted_leaders_host_info(self) -> list[LeaderHostInfo]: + async def fetch_sorted_leaders_host_info(self, event_row_id: int) -> list[LeaderHostInfo]: """Return a list of leader host information. Fetch host information for leader nodes. Raises exceptions if the DB fails to return a list of records, or if the list is empty. 
""" - where = f"WHERE hostname ~ '{LEADER_REGEX}'" - order_by = "ORDER BY hostname ASC" - query = f"SELECT (hostname, pubkey) FROM voting_node {where} {order_by}" - result = await self.conn().fetch(query) + query = f""" + SELECT (hostname, pubkey) + FROM voting_node + WHERE hostname ~ '{LEADER_REGEX}' AND event = $1 + ORDER BY hostname ASC""" + result = await self.conn().fetch(query, event_row_id) match result: case None: raise Exception("DB error fetching leaders host info") case []: raise Exception("no leader host info found in DB") case [*leaders]: - logger.debug(f"found leaders: {leaders}") def extract_leader_info(leader): host_info = LeaderHostInfo(*leader["row"]) - logger.debug(f"{host_info}") + logger.debug(f"{host_info.hostname}") return host_info - return list(map(extract_leader_info, leaders)) + logger.debug(f"found {len(leaders)} leaders") + extracted_leaders = [extract_leader_info(leader) for leader in leaders] + return extracted_leaders async def fetch_proposals(self) -> list[Proposal]: """Return a list of proposals .""" @@ -179,13 +181,13 @@ async def fetch_snapshot(self, event_id: int) -> Snapshot: result = await self.conn().fetchrow(query, event_id) if result is None: raise Exception("snapshot DB error") - logger.debug(f"snapshot retrieved from DB: {result}") + logger.debug("snapshot retrieved from DB") match result: case None: raise Exception("DB error fetching snapshot") case snpsht: snapshot = Snapshot(*snpsht["row"]) - logger.debug(f"snapshot retrieved from DB: {snapshot}") + logger.debug("snapshot retrieved from DB") return snapshot async def fetch_voteplans(self, event_id: int) -> list[VotePlan]: diff --git a/services/voting-node/voting_node/envvar.py b/services/voting-node/voting_node/envvar.py index dba62f2436..3d8ce246c3 100644 --- a/services/voting-node/voting_node/envvar.py +++ b/services/voting-node/voting_node/envvar.py @@ -4,16 +4,25 @@ # Service settings VOTING_HOST: Final = "VOTING_HOST" """Host address for the voting node API.""" + VOTING_PORT: Final = "VOTING_PORT" """Listening port for the voting node API.""" + VOTING_LOG_LEVEL: Final = "VOTING_LOG_LEVEL" """Log level for the voting node.""" + +VOTING_LOG_FORMAT: Final = "VOTING_LOG_FORMAT" +"""Log format for the voting node.""" + VOTING_NODE_STORAGE: Final = "VOTING_NODE_STORAGE" """Path to the voting node storage.""" + IS_NODE_RELOADABLE: Final = "IS_NODE_RELOADABLE" """Set the voting node mode to 'reloadable' if set to True.""" + EVENTDB_URL: Final = "EVENTDB_URL" """URL to the EventDB.""" + JORM_PORT_REST: Final = "JORM_PORT_REST" JORM_PORT_JRPC: Final = "JORM_PORT_JRPC" JORM_PORT_P2P: Final = "JORM_PORT_P2P" diff --git a/services/voting-node/voting_node/helpers.py b/services/voting-node/voting_node/helpers.py new file mode 100644 index 0000000000..381b637b28 --- /dev/null +++ b/services/voting-node/voting_node/helpers.py @@ -0,0 +1,138 @@ +import asyncio +from datetime import datetime, timedelta +from typing import Final + +import asyncpg +from loguru import logger + +SLOT_DURATION: Final = 4 +SLOTS_PER_EPOCH: Final = 900 +DEFAULT_EVENT_START_DATE: Final = datetime.utcnow() + timedelta(seconds=30) + + +def slotdelta(epochs: int = 0, slots: int = 0): + slots_in_secs = (epochs * SLOTS_PER_EPOCH + slots) * SLOT_DURATION + return timedelta(seconds=slots_in_secs) + + +async def add_default_event( + db_url: str = "postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev", + reference_date: datetime = DEFAULT_EVENT_START_DATE, +): + """Async function that creates a new event in the database with 
the provided parameters. + + Args: + ---- + reference_date (datetime): The reference date to calculate the event's timing based on, defaults to datetime.utcnow(). + + Returns: + ------- + None. + """ + # Execute a statement to create a new event. + conn = await asyncpg.connect(db_url) + if conn is None: + raise Exception("no db connection found for") + + block0_date = reference_date + # dates + start_time = block0_date + + # moment that registrations from Cardano main net are frozen + registration_snapshot_time = block0_date + slotdelta(slots=2) + # the moment that registrations are considered to be stable + snapshot_start = registration_snapshot_time + timedelta(days=14) + + voting_start = snapshot_start + slotdelta(slots=15) + voting_end = voting_start + timedelta(days=14) + tallying_end = voting_end + timedelta(days=1) + + end_time = tallying_end + slotdelta(slots=5) # finish event 20 secs after tallying_end + + voting_power_threshold = 450 + max_voting_power_pct = 1 + + insight_sharing_start = block0_date + timedelta(minutes=4) + proposal_submission_start = block0_date + timedelta(minutes=5) + refine_proposals_start = block0_date + timedelta(minutes=6) + finalize_proposals_start = block0_date + timedelta(minutes=7) + proposal_assessment_start = block0_date + timedelta(minutes=8) + assessment_qa_start = block0_date + timedelta(minutes=9) + + committee_size = 5 + committee_threshold = 3 + + query = """ + INSERT INTO + event( + row_id, + name, + description, + registration_snapshot_time, + voting_power_threshold, + max_voting_power_pct, + start_time, + end_time, + insight_sharing_start, + proposal_submission_start, + refine_proposals_start, + finalize_proposals_start, + proposal_assessment_start, + assessment_qa_start, + snapshot_start, + voting_start, + voting_end, + tallying_end, + committee_size, + committee_threshold + ) + VALUES ( + (SELECT COALESCE(MAX(row_id), 0) + 1 FROM event), + $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19) + RETURNING * + """ + await conn.execute( + query, + f"Fund TEST {start_time}", + "Description for the Fund.", + registration_snapshot_time, + voting_power_threshold, + max_voting_power_pct, + start_time, + end_time, + insight_sharing_start, + proposal_submission_start, + refine_proposals_start, + finalize_proposals_start, + proposal_assessment_start, + assessment_qa_start, + snapshot_start, + voting_start, + voting_end, + tallying_end, + committee_size, + committee_threshold, + ) + + logger.debug("inserted upcoming event") + + # Close the connection. + await conn.close() + + +async def delete_table(table_name: str, db_url: str = "postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev"): + conn = await asyncpg.connect(db_url) + if conn is None: + raise Exception("no db connection found for") + + result = await conn.execute( + f"DELETE FROM {table_name}", + ) + + logger.debug(f"deleted table rows from '{table_name}': {result}") + await conn.close() + + +if __name__ == "__main__": + """Reset the database when called from the command line.""" + asyncio.run(add_default_event()) diff --git a/services/voting-node/voting_node/importer.py b/services/voting-node/voting_node/importer.py index d0dd87feb7..fd00b57d7c 100644 --- a/services/voting-node/voting_node/importer.py +++ b/services/voting-node/voting_node/importer.py @@ -2,129 +2,191 @@ Import data from external services used for voting. - -Requirements: - -* `ideascale-importer` utility used to import external data. 
-* A file named `ideascale-importer-config.json` on the current path (TODO: this needs to be set as an envvar). +# Required environment variables This module requires the following environment variables to be set: -* `EVENTDB_URL` +Common to IdeaScale and DBSync snapshots: -Specific to Ideascale +* `EVENTDB_URL` - URL to the EventDB. -* `IDEASCALE_API_TOKEN` -* `IDEASCALE_CAMPAIGN_GROUP` -* `IDEASCALE_STAGE_ID` -* `IDEASCALE_API_URL` +## Specific to Ideascale snapshot -Specific to Snapshot +* `IDEASCALE_API_TOKEN` - API token from ideascale.com. +* `IDEASCALE_CAMPAIGN_GROUP` - Group ID for the IdeaScale campaign. +* `IDEASCALE_STAGE_ID` - Stage ID for IdeaScale. +* `IDEASCALE_API_URL` - URL for IdeaScale API. +* `IDEASCALE_CONFIG_PATH` - Path to the ideascale command configuration file. -* SNAPSHOT_CONFIG_PATH -* SNAPSHOT_OUTPUT_DIR -* SNAPSHOT_RAW_FILE -* SNAPSHOT_DREPS_FILE +## Specific to DBSync Snapshot + +* `SNAPSHOT_CONFIG_PATH` - Path to the command configuration file. +* `SNAPSHOT_OUTPUT_DIR` - Path to the directory where DBSync snapshot output is written. +* `SNAPSHOT_NETWORK_ID` - Defines 'mainnet' or 'testnet'. +* `DBSYNC_URL` - URL to DBSync. +* `SNAPSHOT_TOOL_PATH` - Path to the snapshot_tool executable (optional). +* `CATALYST_TOOLBOX_PATH` - Path to the catalyst-toolbox executable (optional). """ import asyncio import os -from typing import Final +from datetime import datetime +from ideascale_importer.ideascale.importer import Importer as IdeascaleImporter +from ideascale_importer.snapshot_importer import Importer as DBSyncImporter from loguru import logger - -from .envvar import EVENTDB_URL, VOTING_LOG_LEVEL - -IDEASCALE_API_TOKEN: Final = "IDEASCALE_API_TOKEN" -IDEASCALE_CAMPAIGN_GROUP: Final = "IDEASCALE_CAMPAIGN_GROUP" -IDEASCALE_STAGE_ID: Final = "IDEASCALE_STAGE_ID" -IDEASCALE_API_URL: Final = "IDEASCALE_API_URL" - -SNAPSHOT_CONFIG_PATH: Final = "SNAPSHOT_CONFIG_PATH" -"""Path to the configuration file for the `ideascale-importer snapshot` tool.""" -SNAPSHOT_OUTPUT_DIR: Final = "SNAPSHOT_OUTPUT_DIR" -"""Output directory for snapshot data. This directory MUST exist.""" -SNAPSHOT_RAW_FILE: Final = "SNAPSHOT_RAW_FILE" -"""Optional raw snapshot data. When set, the snapshot-tool is not used.""" -SNAPSHOT_DREPS_FILE: Final = "SNAPSHOT_DREPS_FILE" -"""Optional DReps data.""" +from pydantic import BaseModel class ExternalDataImporter: """Importer of external data.""" async def ideascale_import_all(self, event_id: int): - """Run 'ideascale-importer ideascale import-all ' as a subprocess.""" + """Run 'ideascale-importer ideascale import-all ' as a subprocess. + + This command requires the following environment variables to work: + + * `EVENTDB_URL` sets `--database-url`. + * `IDEASCALE_API_TOKEN` sets `--api-token`. + * `IDEASCALE_CAMPAIGN_GROUP` sets `--campaign-group-id`. + * `IDEASCALE_STAGE_ID` sets `--stage-id`. + * `IDEASCALE_API_URL` sets `--ideascale-api-url`. + * `IDEASCALE_CONFIG_PATH` sets `--config-path`. 
+ """ logger.info(f"Running ideascale for event {event_id}") - proc = await asyncio.create_subprocess_exec( - "ideascale-importer", - "ideascale", - "import-all", - "--api-token", - os.environ[IDEASCALE_API_TOKEN], - "--database-url", - os.environ[EVENTDB_URL], - "--event-id", - f"{event_id}", - "--campaign-group-id", - os.environ[IDEASCALE_CAMPAIGN_GROUP], - "--stage-id", - os.environ[IDEASCALE_STAGE_ID], - "--ideascale-api-url", - os.environ[IDEASCALE_API_URL], - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.STDOUT, + importer = IdeascaleImporter( + api_token=os.environ["IDEASCALE_API_TOKEN"], + database_url=os.environ["EVENTDB_URL"], + config_path=os.environ["IDEASCALE_CONFIG_PATH"], + event_id=event_id, + campaign_group_id=int(os.environ["IDEASCALE_CAMPAIGN_GROUP"]), + stage_id=int(os.environ["IDEASCALE_STAGE_ID"]), + proposals_scores_csv_path=None, + ideascale_api_url=os.environ["IDEASCALE_API_URL"], ) + try: + await importer.connect() + await importer.run() + logger.debug("ideascale importer has finished") + except Exception as e: + raise Exception(f"ideascale import error: {e}") from e - # checks that there is stdout - while proc.stdout is not None: - line = await proc.stdout.readline() - if line: - print(line.decode()) - else: - break + async def snapshot_import(self, event_id: int): + """Run 'ideascale-importer snapshot import ' as a subprocess. - returncode = await proc.wait() - if returncode != 0: - raise Exception("failed to run ideascale importer") - logger.debug("ideascale importer has finished") + This command requires the following environment variables to work: - async def snapshot_import(self, event_id: int): - """Run 'ideascale-importer snapshot import ' as a subprocess.""" + * `EVENTDB_URL` sets `--database-url`. + * `SNAPSHOT_OUTPUT_DIR` sets `--output-dir`. + * `SNAPSHOT_NETWORK_ID` sets `--network-id`. + * `DBSYNC_URL` sets `--dbsync-url`. + * `SNAPSHOT_TOOL_PATH` sets `--snapshot-tool-path` (optional). + * `CATALYST_TOOLBOX_PATH` sets `--catalyst-toolbox-path` (optional). 
+ """ logger.info(f"Importing snapshot data for event {event_id}") - proc = await asyncio.create_subprocess_exec( - "ideascale-importer", - "snapshot", - "import", - "--config-path", - os.environ[SNAPSHOT_CONFIG_PATH], - "--database-url", - os.environ[EVENTDB_URL], - "--event-id", - f"{event_id}", - "--raw-snapshot-file", - os.environ[SNAPSHOT_RAW_FILE], - #"--dreps-file", - #os.environ[SNAPSHOT_DREPS_FILE], - "--output-dir", - os.environ[SNAPSHOT_OUTPUT_DIR], - "--log-level", - os.environ[VOTING_LOG_LEVEL], - "--log-format", - "text", - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.STDOUT, + importer = DBSyncImporter( + database_url=os.environ["EVENTDB_URL"], + event_id=event_id, + output_dir=os.environ["SNAPSHOT_OUTPUT_DIR"], + network_id=os.environ["SNAPSHOT_NETWORK_ID"], + dbsync_url=os.environ["DBSYNC_URL"], + snapshot_tool_path=os.environ.get("SNAPSHOT_TOOL_PATH", "snapshot_tool"), + catalyst_toolbox_path=os.environ.get("CATALYST_TOOLBOX_PATH", "catalyst-toolbox"), + gvc_api_url=os.environ["GVC_API_URL"], ) - - # checks that there is stdout - while proc.stdout is not None: - line = await proc.stdout.readline() - if line: - print(line.decode()) + try: + await importer.run() + logger.debug("dbsync importer has finished") + except Exception as e: + raise Exception(f"dbsync importer error: {e}") from e + + +class SnapshotRunner(BaseModel): + """Run snapshots from DBSync and IdeaScale.""" + + registration_snapshot_time: datetime + snapshot_start: datetime + + def snapshot_start_has_passed(self) -> bool: + """Check if the current time is after the snapshot start time. + + :return: a boolean indicating whether the snapshot start time has passed. + """ + now = datetime.utcnow() + return now > self.snapshot_start + + def _reimaining_intervals_n_seconds_to_next_snapshot(self, current_time: datetime, interval: int) -> tuple[int, int]: + """Calculates the remaining number of intervals and seconds until the next snapshot. + + :param current_time: The current datetime. + :type current_time: datetime + :param interval: The interval in seconds. + :type interval: int + :return: A tuple containing the number of intervals until the next snapshot start and the number of seconds until the next interval. + :rtype: Tuple[int, int] + """ + delta = self.snapshot_start - current_time + delta_seconds = int(abs(delta.total_seconds())) + # calculate the number of intervals until the snapshot start time + num_intervals = int(delta_seconds / interval) + # sleep for the remaining time until the next interval + time_til_next: int = delta_seconds % interval + return num_intervals, time_til_next + + async def _ideascale_snapshot(self, event_id: int) -> None: + """Call the 'ideascale-importer ideascale import-all ' command.""" + try: + # Initialize external data importer + importer = ExternalDataImporter() + await importer.ideascale_import_all(event_id) + # raise Exception("ideascale import is DISABLED. Skipping...") + except Exception as e: + logger.error(f"snapshot: {e}") + + async def _dbsync_snapshot(self, event_id: int) -> None: + """Call the 'ideascale-importer snapshot import ' command.""" + try: + # Initialize external data importer + importer = ExternalDataImporter() + await importer.snapshot_import(event_id) + except Exception as e: + logger.error(f"snapshot: {e}") + + async def take_snapshots(self, event_id: int) -> None: + """Takes snapshots at regular intervals using ExternalDataImporter. + + Args: + ---- + event_id (int): The ID of the event to take snapshots for. 
+ + Returns: + ------- + None + """ + # Check if snapshot start time has passed + if self.snapshot_start_has_passed(): + logger.info("Snapshot has become stable. Skipping...") + return + + # Take snapshots at regular intervals + while True: + interval = int(os.getenv("SNAPSHOT_INTERVAL_SECONDS", 1800)) + current_time = datetime.utcnow() + num_intervals, secs_to_sleep = self._reimaining_intervals_n_seconds_to_next_snapshot(current_time, interval) + + logger.info(f"{num_intervals + 1} snapshots remaining. Next snapshot is in {secs_to_sleep} seconds...") + # Wait for the next snapshot interval + await asyncio.sleep(secs_to_sleep) + + # Take snapshot + logger.info("Taking snapshot now") + logger.debug("|---> Starting DBSync snapshot now") + await self._dbsync_snapshot(event_id) + logger.debug("|---> Starting IdeasScale snapshot now") + await self._ideascale_snapshot(event_id) + + if num_intervals > 0: + await asyncio.sleep(0) + continue else: break - - returncode = await proc.wait() - if returncode != 0: - raise Exception("failed to run ideascale importer") - logger.debug("ideascale importer has finished") diff --git a/services/voting-node/voting_node/main.py b/services/voting-node/voting_node/main.py index e0bd80876e..2a7a160437 100644 --- a/services/voting-node/voting_node/main.py +++ b/services/voting-node/voting_node/main.py @@ -5,7 +5,7 @@ import click import uvicorn - +from ideascale_importer.utils import configure_logger from . import api, service from .envvar import ( EVENTDB_URL, @@ -17,6 +17,7 @@ JORM_PORT_REST, VOTING_HOST, VOTING_LOG_LEVEL, + VOTING_LOG_FORMAT, VOTING_NODE_STORAGE, VOTING_PORT, ) @@ -33,6 +34,7 @@ def voting_node_cli(): "--reloadable", is_flag=True, envvar=IS_NODE_RELOADABLE, + default=False, help=r"""Flag to enable the voting node to run in reloadable mode. When set, the node will reload its settings whenever changes to the current voting event are detected. @@ -66,6 +68,15 @@ def voting_node_cli(): If left unset it will look for envvar `VOTING_LOG_LEVEL`. If no level is found, the default value is: info""", ) +@click.option( + "--log-format", + envvar=VOTING_LOG_FORMAT, + default="text", + type=click.Choice(["text", "json"]), + help="""Set the format for logs in the voting node. + + If left unset it will look for envvar `VOTING_LOG_FORMAT`. 
If no format is found, the default value is: text""", +) @click.option( "--database-url", envvar=EVENTDB_URL, @@ -128,6 +139,7 @@ def start( api_host, api_port, log_level, + log_format, database_url, node_storage, jorm_path, @@ -137,7 +149,7 @@ def start( jorm_port_p2p, ): """Start the Voting Service.""" - click.echo(f"reloadable={reloadable}") + configure_logger(log_level, log_format) api_config = uvicorn.Config(api.app, host=api_host, port=api_port, log_level=log_level) settings = ServiceSettings( diff --git a/services/voting-node/voting_node/models.py b/services/voting-node/voting_node/models.py index 0a7ec3deac..a863aef3bf 100644 --- a/services/voting-node/voting_node/models.py +++ b/services/voting-node/voting_node/models.py @@ -385,7 +385,7 @@ class Snapshot: event: int as_at: datetime last_updated: datetime - dbsync_snapshot_data: str | None + dbsync_snapshot_data: bytes | None drep_data: str | None catalyst_snapshot_data: str | None final: bool diff --git a/services/voting-node/voting_node/service.py b/services/voting-node/voting_node/service.py index 9bf007a3a5..299f25e175 100644 --- a/services/voting-node/voting_node/service.py +++ b/services/voting-node/voting_node/service.py @@ -68,7 +68,7 @@ async def start_service(self, sockets: list[socket.socket] | None = None): await schedule.run() break except Exception as e: - logger.error(f"schedule retry: {e}") + logger.warning(f"X-> RESET: {e}") # waits before retrying await asyncio.sleep(SLEEP_TO_SCHEDULE_RETRY) diff --git a/services/voting-node/voting_node/tasks.py b/services/voting-node/voting_node/tasks.py index 77791bad6a..b6046bdd15 100644 --- a/services/voting-node/voting_node/tasks.py +++ b/services/voting-node/voting_node/tasks.py @@ -8,12 +8,13 @@ import secrets from typing import Final, NoReturn +import brotli from loguru import logger from . import utils from .db import EventDb from .envvar import COMMITTEE_CRS -from .importer import ExternalDataImporter +from .importer import SnapshotRunner from .jcli import JCli from .jormungandr import Jormungandr from .models import ( @@ -35,7 +36,7 @@ ) from .storage import SecretDBStorage -RESET_DATA = True +RESET_DATA = False KEEP_DATA = False SCHEDULE_RESET_MSG = "schedule was reset" @@ -65,7 +66,6 @@ "set_node_topology_key", "set_node_config", "wait_for_registration_snapshot_time", - "wait_for_snapshot", "import_snapshot_data", "collect_snapshot_data", "setup_tally_committee", @@ -108,7 +108,7 @@ def reset_schedule(self, msg: str = SCHEDULE_RESET_MSG, reset_data: bool = RESET """ if reset_data: self.reset_data() - raise Exception(f"|->{msg}") + raise Exception(f"{msg}") async def run(self) -> None: """Run through the scheduled tasks. @@ -136,17 +136,17 @@ async def run(self) -> None: try: await self.run_task(task) except Exception as e: - raise Exception(f"'{task}': {e}") from e + raise e logger.info("SCHEDULE END") async def run_task(self, task_name): """Run the async method with the given task_name.""" - logger.info(f"{task_name}") - logger.debug(f"|'{task_name}' start") + logger.info(f">> TASK {task_name}") + logger.debug(f"| {task_name} START") self.current_task = task_name task_exec = getattr(self, task_name) await task_exec() - logger.debug(f"|'{task_name}' end") + logger.debug(f"| {task_name} END") class NodeTaskSchedule(ScheduleRunner): @@ -193,8 +193,8 @@ async def fetch_upcoming_event(self): # This all starts by getting the event row that has the nearest # `voting_start`. We query the DB to get the row, and store it. 
try: - event = await self.db.fetch_current_event() - logger.debug("current event retrieved from DB") + event = await self.db.fetch_upcoming_event() + logger.debug("upcoming event retrieved from DB") self.node.event = event except Exception as e: self.reset_schedule(f"{e}") @@ -227,17 +227,14 @@ async def fetch_host_keys(self): else: Store host info from step 1. """ - try: - # gets the event, raises exception if none is found. - event = self.node.get_event() - except Exception as e: - self.reset_schedule(f"{e}") + # gets the event, raises exception if none is found. + event = self.node.get_event() try: # gets host information from voting_node table for this event # raises exception if none is found. host_info: HostInfo = await self.db.fetch_leader_host_info(event.row_id) - logger.debug(f"fetched node host info from DB: {host_info}") + logger.debug(f"fetched node host info from DB: {host_info.hostname}") self.node.host_info = host_info except Exception as e: # fetching from DB failed @@ -253,22 +250,24 @@ async def fetch_host_keys(self): pubkey = await self.jcli().key_to_public(seckey) netkey = await self.jcli().key_generate(secret_type="ed25519") host_info = HostInfo(hostname, event_id, seckey, pubkey, netkey) - logger.debug(f"host info was generated: {host_info}") + logger.debug(f"host info was generated: {host_info.hostname}") try: # we add the host info row # raises exception if unable. await self.db.insert_leader_host_info(host_info) # explicitly reset the schedule to ensure this task is run again. - self.reset_schedule("added node host info to DB") + raise Exception("added node host info to DB") except Exception as e: self.reset_schedule(f"{e}") async def fetch_leaders(self): """Fetch from the DB host info for other leaders.""" + # gets the event, raises exception if none is found. + event = self.node.get_event() try: # gets info for other leaders # raises exception if unable. - leaders = await self.db.fetch_sorted_leaders_host_info() + leaders = await self.db.fetch_sorted_leaders_host_info(event.row_id) self.node.leaders = leaders except Exception as e: self.reset_schedule(f"{e}") @@ -323,7 +322,6 @@ async def set_node_config(self): host_name = utils.get_hostname() host_ip = utils.get_hostname_addr() role_n_digits = utils.get_hostname_role_n_digits(host_name) - logger.debug(f"{role_n_digits} ip: {host_ip}") p2p_port = self.settings.p2p_port listen_rest = f"{host_ip}:{self.settings.rest_port}" @@ -365,7 +363,7 @@ async def set_node_config(self): # convert to yaml and save node_config_yaml = NodeConfigYaml(config, self.node.storage.joinpath("node_config.yaml")) await node_config_yaml.save() - logger.debug(f"{node_config_yaml}") + logger.debug("node config saved") self.node.config = node_config_yaml async def cleanup(self): @@ -455,6 +453,27 @@ class Leader0Schedule(LeaderSchedule): # Leader0 Node tasks tasks: list[str] = LEADER0_NODE_SCHEDULE + async def fetch_upcoming_event(self): + """Override common method to fetch the upcoming event from the DB. + + 'leader0' nodes that don't find an upcoming event, create one with + default values. 
+ """ + try: + event = await self.db.fetch_upcoming_event() + logger.debug("current event retrieved from DB") + self.node.event = event + except Exception: + # run helper to add a default event to the DB + from .helpers import add_default_event + + logger.debug("event not found from DB, attempting to create") + await add_default_event(db_url=self.settings.db_url) + logger.info("event added to DB") + event = await self.db.fetch_upcoming_event() + logger.debug("current event retrieved from DB") + self.node.event = event + async def wait_for_registration_snapshot_time(self): """Wait for the event registration_snapshot_time.""" # get the snapshot start timestamp @@ -466,23 +485,14 @@ async def wait_for_registration_snapshot_time(self): logger.debug("registration snapshot time reached.") - async def wait_for_snapshot(self): - """Wait for the event snapshot_start time.""" - # get the snapshot start timestamp - # raises an exception otherwise - snapshot_start = self.node.get_snapshot_start() - # check if now is after the snapshot start time - if not self.node.has_snapshot_started(): - raise Exception(f"snapshot will be stable on {snapshot_start} UTC") - - logger.debug("snapshot is stable") - async def import_snapshot_data(self): """Collect the snapshot data from EventDB.""" event = self.node.get_event() - importer = ExternalDataImporter() - await importer.ideascale_import_all(event.row_id) - await importer.snapshot_import(event.row_id) + registration_time = event.get_registration_snapshot_time() + snapshot_start = event.get_snapshot_start() + runner = SnapshotRunner(registration_snapshot_time=registration_time, snapshot_start=snapshot_start) + logger.info(f"Execute snapshot runner for event in row {event.row_id}.") + await runner.take_snapshots(event.row_id) async def collect_snapshot_data(self): """Collect the snapshot data from EventDB.""" @@ -500,25 +510,30 @@ async def collect_snapshot_data(self): try: # fetch the stable snapshot data snapshot = await self.db.fetch_snapshot(event.row_id) - json.loads(snapshot.dbsync_snapshot_data) - logger.debug(f"snapshot:\n{snapshot}") + compressed_data: bytes | None = snapshot.dbsync_snapshot_data + if compressed_data is None: + raise Exception("dbsync snapsthot data is missing") + data = brotli.decompress(compressed_data) + json.loads(data) + logger.debug("dbsync snapshot data retrieved") except Exception as e: logger.error(f"expected snapshot:\n{e}") # gets event vote plans, raises exception if none is found. try: - voteplans = await self.db.fetch_voteplans(event.row_id) - logger.debug(f"voteplans:\n{voteplans}") + await self.db.fetch_voteplans(event.row_id) + # logger.debug(f"voteplans:\n{voteplans}") except Exception as e: logger.error(f"expected voteplans:\n{e}") try: # gets event proposals, raises exception if none is found. proposals = await self.db.fetch_proposals() - logger.debug(f"proposals:\n{proposals}") + # logger.debug(f"proposals:\n{proposals}") self.proposals = proposals - except Exception as e: - raise Exception(f"failed to fetch proposals from DB: {e}") from e + except Exception: + logger.warning("no proposals were found") + # raise Exception(f"failed to fetch proposals from DB: {e}") from e async def setup_tally_committee(self): """Fetch or create tally committee data. 
@@ -533,7 +548,7 @@ async def setup_tally_committee(self): # TODO: fetch tally committee data from secret storage try: committee = await SecretDBStorage(conn=self.db.conn()).get_committee(event.row_id) - logger.debug(f"fetched committee from storage: {committee.as_yaml()}") + logger.debug("fetched committee from storage") self.node.committee = committee except Exception as e: logger.warning(f"failed to fetch committee from storage: {e}") diff --git a/src/cat-data-service/Earthfile b/src/cat-data-service/Earthfile index ea33d70e55..73ee494b88 100644 --- a/src/cat-data-service/Earthfile +++ b/src/cat-data-service/Earthfile @@ -21,3 +21,18 @@ docker: ENTRYPOINT ["/app/entry.sh"] SAVE IMAGE --push ${registry}cat-data-service:$tag + +# Needs to be run with the -P flag +test: + FROM ../../+builder + + COPY ../../src/event-db+docker-compose/docker-compose.yml docker-compose.yml + WITH DOCKER \ + --compose docker-compose.yml \ + --pull postgres:14 \ + --load migrations:latest=(../../containers/event-db-migrations+docker --data=test) \ + --service migrations \ + --allow-privileged + RUN EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" cargo test -p cat-data-service + END + diff --git a/src/cat-data-service/README.md b/src/cat-data-service/README.md index 1b3cfac4d6..484038afcd 100644 --- a/src/cat-data-service/README.md +++ b/src/cat-data-service/README.md @@ -3,6 +3,9 @@ Catalyst event data service ## Run +Before running `cat-data-service` you will need to spin up the event-db. +You can read how to do it in this [README.md](https://github.com/input-output-hk/catalyst-core/blob/main/src/event-db/Readme.md#starting-a-local-test-db-with-docker). + Run ``` cat-data-service run --address "127.0.0.1:3031" --database-url=postgres://catalyst-event-dev@localhost/CatalystEventDev --log-level=debug diff --git a/src/cat-data-service/src/service/v1/event/mod.rs b/src/cat-data-service/src/service/v1/event/mod.rs index 8faae43731..43b879e9d1 100644 --- a/src/cat-data-service/src/service/v1/event/mod.rs +++ b/src/cat-data-service/src/service/v1/event/mod.rs @@ -61,7 +61,15 @@ async fn events_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/cat-data-service/src/service/v1/event/objective/mod.rs b/src/cat-data-service/src/service/v1/event/objective/mod.rs index 89cd0cc69b..74c0c52e9c 100644 --- a/src/cat-data-service/src/service/v1/event/objective/mod.rs +++ b/src/cat-data-service/src/service/v1/event/objective/mod.rs @@ -42,7 +42,15 @@ async fn objectives_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// 
EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/cat-data-service/src/service/v1/event/objective/proposal/mod.rs b/src/cat-data-service/src/service/v1/event/objective/proposal/mod.rs index ae179052cc..bf51bfd62f 100644 --- a/src/cat-data-service/src/service/v1/event/objective/proposal/mod.rs +++ b/src/cat-data-service/src/service/v1/event/objective/proposal/mod.rs @@ -77,7 +77,15 @@ async fn proposal_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/cat-data-service/src/service/v1/event/objective/proposal/review.rs b/src/cat-data-service/src/service/v1/event/objective/proposal/review.rs index 7a583cca50..90c30fddb9 100644 --- a/src/cat-data-service/src/service/v1/event/objective/proposal/review.rs +++ b/src/cat-data-service/src/service/v1/event/objective/proposal/review.rs @@ -41,7 +41,15 @@ async fn reviews_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/cat-data-service/src/service/v1/event/objective/review_type.rs b/src/cat-data-service/src/service/v1/event/objective/review_type.rs index 952f4f3649..d63b2c36ae 100644 --- a/src/cat-data-service/src/service/v1/event/objective/review_type.rs +++ b/src/cat-data-service/src/service/v1/event/objective/review_type.rs @@ -38,7 +38,15 @@ async fn review_types_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/cat-data-service/src/service/v1/registration.rs b/src/cat-data-service/src/service/v1/registration.rs index 0b89b88a7d..844d6ce94d 100644 --- a/src/cat-data-service/src/service/v1/registration.rs +++ b/src/cat-data-service/src/service/v1/registration.rs @@ -74,7 +74,15 @@ async fn delegations_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f 
src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/cat-data-service/src/service/v1/search.rs b/src/cat-data-service/src/service/v1/search.rs index 6e854141b4..0e108d02e4 100644 --- a/src/cat-data-service/src/service/v1/search.rs +++ b/src/cat-data-service/src/service/v1/search.rs @@ -45,7 +45,15 @@ async fn search_exec( } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/Earthfile b/src/event-db/Earthfile index 520582b93a..42fd9c360e 100644 --- a/src/event-db/Earthfile +++ b/src/event-db/Earthfile @@ -11,7 +11,28 @@ build: COPY --dir setup . # Copy the setup directory to the working directory COPY --dir migrations . # Copy the migrations directory to the working directory COPY --dir historic_data . # Copy the historic_data directory to the working directory + COPY --dir test_data . # Copy the test_data directory to the working directory SAVE ARTIFACT refinery.toml # Save the refinery.toml file as an artifact SAVE ARTIFACT setup # Save the setup directory as an artifact SAVE ARTIFACT migrations # Save the migrations directory as an artifact SAVE ARTIFACT historic_data # Save the historic_data directory as an artifact + SAVE ARTIFACT test_data # Save the test_data directory as an artifact + +docker-compose: + FROM scratch + COPY docker-compose.yml . + SAVE ARTIFACT docker-compose.yml + +# Need to be run with the -P flag +test: + FROM ../../+builder + + COPY +docker-compose/docker-compose.yml . + WITH DOCKER \ + --compose docker-compose.yml \ + --pull postgres:14 \ + --load migrations:latest=(../../containers/event-db-migrations+docker --data=test) \ + --service migrations \ + --allow-privileged + RUN EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" cargo test -p event-db + END diff --git a/src/event-db/Readme.md b/src/event-db/Readme.md index bb194af012..bae3325aa6 100644 --- a/src/event-db/Readme.md +++ b/src/event-db/Readme.md @@ -10,58 +10,26 @@ This crate defines the structure and RUST access methods for the Catalyst Event - [GraphQL Users](#graphql-users) - [Authentication API](#authentication-api) -## Starting a Local Test DB with Docker +## Starting a Local Test DB with Docker and Earthly -If you are not running postgresql-14 locally. -A test server can be run using docker-compose. +First, you will need to prepare the docker images with all migrations and data. -```sh -docker-compose -f ./setup/dev-db.docker-compose.yml up --remove-orphans -d +Prepare an event-db docker image with the historic data +(from the root directory) ``` - -This will run postgres on port `5432`, and an `adminer` UI on `localhost:8080`. 
- -## Creating A Local Test Database - -### Dependencies - -- `cargo-make`, install `cargo install cargo-make` - -- `refinery`, install `cargo install refinery_cli` - -Run the following SQL on your local test PostgreSQL server: - -```sql --- Cleanup if we already ran this before. -drop database if exists "CatalystEventDev"; -drop user if exists "catalyst-event-dev"; - --- Create the test user we will use with the local Catalyst-Event dev database. -create user "catalyst-event-dev" with password 'CHANGE_ME'; - --- Create the database. -create database "CatalystEventDev" - with owner "catalyst-event-dev"; - -comment on database "CatalystEventDev" is 'Local Test Catalyst Event DB'; +earthly ./containers/event-db-migrations+docker ``` - -Or (you need to run these scripts from the root folder) - -```sh -cargo make local-event-db-init +Prepare an event-db docker image with the test data +(from the root directory) ``` - -Execute Migrations: - -```sh -cargo make run-event-db-migration +earthly ./containers/event-db-migrations+docker --data=test ``` - -### Setup a clean new dev DB with a single command - -```sh -cargo make local-event-db-setup +Run an event-db docker container +(from the root directory) +``` +docker-compose -f src/event-db/docker-compose.yml up migrations ``` +This will run postgres on port `5432`. ## GraphQL diff --git a/src/event-db/docker-compose.yml b/src/event-db/docker-compose.yml new file mode 100644 index 0000000000..6b09caa282 --- /dev/null +++ b/src/event-db/docker-compose.yml @@ -0,0 +1,35 @@ +version: "3" + +services: + postgres: + image: postgres:14 + restart: unless-stopped + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: CatalystEventDev + healthcheck: + test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB}"] + interval: 2s + timeout: 5s + retries: 10 + ports: + - 5432:5432 + + migrations: + image: migrations:latest + environment: + # Required environment variables for migrations + - DB_HOST=postgres + - DB_PORT=5432 + - DB_NAME=CatalystEventDev + - DB_SUPERUSER=postgres + - DB_SUPERUSER_PASSWORD=postgres + - DB_USER=catalyst-event-dev + - DB_USER_PASSWORD=CHANGE_ME + - ADMIN_ROLE_PASSWORD=CHANGE_ME + - ADMIN_USER_PASSWORD=CHANGE_ME + - ANON_ROLE_PASSWORD=CHANGE_ME + depends_on: + postgres: + condition: service_healthy diff --git a/src/event-db/migrations/V2__event_tables.sql b/src/event-db/migrations/V2__event_tables.sql index 55600691ed..ea65cc3329 100644 --- a/src/event-db/migrations/V2__event_tables.sql +++ b/src/event-db/migrations/V2__event_tables.sql @@ -127,7 +127,7 @@ In this case the vote gets cast to all defined destinations. chain_id: , // Jormungandr chain id. Defaults to 0. // Other parameters TBD. }, -"caradno" : { // Voting on Cardano Directly +"cardano" : { // Voting on Cardano Directly chain_id: , // 0 = pre-prod, 1 = mainnet. // Other parameters TBD. 
}, diff --git a/src/event-db/src/lib.rs b/src/event-db/src/lib.rs index 88631cbfdd..46b930c50a 100644 --- a/src/event-db/src/lib.rs +++ b/src/event-db/src/lib.rs @@ -75,7 +75,15 @@ pub async fn establish_connection(url: Option<&str>) -> Result { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/src/queries/event/mod.rs b/src/event-db/src/queries/event/mod.rs index efb75100a2..ca728e0194 100644 --- a/src/event-db/src/queries/event/mod.rs +++ b/src/event-db/src/queries/event/mod.rs @@ -188,7 +188,15 @@ impl EventQueries for EventDB { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/src/queries/event/objective.rs b/src/event-db/src/queries/event/objective.rs index 35ad8258e3..a399e5d3ba 100644 --- a/src/event-db/src/queries/event/objective.rs +++ b/src/event-db/src/queries/event/objective.rs @@ -79,7 +79,15 @@ impl ObjectiveQueries for EventDB { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/src/queries/event/proposal.rs b/src/event-db/src/queries/event/proposal.rs index 26d4764dd9..122a44974f 100644 --- a/src/event-db/src/queries/event/proposal.rs +++ b/src/event-db/src/queries/event/proposal.rs @@ -125,7 +125,15 @@ impl ProposalQueries for EventDB { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/src/queries/event/review.rs b/src/event-db/src/queries/event/review.rs index f48eecbf9c..ff2311f0f0 100644 --- a/src/event-db/src/queries/event/review.rs +++ 
b/src/event-db/src/queries/event/review.rs @@ -143,7 +143,15 @@ impl ReviewQueries for EventDB { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/src/queries/registration.rs b/src/event-db/src/queries/registration.rs index 422593a56c..c3b1fbb50b 100644 --- a/src/event-db/src/queries/registration.rs +++ b/src/event-db/src/queries/registration.rs @@ -215,7 +215,15 @@ impl RegistrationQueries for EventDB { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/src/queries/search.rs b/src/event-db/src/queries/search.rs index 5067b1f14d..385f3ba6ea 100644 --- a/src/event-db/src/queries/search.rs +++ b/src/event-db/src/queries/search.rs @@ -246,7 +246,15 @@ impl SearchQueries for EventDB { } /// Need to setup and run a test event db instance -/// To do it you can use `cargo make local-event-db-test` +/// To do it you can use the following commands: +/// Prepare docker images +/// ``` +/// earthly ./containers/event-db-migrations+docker --data=test +/// ``` +/// Run event-db container +/// ``` +/// docker-compose -f src/event-db/docker-compose.yml up migrations +/// ``` /// Also need establish `EVENT_DB_URL` env variable with the following value /// ``` /// EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" diff --git a/src/event-db/test_data/event_table.sql b/src/event-db/test_data/01_event_table.sql similarity index 100% rename from src/event-db/test_data/event_table.sql rename to src/event-db/test_data/01_event_table.sql diff --git a/src/event-db/test_data/snapshot_table.sql b/src/event-db/test_data/02_snapshot_table.sql similarity index 100% rename from src/event-db/test_data/snapshot_table.sql rename to src/event-db/test_data/02_snapshot_table.sql diff --git a/src/event-db/test_data/voter_table.sql b/src/event-db/test_data/03_voter_table.sql similarity index 100% rename from src/event-db/test_data/voter_table.sql rename to src/event-db/test_data/03_voter_table.sql diff --git a/src/event-db/test_data/contribution_table.sql b/src/event-db/test_data/04_contribution_table.sql similarity index 100% rename from src/event-db/test_data/contribution_table.sql rename to src/event-db/test_data/04_contribution_table.sql diff --git a/src/event-db/test_data/goal_table.sql b/src/event-db/test_data/05_goal_table.sql similarity index 100% rename from src/event-db/test_data/goal_table.sql rename to src/event-db/test_data/05_goal_table.sql diff --git a/src/event-db/test_data/voting_group_table.sql 
b/src/event-db/test_data/06_voting_group_table.sql similarity index 100% rename from src/event-db/test_data/voting_group_table.sql rename to src/event-db/test_data/06_voting_group_table.sql diff --git a/src/event-db/test_data/objective_table.sql b/src/event-db/test_data/07_objective_table.sql similarity index 100% rename from src/event-db/test_data/objective_table.sql rename to src/event-db/test_data/07_objective_table.sql diff --git a/src/event-db/test_data/proposal_table.sql b/src/event-db/test_data/08_proposal_table.sql similarity index 100% rename from src/event-db/test_data/proposal_table.sql rename to src/event-db/test_data/08_proposal_table.sql diff --git a/src/event-db/test_data/proposal_review_table.sql b/src/event-db/test_data/09_proposal_review_table.sql similarity index 100% rename from src/event-db/test_data/proposal_review_table.sql rename to src/event-db/test_data/09_proposal_review_table.sql diff --git a/src/event-db/test_data/review_rating_table.sql b/src/event-db/test_data/10_review_rating_table.sql similarity index 100% rename from src/event-db/test_data/review_rating_table.sql rename to src/event-db/test_data/10_review_rating_table.sql diff --git a/src/event-db/test_data/objective_review_metric_table.sql b/src/event-db/test_data/11_objective_review_metric_table.sql similarity index 100% rename from src/event-db/test_data/objective_review_metric_table.sql rename to src/event-db/test_data/11_objective_review_metric_table.sql diff --git a/utilities/ideascale-importer/Earthfile b/utilities/ideascale-importer/Earthfile index 6cf7b9000d..de900cec2e 100644 --- a/utilities/ideascale-importer/Earthfile +++ b/utilities/ideascale-importer/Earthfile @@ -24,6 +24,7 @@ build: COPY pyproject.toml . COPY poetry.lock . COPY --dir ideascale_importer README.md ./ + COPY ideascale-importer-config.json ./ # Install package dependencies RUN poetry env use python diff --git a/utilities/ideascale-importer/docs/snapshot.md b/utilities/ideascale-importer/docs/snapshot.md index 57f326885d..41717111d7 100644 --- a/utilities/ideascale-importer/docs/snapshot.md +++ b/utilities/ideascale-importer/docs/snapshot.md @@ -18,7 +18,7 @@ In order to import snapshot data you'll need: With that you can run: ```sh -PYTHONPATH=(pwd) poetry run python ideascale_importer snapshot import --config-path PATH_TO_CONFIG_FILE --event-id EVENT_ROW_ID --database-url VITSS_DB_URL --output-dir OUTDIR_PATH +PYTHONPATH=(pwd) poetry run python ideascale_importer snapshot import --event-id EVENT_ROW_ID --database-url VITSS_DB_URL --dbsync-url DBSYNC_URL --gvc-api-url GVC_API_URL --output-dir OUTDIR_PATH ``` If everything went as expected you should have snapshot, voters and contributions data inserted to the database. 
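The CLI changes that follow attach environment-variable fallbacks to most importer options via Typer's `envvar` support, so the documented snapshot command can also be driven from the environment. A standalone sketch of that pattern (the command, option, and variable names here are illustrative, not the importer's real ones):

```python
import typer

app = typer.Typer(add_completion=False)


@app.command()
def connect(
    # Falls back to the EXAMPLE_DB_URL environment variable when the flag is omitted.
    database_url: str = typer.Option(..., envvar="EXAMPLE_DB_URL", help="Postgres database URL"),
):
    """Print which database the command would talk to."""
    typer.echo(f"would connect to {database_url}")


if __name__ == "__main__":
    app()
```

Either `EXAMPLE_DB_URL=postgres://... python example.py connect` or an explicit `--database-url` works; when both are set, the command-line flag takes precedence.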
diff --git a/utilities/ideascale-importer/ideascale_importer/cli/ideascale.py b/utilities/ideascale-importer/ideascale_importer/cli/ideascale.py index eac03b4586..265b8806b6 100644 --- a/utilities/ideascale-importer/ideascale_importer/cli/ideascale.py +++ b/utilities/ideascale-importer/ideascale_importer/cli/ideascale.py @@ -7,24 +7,27 @@ from ideascale_importer.ideascale.client import Client from ideascale_importer.ideascale.importer import Importer from ideascale_importer.utils import configure_logger +from loguru import logger app = typer.Typer(add_completion=False) @app.command() def import_all( - api_token: str = typer.Option(..., help="IdeaScale API token"), - database_url: str = typer.Option(..., help="Postgres database URL"), + api_token: str = typer.Option(..., envvar="IDEASCALE_API_TOKEN", help="IdeaScale API token"), + database_url: str = typer.Option(..., envvar="EVENTDB_URL", help="Postgres database URL"), event_id: int = typer.Option( ..., help="Database row id of the event which data will be imported", ), campaign_group_id: int = typer.Option( ..., + envvar="IDEASCALE_CAMPAIGN_GROUP", help="IdeaScale campaign group id for the event which data will be imported", ), stage_id: int = typer.Option( ..., + envvar="IDEASCALE_STAGE_ID", help="IdeaScale stage id for from which proposal data will be imported", ), proposals_scores_csv: Optional[str] = typer.Option( @@ -33,14 +36,17 @@ def import_all( ), log_level: str = typer.Option( "info", + envvar="IDEASCALE_LOG_LEVEL", help="Log level", ), log_format: str = typer.Option( "text", + envvar="IDEASCALE_LOG_FORMAT", help="Log format", ), ideascale_api_url: str = typer.Option( Client.DEFAULT_API_URL, + envvar="IDEASCALE_API_URL", help="IdeaScale API URL", ), ): @@ -69,4 +75,8 @@ async def inner( await importer.run() await importer.close() - asyncio.run(inner(event_id, campaign_group_id, stage_id, proposals_scores_csv, ideascale_api_url)) + try: + asyncio.run(inner(event_id, campaign_group_id, stage_id, proposals_scores_csv, ideascale_api_url)) + except Exception as e: + logger.error(e) + raise typer.Exit(1) diff --git a/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py b/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py index f8adcc23d1..42d6541551 100644 --- a/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py +++ b/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py @@ -5,20 +5,27 @@ from ideascale_importer.snapshot_importer import Importer from ideascale_importer.utils import configure_logger +from loguru import logger app = typer.Typer(add_completion=False) @app.command(name="import") def import_snapshot( - config_path: str = typer.Option(..., help="Path to the configuration file"), event_id: int = typer.Option(..., help="Database event id to link all snapshot data to"), - database_url: str = typer.Option(..., help="URL of the Postgres database in which to import the data to"), - output_dir: str = typer.Option(..., help="Output directory for generated files"), + database_url: str = typer.Option(..., envvar="EVENTDB_URL", help="URL of the Postgres database in which to import the data to"), + output_dir: str = typer.Option(..., envvar="SNAPSHOT_OUTPUT_DIR", help="Output directory for generated files"), network_id: str = typer.Option( ..., + envvar="SNAPSHOT_NETWORK_ID", help="Network id to pass as parameter to snapshot_tool", ), + dbsync_url: str = typer.Option(..., envvar="DBSYNC_URL", help="URL of the DBSync database in which to import the data to"), + snapshot_tool_path: str = 
typer.Option(default="snapshot_tool", envvar="SNAPSHOT_TOOL_PATH", help="Path to the snapshot tool"), + catalyst_toolbox_path: str = typer.Option( + default="catalyst-toolbox", envvar="CATALYST_TOOLBOX_PATH", help="Path to the catalyst-toolbox" + ), + gvc_api_url: str = typer.Option(..., envvar="GVC_API_URL", help="URL of the GVC API"), raw_snapshot_file: str = typer.Option( None, help=( @@ -35,26 +42,39 @@ def import_snapshot( ), log_level: str = typer.Option( "info", + envvar="SNAPSHOT_LOG_LEVEL", help="Log level", ), log_format: str = typer.Option( "text", + envvar="SNAPSHOT_LOG_FORMAT", help="Log format", ), ): """Import snapshot data into the database.""" + # Configure logger with the given parameters configure_logger(log_level, log_format) async def inner(): importer = Importer( - config_path=config_path, database_url=database_url, event_id=event_id, output_dir=output_dir, network_id=network_id, + dbsync_url=dbsync_url, + snapshot_tool_path=snapshot_tool_path, + catalyst_toolbox_path=catalyst_toolbox_path, + gvc_api_url=gvc_api_url, raw_snapshot_file=raw_snapshot_file, dreps_file=dreps_file, ) - await importer.run() + try: + await importer.run() + except Exception as e: + logger.error(e) - asyncio.run(inner()) + try: + asyncio.run(inner()) + except Exception as e: + logger.error(e) + raise typer.Exit(1) diff --git a/utilities/ideascale-importer/ideascale_importer/db/__init__.py b/utilities/ideascale-importer/ideascale_importer/db/__init__.py index bb9280317a..adfcb4c0fb 100644 --- a/utilities/ideascale-importer/ideascale_importer/db/__init__.py +++ b/utilities/ideascale-importer/ideascale_importer/db/__init__.py @@ -146,11 +146,19 @@ async def get_vote_options_id(conn: asyncpg.Connection, objective: List[str]) -> return row["id"] -async def connect(url: str) -> asyncpg.Connection: +async def connect(url: str, *args, **kwargs) -> asyncpg.Connection: """Return a connection to the database. This also sets the jsonb codec to use the json module. 
""" - conn = await asyncpg.connect(url) - await conn.set_type_codec("jsonb", encoder=json.dumps, decoder=json.loads, schema="pg_catalog") + try: + conn = await asyncpg.connect(dsn=url, *args, **kwargs) + except Exception as _: + raise Exception("Database connection failed") + + try: + await conn.set_type_codec("jsonb", encoder=json.dumps, decoder=json.loads, schema="pg_catalog") + except Exception as _: + raise Exception("Failed to set jsonb codec") + return conn diff --git a/utilities/ideascale-importer/ideascale_importer/ideascale/importer.py b/utilities/ideascale-importer/ideascale_importer/ideascale/importer.py index 6c005471bb..a307d29a2b 100644 --- a/utilities/ideascale-importer/ideascale_importer/ideascale/importer.py +++ b/utilities/ideascale-importer/ideascale_importer/ideascale/importer.py @@ -248,11 +248,11 @@ def __init__( except Exception as e: raise ReadProposalsScoresCsv(repr(e)) from e - async def connect(self): + async def connect(self, *args, **kwargs): """Connect to the database.""" if self.conn is None: logger.info("Connecting to the database") - self.conn = await ideascale_importer.db.connect(self.database_url) + self.conn = await ideascale_importer.db.connect(self.database_url, *args, **kwargs) async def close(self): """Close the connection to the database.""" diff --git a/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py b/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py index abfcb6d166..0ceea53553 100644 --- a/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py +++ b/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py @@ -7,6 +7,7 @@ from datetime import datetime import json import os +import re from typing import Dict, List, Tuple, Optional from loguru import logger import pydantic.tools @@ -21,10 +22,7 @@ class DbSyncDatabaseConfig: """Configuration for the database containing data from dbsync.""" - host: str - user: str - password: str - db: str + db_url: str @dataclass @@ -143,22 +141,40 @@ class FinalSnapshotAlreadyPresent(Exception): ... 
+class InvalidDatabaseUrl(Exception): + """Raised when the database URL is invalid.""" + + def __init__(self, db_url: str): + """Initialize the exception.""" + self.db_url = db_url + + def __str__(self): + """Return a string representation of the exception.""" + return "Invalid database URL" class Importer: """Snapshot importer.""" def __init__( self, - config_path: str, database_url: str, event_id: int, output_dir: str, network_id: str, + dbsync_url: str, + snapshot_tool_path: str, + catalyst_toolbox_path: str, + gvc_api_url: str, raw_snapshot_file: Optional[str] = None, dreps_file: Optional[str] = None, ): """Initialize the importer.""" - self.config = Config.from_json_file(config_path) + self.config = Config( + dbsync_database=DbSyncDatabaseConfig(db_url=dbsync_url), + snapshot_tool=SnapshotToolConfig(path=snapshot_tool_path), + catalyst_toolbox=CatalystToolboxConfig(path=catalyst_toolbox_path), + gvc=GvcConfig(api_url=gvc_api_url), + ) self.database_url = database_url self.event_id = event_id self.lastest_block_time: Optional[datetime] = None @@ -196,99 +212,102 @@ async def _check_preconditions(self): await conn.close() - async def _fetch_parameters(self): + async def _fetch_parameters(self, *db_args, **db_kwargs): # Fetch event parameters - conn = await ideascale_importer.db.connect(self.database_url) + try: + conn = await ideascale_importer.db.connect(self.database_url, *db_args, **db_kwargs) - row = await conn.fetchrow( - "SELECT " - "registration_snapshot_time, snapshot_start, voting_power_threshold, max_voting_power_pct " - "FROM event WHERE row_id = $1", - self.event_id, - ) - if row is None: - raise FetchParametersFailed("Failed to get event parameters from the database: " f"event_id={self.event_id} not found") - - self.voting_power_cap = row["max_voting_power_pct"] - if self.voting_power_cap is not None: - self.voting_power_cap = float(self.voting_power_cap) - - self.min_stake_threshold = row["voting_power_threshold"] - self.snapshot_start_time = row["snapshot_start"] - self.registration_snapshot_time = row["registration_snapshot_time"] - - if self.snapshot_start_time is None or self.registration_snapshot_time is None: - snapshot_start_time = None - if self.snapshot_start_time is not None: - snapshot_start_time = self.snapshot_start_time.isoformat() - - registration_snapshot_time = None - if self.registration_snapshot_time is not None: - registration_snapshot_time = self.registration_snapshot_time.isoformat() - - raise FetchParametersFailed( - "Missing snapshot timestamps for event in the database:" - f" snapshot_start={snapshot_start_time}" - f" registration_snapshot_time={registration_snapshot_time}" + row = await conn.fetchrow( + "SELECT " + "registration_snapshot_time, snapshot_start, voting_power_threshold, max_voting_power_pct " + "FROM event WHERE row_id = $1", + self.event_id, ) + if row is None: + raise FetchParametersFailed("Failed to get event parameters from the database: " f"event_id={self.event_id} not found") - logger.info( - "Got event parameters", - min_stake_threshold=self.min_stake_threshold, - voting_power_cap=self.voting_power_cap, - snapshot_start=None if self.snapshot_start_time is None else self.snapshot_start_time.isoformat(), - registration_snapshot_time=None - if self.registration_snapshot_time is None - else self.registration_snapshot_time.isoformat(), - ) + self.voting_power_cap = row["max_voting_power_pct"] + if self.voting_power_cap is not None: + self.voting_power_cap = float(self.voting_power_cap) - await conn.close() + self.min_stake_threshold 
= row["voting_power_threshold"] + self.snapshot_start_time = row["snapshot_start"] + self.registration_snapshot_time = row["registration_snapshot_time"] - if not self.skip_snapshot_tool_execution: - # Fetch max slot - conn = await ideascale_importer.db.connect( - f"postgres://{self.config.dbsync_database.user}:" - + f"{self.config.dbsync_database.password}@{self.config.dbsync_database.host}" - f"/{self.config.dbsync_database.db}" - ) + if self.snapshot_start_time is None or self.registration_snapshot_time is None: + snapshot_start_time = None + if self.snapshot_start_time is not None: + snapshot_start_time = self.snapshot_start_time.isoformat() + + registration_snapshot_time = None + if self.registration_snapshot_time is not None: + registration_snapshot_time = self.registration_snapshot_time.isoformat() - # Fetch slot number and time from the block right before or equal the registration snapshot time - row = await conn.fetchrow( - "SELECT slot_no, time FROM block WHERE time <= $1 AND slot_no IS NOT NULL ORDER BY slot_no DESC LIMIT 1", - self.registration_snapshot_time, - ) - if row is None: raise FetchParametersFailed( - "Failed to get registration snapshot block data from db_sync database: " "no data returned by the query" + "Missing snapshot timestamps for event in the database:" + f" snapshot_start={snapshot_start_time}" + f" registration_snapshot_time={registration_snapshot_time}" ) - self.registration_snapshot_slot = row["slot_no"] - self.registration_snapshot_block_time = row["time"] logger.info( - "Got registration snapshot block data", - slot_no=self.registration_snapshot_slot, - block_time=None - if self.registration_snapshot_block_time is None - else self.registration_snapshot_block_time.isoformat(), + "Got event parameters", + min_stake_threshold=self.min_stake_threshold, + voting_power_cap=self.voting_power_cap, + snapshot_start=None if self.snapshot_start_time is None else self.snapshot_start_time.isoformat(), + registration_snapshot_time=None + if self.registration_snapshot_time is None + else self.registration_snapshot_time.isoformat(), ) - row = await conn.fetchrow( - "SELECT slot_no, time FROM block WHERE slot_no IS NOT NULL ORDER BY slot_no DESC LIMIT 1", - ) - if row is None: - raise FetchParametersFailed( - "Failed to get latest block time and slot number from db_sync database:" "no data returned by the query" + await conn.close() + except Exception as e: + logger.error("Failed to fetch event parameters", exc_info=e) + raise FetchParametersFailed(str(e)) + + if not self.skip_snapshot_tool_execution: + try: + # Fetch max slot + conn = await ideascale_importer.db.connect(self.config.dbsync_database.db_url) + + # Fetch slot number and time from the block right before or equal the registration snapshot time + row = await conn.fetchrow( + "SELECT slot_no, time FROM block WHERE time <= $1 AND slot_no IS NOT NULL ORDER BY slot_no DESC LIMIT 1", + self.registration_snapshot_time, + ) + if row is None: + raise FetchParametersFailed( + "Failed to get registration snapshot block data from db_sync database: no data returned by the query" + ) + + self.registration_snapshot_slot = row["slot_no"] + self.registration_snapshot_block_time = row["time"] + logger.info( + "Got registration snapshot block data", + slot_no=self.registration_snapshot_slot, + block_time=None + if self.registration_snapshot_block_time is None + else self.registration_snapshot_block_time.isoformat(), ) - self.latest_block_slot_no = row["slot_no"] - self.lastest_block_time = row["time"] - logger.info( - "Got 
latest block data", - time=None if self.lastest_block_time is None else self.lastest_block_time.isoformat(), - slot_no=self.latest_block_slot_no, - ) + row = await conn.fetchrow( + "SELECT slot_no, time FROM block WHERE slot_no IS NOT NULL ORDER BY slot_no DESC LIMIT 1", + ) + if row is None: + raise FetchParametersFailed( + "Failed to get latest block time and slot number from db_sync database: no data returned by the query" + ) + + self.latest_block_slot_no = row["slot_no"] + self.lastest_block_time = row["time"] + logger.info( + "Got latest block data", + time=None if self.lastest_block_time is None else self.lastest_block_time.isoformat(), + slot_no=self.latest_block_slot_no, + ) - await conn.close() + await conn.close() + except Exception as e: + raise FetchParametersFailed(f"Failed to get latest block data with snapshot_tool: {e}") else: logger.info("Skipping querying max slot parameter") @@ -310,12 +329,24 @@ async def _fetch_gvc_dreps_list(self): json.dump(dataclasses.asdict(dreps_data), f) async def _run_snapshot_tool(self): + # Extract the db_user, db_pass, db_host, and db_name from the address using a regular expression + db_url = self.config.dbsync_database.db_url + match = re.match(r'^postgres:\/\/(?P[^:]+):(?P[^@]+)@(?P[^:\/]+):?([0-9]*)\/(?P[^?]+)?', db_url) + + if match is None: + raise InvalidDatabaseUrl(db_url=db_url) + + db_user = match.group('db_user') + db_pass = match.group('db_pass') + db_host = match.group('db_host') + db_name = match.group('db_name') + snapshot_tool_cmd = ( f"{self.config.snapshot_tool.path}" - f" --db-user {self.config.dbsync_database.user}" - f" --db-pass {self.config.dbsync_database.password}" - f" --db-host {self.config.dbsync_database.host}" - f" --db {self.config.dbsync_database.db}" + f" --db-user {db_user}" + f" --db-pass {db_pass}" + f" --db-host {db_host}" + f" --db {db_name}" f" --min-slot 0 --max-slot {self.registration_snapshot_slot}" f" --network-id {self.network_id}" f" --out-file {self.raw_snapshot_tool_file}" @@ -529,9 +560,9 @@ async def _write_db_data(self): voters_count=len(voters.values()), ) - async def run(self): + async def run(self, *db_args, **db_kwargs): """Take a snapshot and write the data to the database.""" - await self._fetch_parameters() + await self._fetch_parameters(*db_args, **db_kwargs) if self.dreps_file is None: await self._fetch_gvc_dreps_list() diff --git a/utilities/ideascale-importer/ideascale_importer/utils.py b/utilities/ideascale-importer/ideascale_importer/utils.py index d0a1addf46..5e5966e2cb 100644 --- a/utilities/ideascale-importer/ideascale_importer/utils.py +++ b/utilities/ideascale-importer/ideascale_importer/utils.py @@ -231,4 +231,4 @@ def configure_logger(log_level: str, log_format: str): formatter = json_logger_formatter logger.remove() - logger.add(sys.stdout, level=log_level.upper(), format=formatter, enqueue=True) \ No newline at end of file + logger.add(sys.stdout, level=log_level.upper(), format=formatter, enqueue=True)