diff --git a/.github/.jira_sync_config.yaml b/.github/.jira_sync_config.yaml
new file mode 100644
index 0000000..3dd7a38
--- /dev/null
+++ b/.github/.jira_sync_config.yaml
@@ -0,0 +1,22 @@
+# Sync GitHub issues to Jira issues
+
+# Configuration syntax:
+# https://github.com/canonical/gh-jira-sync-bot/blob/main/README.md#client-side-configuration
+settings:
+  # Repository specific settings
+  components: # Jira components that will be added to Jira issue
+    - postgresql-vm
+    - postgresql-k8s
+
+  # Settings shared across Data Platform repositories
+  label_mapping:
+    # If the GitHub issue does not have a label in this mapping, the Jira issue will be created as a Bug
+    enhancement: Story
+  jira_project_key: DPE # https://warthogs.atlassian.net/browse/DPE
+  status_mapping:
+    opened: untriaged
+    closed: done # GitHub issue closed as completed
+    not_planned: rejected # GitHub issue closed as not planned
+  add_gh_comment: true
+  sync_description: false
+  sync_comments: false
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..c54d587
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,50 @@
+---
+name: Bug report
+about: File a bug report
+labels: bug
+
+---
+
+
+
+## Steps to reproduce
+
+1.
+
+## Expected behavior
+
+
+## Actual behavior
+
+
+
+## Versions
+
+
+Operating system:
+
+
+Juju CLI:
+
+
+Juju agent:
+
+
+Charm revision:
+
+
+LXD:
+
+
+microk8s:
+
+## Log output
+
+
+Juju debug log:
+
+
+
+
+## Additional context
+
diff --git a/.github/codecov.yml b/.github/codecov.yml
new file mode 100644
index 0000000..76d0f33
--- /dev/null
+++ b/.github/codecov.yml
@@ -0,0 +1,10 @@
+github_checks:
+  annotations: false
+coverage:
+  status:
+    project:
+      default:
+        target: 70%
+    patch:
+      default:
+        target: 33%
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000..9a15830
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,7 @@
+## Issue
+
+## Solution
+
+## Checklist
+- [ ] I have added or updated any relevant documentation.
+- [ ] I have cleaned any remaining cloud resources from my accounts.
diff --git a/.github/renovate.json5 b/.github/renovate.json5
new file mode 100644
index 0000000..fa5d940
--- /dev/null
+++ b/.github/renovate.json5
@@ -0,0 +1,13 @@
+{
+  $schema: 'https://docs.renovatebot.com/renovate-schema.json',
+  extends: [
+    'github>canonical/data-platform//renovate_presets/charm.json5',
+  ],
+  reviewers: [
+    'team:data-postgresql',
+  ],
+  packageRules: [
+  ],
+  customManagers: [
+  ],
+}
diff --git a/.github/workflows/approve_renovate_pr.yaml b/.github/workflows/approve_renovate_pr.yaml
new file mode 100644
index 0000000..214ad51
--- /dev/null
+++ b/.github/workflows/approve_renovate_pr.yaml
@@ -0,0 +1,15 @@
+# Copyright 2025 Canonical Ltd.
+# See LICENSE file for licensing details.
+name: Approve Renovate pull request
+
+on:
+  pull_request:
+    types:
+      - opened
+
+jobs:
+  approve-pr:
+    name: Approve Renovate pull request
+    uses: canonical/data-platform-workflows/.github/workflows/approve_renovate_pr.yaml@v35.0.2
+    permissions:
+      pull-requests: write # Needed to approve PR
diff --git a/.github/workflows/check_pr.yaml b/.github/workflows/check_pr.yaml
new file mode 100644
index 0000000..69de417
--- /dev/null
+++ b/.github/workflows/check_pr.yaml
@@ -0,0 +1,20 @@
+# Copyright 2025 Canonical Ltd.
+# See LICENSE file for licensing details.
+name: Check pull request
+
+on:
+  pull_request:
+    types:
+      - opened
+      - labeled
+      - unlabeled
+      - edited
+    branches:
+      - main
+
+permissions:
+  pull-requests: read # Needed to check labels
+jobs:
+  check-pr:
+    name: Check pull request
+    uses: canonical/data-platform-workflows/.github/workflows/check_charm_pr.yaml@v35.0.2
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..0792987
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,63 @@
+# Copyright 2025 Canonical Ltd.
+# See LICENSE file for licensing details.
+name: Tests
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+on:
+  pull_request:
+    paths-ignore:
+      - '.gitignore'
+      - '.jujuignore'
+      - 'LICENSE'
+      - '**.md'
+      - .github/renovate.json5
+      - '.github/workflows/sync_docs.yaml'
+  schedule:
+    - cron: '53 0 * * *' # Daily at 00:53 UTC
+  # Triggered on push to branch "main" by .github/workflows/release.yaml
+  workflow_call:
+    outputs:
+      artifact-prefix:
+        description: build_charm.yaml `artifact-prefix` output
+        value: ${{ jobs.build.outputs.artifact-prefix }}
+
+permissions: {}
+jobs:
+  # TODO install uv in reusable and re-enable
+  # lint:
+  #   name: Lint
+  #   uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v31.0.1
+  lint:
+    name: tox run -e lint
+    runs-on: ubuntu-latest
+    timeout-minutes: 5
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v5
+      - name: Install tox & uv
+        run: |
+          pipx install tox
+          sudo snap install astral-uv --classic
+      - name: Run linters
+        run: tox run -e lint
+
+  unit-test:
+    name: Unit test charm
+    runs-on: ubuntu-latest
+    timeout-minutes: 5
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v5
+      - name: Install tox & uv
+        run: |
+          pipx install tox
+          sudo snap install astral-uv --classic
+      - name: Run tests
+        run: tox run -e unit
+      - name: Upload Coverage to Codecov
+        uses: codecov/codecov-action@v5
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a0a0e29
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,12 @@
+venv/
+build/
+*.charm
+.tox/
+.coverage
+coverage.xml
+__pycache__/
+*.py[cod]
+*.ini
+*.log
+*.tar.xz
+postgresql_charms_single_kernel.egg-info/
diff --git a/pyproject.toml b/pyproject.toml
index 89a57f2..a05686d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -26,7 +26,8 @@ format = [
     "ruff==0.12.11"
 ]
 lint = [
-    "codespell==2.4.1"
+    "codespell==2.4.1",
+    "pyright==1.1.405"
 ]
 unit = [
     "coverage[toml]==7.9.1; python_version > '3.8'",
@@ -96,3 +97,13 @@ max-complexity = 10

 [tool.ruff.lint.pydocstyle]
 convention = "google"
+
+[tool.pyright]
+include = ["single_kernel_postgresql"]
+pythonVersion = "3.8"
+pythonPlatform = "All"
+typeCheckingMode = "basic"
+reportIncompatibleMethodOverride = false
+reportImportCycles = false
+reportMissingModuleSource = true
+stubPath = ""
diff --git a/single_kernel_postgresql/utils/postgresql.py b/single_kernel_postgresql/utils/postgresql.py
index f9466c2..37962b9 100644
--- a/single_kernel_postgresql/utils/postgresql.py
+++ b/single_kernel_postgresql/utils/postgresql.py
@@ -408,7 +408,7 @@ def create_user(
             raise PostgreSQLCreateUserError() from e

     def _adjust_user_definition(
-        self, user: str, roles: Optional[List[str]], database: str, user_definition: str
+        self, user: str, roles: Optional[List[str]], database: Optional[str], user_definition: str
     ) -> Tuple[str, List[str]]:
         """Adjusts the user definition to include additional statements.

@@ -453,7 +453,7 @@ def _adjust_user_definition(

     def _process_extra_user_roles(
         self, user: str, extra_user_roles: Optional[List[str]] = None
-    ) -> Tuple[Optional[List[str]], Optional[List[str]]]:
+    ) -> Tuple[Optional[List[str]], Optional[Set[str]]]:
         # Separate roles and privileges from the provided extra user roles.
         roles = privileges = None
         if extra_user_roles:
@@ -489,7 +489,7 @@ def _process_extra_user_roles(
             privileges = {
                 extra_user_role
                 for extra_user_role in extra_user_roles
-                if extra_user_role not in roles
+                if extra_user_role and extra_user_role not in roles
             }
             invalid_privileges = [
                 privilege for privilege in privileges if privilege not in valid_privileges
@@ -661,8 +661,8 @@ def grant_replication_privileges(
         self,
         user: str,
         database: str,
-        schematables: list[str],
-        old_schematables: list[str] | None = None,
+        schematables: List[str],
+        old_schematables: Optional[List[str]] = None,
     ) -> None:
         """Grant CONNECT privilege on database and SELECT privilege on tables.

@@ -705,7 +705,7 @@ def grant_replication_privileges(
                 connection.close()

     def revoke_replication_privileges(
-        self, user: str, database: str, schematables: list[str]
+        self, user: str, database: str, schematables: List[str]
     ) -> None:
         """Revoke all privileges from tables and database.

@@ -792,8 +792,9 @@ def get_last_archived_wal(self) -> str:
         """Get the name of the last archived wal for the current PostgreSQL cluster."""
         try:
             with self._connect_to_database() as connection, connection.cursor() as cursor:
+                # Should always be present
                 cursor.execute("SELECT last_archived_wal FROM pg_stat_archiver;")
-                return cursor.fetchone()[0]
+                return cursor.fetchone()[0]  # type: ignore
         except psycopg2.Error as e:
             logger.error(f"Failed to get PostgreSQL last archived WAL: {e}")
             raise PostgreSQLGetLastArchivedWALError() from e
@@ -803,7 +804,8 @@ def get_current_timeline(self) -> str:
         try:
             with self._connect_to_database() as connection, connection.cursor() as cursor:
                 cursor.execute("SELECT timeline_id FROM pg_control_checkpoint();")
-                return cursor.fetchone()[0]
+                # There should always be a timeline
+                return cursor.fetchone()[0]  # type: ignore
         except psycopg2.Error as e:
             logger.error(f"Failed to get PostgreSQL current timeline id: {e}")
             raise PostgreSQLGetCurrentTimelineError() from e
@@ -859,8 +861,8 @@ def get_postgresql_version(self, current_host=True) -> str:
                 database_host=host
             ) as connection, connection.cursor() as cursor:
                 cursor.execute("SELECT version();")
-                # Split to get only the version number.
-                return cursor.fetchone()[0].split(" ")[1]
+                # Split to get only the version number. There should always be a version.
+                return cursor.fetchone()[0].split(" ")[1]  # type:ignore
         except psycopg2.Error as e:
             logger.error(f"Failed to get PostgreSQL version: {e}")
             raise PostgreSQLGetPostgreSQLVersionError() from e
@@ -880,7 +882,8 @@ def is_tls_enabled(self, check_current_host: bool = False) -> bool:
                 database_host=self.current_host if check_current_host else None
             ) as connection, connection.cursor() as cursor:
                 cursor.execute("SHOW ssl;")
-                return "on" in cursor.fetchone()[0]
+                # SSL state should always be set
+                return "on" in cursor.fetchone()[0]  # type: ignore
         except psycopg2.Error:
             # Connection errors happen when PostgreSQL has not started yet.
             return False
@@ -1378,7 +1381,9 @@ def is_table_empty(self, db: str, schema: str, table: str) -> bool:
             connection = self._connect_to_database(database=db)
             with connection, connection.cursor() as cursor:
                 cursor.execute(SQL("SELECT COUNT(1) FROM {};").format(Identifier(schema, table)))
-                return cursor.fetchone()[0] == 0
+                if result := cursor.fetchone():
+                    return result[0] == 0
+                return True
         except psycopg2.Error as e:
             logger.error(f"Failed to check whether table is empty: {e}")
             raise PostgreSQLIsTableEmptyError() from e
@@ -1386,7 +1391,7 @@ def is_table_empty(self, db: str, schema: str, table: str) -> bool:
             if connection:
                 connection.close()

-    def create_publication(self, db: str, name: str, schematables: list[str]) -> None:
+    def create_publication(self, db: str, name: str, schematables: List[str]) -> None:
         """Create PostgreSQL publication."""
         connection = None
         try:
@@ -1427,7 +1432,7 @@ def publication_exists(self, db: str, publication: str) -> bool:
             if connection:
                 connection.close()

-    def alter_publication(self, db: str, name: str, schematables: list[str]) -> None:
+    def alter_publication(self, db: str, name: str, schematables: List[str]) -> None:
         """Alter PostgreSQL publication."""
         connection = None
         try:
@@ -1715,11 +1720,11 @@ def validate_group_map(self, group_map: Optional[str]) -> bool:
             return True

         try:
-            group_map = self.build_postgresql_group_map(group_map)
+            parsed_group_map = self.build_postgresql_group_map(group_map)
         except ValueError:
             return False

-        for _, psql_group in group_map:
+        for _, psql_group in parsed_group_map:
             with self._connect_to_database() as connection, connection.cursor() as cursor:
                 query = SQL("SELECT TRUE FROM pg_roles WHERE rolname={};")
                 query = query.format(Literal(psql_group))
@@ -1740,7 +1745,9 @@ def is_user_in_hba(self, username: str) -> bool:
                         "SELECT COUNT(*) FROM pg_hba_file_rules WHERE {} = ANY(user_name);"
                     ).format(Literal(username))
                 )
-                return cursor.fetchone()[0] > 0
+                if result := cursor.fetchone():
+                    return result[0] > 0
+                return False
         except psycopg2.Error as e:
             logger.debug(f"Failed to check pg_hba: {e}")
             return False
diff --git a/tox.ini b/tox.ini
index 9260ff5..08e92d7 100644
--- a/tox.ini
+++ b/tox.ini
@@ -31,7 +31,7 @@ commands =
 [testenv:lint]
 description = Check code against coding style standards
 commands_pre =
-    uv --config-file=tox_uv.toml sync --active --group lint --group format
+    uv --config-file=tox_uv.toml sync --active --group lint --group format --group lib
 commands =
     uv lock --check
     uv run --active codespell "{tox_root}" --skip "{tox_root}/.git" --skip "{tox_root}/.tox" \
@@ -39,6 +39,8 @@ commands =
         --skip "{tox_root}/LICENSE" --skip "{tox_root}/uv.lock"
     uv run --active ruff check {[vars]all_path}
     uv run --active ruff format --check --diff {[vars]all_path}
+    # run last because it's slowest
+    uv run --active pyright

 [testenv:unit]
 description = Run unit tests
diff --git a/uv.lock b/uv.lock
index d85c0f0..0e7dc56 100644
--- a/uv.lock
+++ b/uv.lock
@@ -169,6 +169,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
 ]

+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
+]
+
 [[package]]
 name = "opentelemetry-api"
 version = "1.33.1"
@@ -289,6 +298,7 @@ lib = [
 ]
 lint = [
     { name = "codespell" },
+    { name = "pyright" },
 ]
 unit = [
     { name = "coverage", extra = ["toml"], marker = "python_full_version >= '3.9'" },
@@ -304,7 +314,10 @@ lib = [
     { name = "ops", specifier = ">=2.0.0" },
     { name = "psycopg2", specifier = ">=2.9.10" },
 ]
-lint = [{ name = "codespell", specifier = "==2.4.1" }]
+lint = [
+    { name = "codespell", specifier = "==2.4.1" },
+    { name = "pyright", specifier = "==1.1.405" },
+]
 unit = [
     { name = "coverage", extras = ["toml"], marker = "python_full_version >= '3.9'", specifier = "==7.9.1" },
     { name = "pytest", marker = "python_full_version < '3.9'", specifier = ">=8.3.5" },
@@ -337,6 +350,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
 ]

+[[package]]
+name = "pyright"
+version = "1.1.405"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "nodeenv" },
+    { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
+    { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" },
+]
+
 [[package]]
 name = "pytest"
 version = "8.3.5"