[DPE-8030] Initial CI #3
Changes from all commits
aa96d99
40b2993
5b5789a
bfab85b
9cf1dfb
74b70c8
2ab81cd

@@ -0,0 +1,22 @@
# Sync GitHub issues to Jira issues

# Configuration syntax:
# https://github.com/canonical/gh-jira-sync-bot/blob/main/README.md#client-side-configuration
settings:
  # Repository specific settings
  components: # Jira components that will be added to Jira issue
    - postgresql-vm
    - postgresql-k8s

  # Settings shared across Data Platform repositories
  label_mapping:
    # If the GitHub issue does not have a label in this mapping, the Jira issue will be created as a Bug
    enhancement: Story
  jira_project_key: DPE # https://warthogs.atlassian.net/browse/DPE
  status_mapping:
    opened: untriaged
    closed: done # GitHub issue closed as completed
    not_planned: rejected # GitHub issue closed as not planned
  add_gh_comment: true
  sync_description: false
  sync_comments: false
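
As a rough illustration of how the mapping above could be consumed (a hypothetical sketch, not the gh-jira-sync-bot implementation; the helper names and fallback behaviour are assumptions based on the comments in the config):

```python
# Hypothetical sketch: how label_mapping / status_mapping might translate
# GitHub issue data into Jira fields. Not gh-jira-sync-bot code.
LABEL_MAPPING = {"enhancement": "Story"}
STATUS_MAPPING = {"opened": "untriaged", "closed": "done", "not_planned": "rejected"}


def jira_issue_type(github_labels):
    # Per the config comment, an issue without a mapped label becomes a Bug.
    for label in github_labels:
        if label in LABEL_MAPPING:
            return LABEL_MAPPING[label]
    return "Bug"


def jira_status(github_state):
    return STATUS_MAPPING.get(github_state, "untriaged")


print(jira_issue_type(["enhancement"]))    # Story
print(jira_issue_type(["documentation"]))  # Bug (no mapping entry)
print(jira_status("not_planned"))          # rejected
```
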

@@ -0,0 +1,50 @@
---
name: Bug report
about: File a bug report
labels: bug

---

<!-- Thank you for submitting a bug report! All fields are required unless marked optional. -->

## Steps to reproduce
<!-- Please enable debug logging by running `juju model-config logging-config="<root>=INFO;unit=DEBUG"` (if possible) -->
1.

## Expected behavior


## Actual behavior
<!-- If applicable, add screenshots -->


## Versions

<!-- Run `lsb_release -sd` -->
Operating system:

<!-- Run `juju version` -->
Juju CLI:

<!-- Model version from `juju status` -->
Juju agent:

<!-- App revision from `juju status` or (advanced) commit hash -->
Charm revision:

<!-- Run `lxd version` -->
LXD:

<!-- Run `microk8s version` -->
microk8s:

## Log output
<!-- Please enable debug logging by running `juju model-config logging-config="<root>=INFO;unit=DEBUG"` (if possible) -->
<!-- Then, run `juju debug-log --replay > log.txt` and upload "log.txt" file here -->
Juju debug log:

<!-- (Optional) Copy the logs that are relevant to the bug & paste inside triple backticks below -->


## Additional context
<!-- (Optional) Add any additional information here -->

@@ -0,0 +1,10 @@
github_checks:
  annotations: false
coverage:
  status:
    project:
      default:
        target: 70%
    patch:
      default:
        target: 33%

@@ -0,0 +1,7 @@
## Issue

## Solution

## Checklist
- [ ] I have added or updated any relevant documentation.
- [ ] I have cleaned any remaining cloud resources from my accounts.

@@ -0,0 +1,13 @@
{
  $schema: 'https://docs.renovatebot.com/renovate-schema.json',
  extends: [
    'github>canonical/data-platform//renovate_presets/charm.json5',
  ],
  reviewers: [
    'team:data-postgresql',
  ],
  packageRules: [
  ],
  customManagers: [
  ],
}

@@ -0,0 +1,15 @@
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.
name: Approve Renovate pull request

on:
  pull_request:
    types:
      - opened

jobs:
  approve-pr:
    name: Approve Renovate pull request
    uses: canonical/data-platform-workflows/.github/workflows/approve_renovate_pr.yaml@v35.0.2
    permissions:
      pull-requests: write # Needed to approve PR

@@ -0,0 +1,20 @@
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.
name: Check pull request

on:
  pull_request:
    types:
      - opened
      - labeled
      - unlabeled
      - edited
    branches:
      - main

permissions:
  pull-requests: read # Needed to check labels
jobs:
  check-pr:
    name: Check pull request
    uses: canonical/data-platform-workflows/.github/workflows/check_charm_pr.yaml@v35.0.2

@@ -0,0 +1,63 @@
# Copyright 2025 Canonical Ltd.
# See LICENSE file for licensing details.
name: Tests

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

on:
  pull_request:
    paths-ignore:
      - '.gitignore'
      - '.jujuignore'
      - 'LICENSE'
      - '**.md'
      - .github/renovate.json5
      - '.github/workflows/sync_docs.yaml'
  schedule:
    - cron: '53 0 * * *' # Daily at 00:53 UTC
  # Triggered on push to branch "main" by .github/workflows/release.yaml
  workflow_call:
    outputs:
      artifact-prefix:
        description: build_charm.yaml `artifact-prefix` output
        value: ${{ jobs.build.outputs.artifact-prefix }}

permissions: {}
jobs:
  # TODO install uv in reusable and re-enable
  # lint:
  #   name: Lint
  #   uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v31.0.1
  lint:
    name: tox run -e lint
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - name: Checkout
        uses: actions/checkout@v5
      - name: Install tox & uv
        run: |
          pipx install tox
          sudo snap install astral-uv --classic
      - name: Run linters
        run: tox run -e lint

  unit-test:
    name: Unit test charm
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - name: Checkout
        uses: actions/checkout@v5
      - name: Install tox & uv
        run: |
          pipx install tox
          sudo snap install astral-uv --classic
      - name: Run tests
        run: tox run -e unit
      - name: Upload Coverage to Codecov
        uses: codecov/codecov-action@v5
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

@@ -0,0 +1,12 @@
venv/
build/
*.charm
.tox/
.coverage
coverage.xml
__pycache__/
*.py[cod]
*.ini
*.log
*.tar.xz
postgresql_charms_single_kernel.egg-info/

@@ -26,7 +26,8 @@ format = [
     "ruff==0.12.11"
 ]
 lint = [
-    "codespell==2.4.1"
+    "codespell==2.4.1",
+    "pyright==1.1.405"
 ]
 unit = [
     "coverage[toml]==7.9.1; python_version > '3.8'",
@@ -96,3 +97,13 @@ max-complexity = 10
 
 [tool.ruff.lint.pydocstyle]
 convention = "google"
+
+[tool.pyright]
+include = ["single_kernel_postgresql"]
+pythonVersion = "3.8"
+pythonPlatform = "All"
+typeCheckingMode = "basic"
+reportIncompatibleMethodOverride = false
+reportImportCycles = false
+reportMissingModuleSource = true
+stubPath = ""

Review comment on include = ["single_kernel_postgresql"]: "Ignore the tests."

@@ -408,7 +408,7 @@ def create_user(
             raise PostgreSQLCreateUserError() from e
 
     def _adjust_user_definition(
-        self, user: str, roles: Optional[List[str]], database: str, user_definition: str
+        self, user: str, roles: Optional[List[str]], database: Optional[str], user_definition: str
     ) -> Tuple[str, List[str]]:
         """Adjusts the user definition to include additional statements.
 
@@ -453,7 +453,7 @@ def _adjust_user_definition(
 
     def _process_extra_user_roles(
         self, user: str, extra_user_roles: Optional[List[str]] = None
-    ) -> Tuple[Optional[List[str]], Optional[List[str]]]:
+    ) -> Tuple[Optional[List[str]], Optional[Set[str]]]:
         # Separate roles and privileges from the provided extra user roles.
         roles = privileges = None
         if extra_user_roles:
@@ -489,7 +489,7 @@ def _process_extra_user_roles(
             privileges = {
                 extra_user_role
                 for extra_user_role in extra_user_roles
-                if extra_user_role not in roles
+                if extra_user_role and extra_user_role not in roles
             }
             invalid_privileges = [
                 privilege for privilege in privileges if privilege not in valid_privileges
@@ -661,8 +661,8 @@ def grant_replication_privileges(
         self,
         user: str,
         database: str,
-        schematables: list[str],
-        old_schematables: list[str] | None = None,
+        schematables: List[str],
+        old_schematables: Optional[List[str]] = None,
     ) -> None:
         """Grant CONNECT privilege on database and SELECT privilege on tables.
 
@@ -705,7 +705,7 @@ def grant_replication_privileges(
                 connection.close()
 
     def revoke_replication_privileges(
-        self, user: str, database: str, schematables: list[str]
+        self, user: str, database: str, schematables: List[str]
     ) -> None:
         """Revoke all privileges from tables and database.
 
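
The `list[str]` to `List[str]` and `| None` to `Optional[...]` changes in these hunks line up with the `pythonVersion = "3.8"` pyright setting added in pyproject.toml: the built-in generic (PEP 585) and union (PEP 604) spellings only evaluate at runtime on Python 3.9+ and 3.10+ respectively. A minimal sketch of the 3.8-safe style, with a placeholder body rather than the library implementation:

```python
# Python 3.8-compatible annotations come from the typing module; list[str]
# and List[str] | None would raise at runtime on 3.8 unless annotations are
# deferred with `from __future__ import annotations`.
from typing import List, Optional


def grant_replication_privileges(
    user: str,
    database: str,
    schematables: List[str],
    old_schematables: Optional[List[str]] = None,
) -> None:
    """Placeholder body; the real method lives in the library."""
```
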

@@ -792,8 +792,9 @@ def get_last_archived_wal(self) -> str:
         """Get the name of the last archived wal for the current PostgreSQL cluster."""
         try:
             with self._connect_to_database() as connection, connection.cursor() as cursor:
+                # Should always be present
                 cursor.execute("SELECT last_archived_wal FROM pg_stat_archiver;")
-                return cursor.fetchone()[0]
+                return cursor.fetchone()[0]  # type: ignore
         except psycopg2.Error as e:
             logger.error(f"Failed to get PostgreSQL last archived WAL: {e}")
             raise PostgreSQLGetLastArchivedWALError() from e

Review thread on this hunk: "Would this always return a value?" / "I'm not sure."

@@ -803,7 +804,8 @@ def get_current_timeline(self) -> str:
         try:
             with self._connect_to_database() as connection, connection.cursor() as cursor:
                 cursor.execute("SELECT timeline_id FROM pg_control_checkpoint();")
-                return cursor.fetchone()[0]
+                # There should always be a timeline
+                return cursor.fetchone()[0]  # type: ignore
         except psycopg2.Error as e:
             logger.error(f"Failed to get PostgreSQL current timeline id: {e}")
             raise PostgreSQLGetCurrentTimelineError() from e
@@ -859,8 +861,8 @@ def get_postgresql_version(self, current_host=True) -> str:
                 database_host=host
             ) as connection, connection.cursor() as cursor:
                 cursor.execute("SELECT version();")
-                # Split to get only the version number.
-                return cursor.fetchone()[0].split(" ")[1]
+                # Split to get only the version number. There should always be a version.
+                return cursor.fetchone()[0].split(" ")[1]  # type:ignore
         except psycopg2.Error as e:
             logger.error(f"Failed to get PostgreSQL version: {e}")
             raise PostgreSQLGetPostgreSQLVersionError() from e
@@ -880,7 +882,8 @@ def is_tls_enabled(self, check_current_host: bool = False) -> bool:
                 database_host=self.current_host if check_current_host else None
             ) as connection, connection.cursor() as cursor:
                 cursor.execute("SHOW ssl;")
-                return "on" in cursor.fetchone()[0]
+                # SSL state should always be set
+                return "on" in cursor.fetchone()[0]  # type: ignore
         except psycopg2.Error:
             # Connection errors happen when PostgreSQL has not started yet.
             return False
@@ -1378,15 +1381,17 @@ def is_table_empty(self, db: str, schema: str, table: str) -> bool:
             connection = self._connect_to_database(database=db)
             with connection, connection.cursor() as cursor:
                 cursor.execute(SQL("SELECT COUNT(1) FROM {};").format(Identifier(schema, table)))
-                return cursor.fetchone()[0] == 0
+                if result := cursor.fetchone():
+                    return result[0] == 0
+                return True
         except psycopg2.Error as e:
             logger.error(f"Failed to check whether table is empty: {e}")
             raise PostgreSQLIsTableEmptyError() from e
         finally:
             if connection:
                 connection.close()
 
-    def create_publication(self, db: str, name: str, schematables: list[str]) -> None:
+    def create_publication(self, db: str, name: str, schematables: List[str]) -> None:
         """Create PostgreSQL publication."""
         connection = None
         try:
@@ -1427,7 +1432,7 @@ def publication_exists(self, db: str, publication: str) -> bool:
             if connection:
                 connection.close()
 
-    def alter_publication(self, db: str, name: str, schematables: list[str]) -> None:
+    def alter_publication(self, db: str, name: str, schematables: List[str]) -> None:
         """Alter PostgreSQL publication."""
         connection = None
         try:
@@ -1715,11 +1720,11 @@ def validate_group_map(self, group_map: Optional[str]) -> bool:
             return True
 
         try:
-            group_map = self.build_postgresql_group_map(group_map)
+            parsed_group_map = self.build_postgresql_group_map(group_map)
         except ValueError:
             return False
 
-        for _, psql_group in group_map:
+        for _, psql_group in parsed_group_map:
             with self._connect_to_database() as connection, connection.cursor() as cursor:
                 query = SQL("SELECT TRUE FROM pg_roles WHERE rolname={};")
                 query = query.format(Literal(psql_group))
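
The rename to `parsed_group_map` avoids re-binding a parameter declared as `Optional[str]` to a value of a different type, which basic type checking reports. A stripped-down, hypothetical reproduction (the "ldap_group=psql_group,..." syntax is assumed purely for illustration):

```python
# Hypothetical reduction of the re-binding fix: the parameter keeps its
# Optional[str] type and the parsed result gets its own name.
from typing import List, Optional, Tuple


def build_group_map(raw: str) -> List[Tuple[str, str]]:
    pairs = []
    for item in raw.split(","):
        ldap_group, _, psql_group = item.partition("=")
        pairs.append((ldap_group, psql_group))
    return pairs


def validate_group_map(group_map: Optional[str]) -> bool:
    if group_map is None:
        return True
    # Assigning back to `group_map` would conflict with its declared type.
    parsed_group_map = build_group_map(group_map)
    return all(psql_group for _, psql_group in parsed_group_map)


print(validate_group_map("admins=pg_admins,devs=pg_devs"))  # True
```
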

@@ -1740,7 +1745,9 @@ def is_user_in_hba(self, username: str) -> bool:
                        "SELECT COUNT(*) FROM pg_hba_file_rules WHERE {} = ANY(user_name);"
                    ).format(Literal(username))
                )
-                return cursor.fetchone()[0] > 0
+                if result := cursor.fetchone():
+                    return result[0] > 0
+                return False
         except psycopg2.Error as e:
             logger.debug(f"Failed to check pg_hba: {e}")
             return False
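
Several hunks above address the same checker finding: `cursor.fetchone()` is typed as returning an optional row, so indexing the result directly is flagged. The diff keeps the direct index with `# type: ignore` where a row is guaranteed (for example `SELECT version();`) and guards with a walrus check in `is_table_empty` and `is_user_in_hba`. A self-contained sketch of the guarded pattern, using sqlite3 instead of psycopg2 so it runs without a server (`fetch_scalar` is a hypothetical helper, not library code):

```python
# Guarded-fetchone pattern: handle the possible None row instead of silencing
# the type checker. sqlite3 stands in for psycopg2 here.
import sqlite3
from typing import Optional


def fetch_scalar(query: str) -> Optional[int]:
    connection = sqlite3.connect(":memory:")
    try:
        cursor = connection.cursor()
        cursor.execute(query)
        # fetchone() may return None, so guard before indexing.
        if row := cursor.fetchone():
            return row[0]
        return None
    finally:
        connection.close()


print(fetch_scalar("SELECT 42"))  # 42
```
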

Review thread: "Should we also add microk8s here?" / "Added."