4 changes: 2 additions & 2 deletions .github/workflows/integration_test.yaml
@@ -84,10 +84,10 @@ jobs:
needs:
- collect-integration-tests
runs-on: ${{ matrix.job.runner }}
-        timeout-minutes: 217  # Sum of steps `timeout-minutes` + 5
+        timeout-minutes: 226  # Sum of steps `timeout-minutes` + 5
steps:
- name: Free up disk space
-        timeout-minutes: 1
+        timeout-minutes: 10
run: |
printf '\nDisk usage before cleanup\n'
df --human-readable
32 changes: 18 additions & 14 deletions src/backups.py
@@ -95,11 +95,13 @@ def _tls_ca_chain_filename(self) -> str:
return ""

def _get_s3_session_resource(self, s3_parameters: dict):
-        session = Session(
-            aws_access_key_id=s3_parameters["access-key"],
-            aws_secret_access_key=s3_parameters["secret-key"],
-            region_name=s3_parameters["region"],
-        )
+        kwargs = {
+            "aws_access_key_id": s3_parameters["access-key"],
+            "aws_secret_access_key": s3_parameters["secret-key"],
+        }
+        if "region" in s3_parameters:
+            kwargs["region_name"] = s3_parameters["region"]
+        session = Session(**kwargs)
return session.resource(
"s3",
endpoint_url=self._construct_endpoint(s3_parameters),
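Why the guard matters: with the setdefault("region") call removed later in this PR, the "region" key may be absent from s3_parameters entirely, so indexing it would raise KeyError. A minimal sketch of the two dict shapes the helper now accepts (key names mirror the charm's s3_parameters; the values are made up):

with_region = {"access-key": "k", "secret-key": "s", "region": "us-east-1"}
without_region = {"access-key": "k", "secret-key": "s"}
for params in (with_region, without_region):
    kwargs = {"region_name": params["region"]} if "region" in params else {}
    print(kwargs)  # {'region_name': 'us-east-1'}, then {}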
@@ -195,9 +197,12 @@ def can_use_s3_repository(self) -> tuple[bool, str]:
return False, FAILED_TO_INITIALIZE_STANZA_ERROR_MESSAGE

for stanza in json.loads(output):
-            if stanza.get("name") != self.stanza_name:
+            if (stanza_name := stanza.get("name")) and stanza_name == "[invalid]":
+                logger.error("Invalid stanza name from s3")
+                return False, FAILED_TO_INITIALIZE_STANZA_ERROR_MESSAGE
+            if stanza_name != self.stanza_name:
                 logger.debug(
-                    f"can_use_s3_repository: incompatible stanza name s3={stanza.get('name', '')}, local={self.stanza_name}"
+                    f"can_use_s3_repository: incompatible stanza name s3={stanza_name or ''}, local={self.stanza_name}"
)
return False, ANOTHER_CLUSTER_REPOSITORY_ERROR_MESSAGE
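For context, "[invalid]" is the stanza name this branch guards against, as exercised by the new unit test added to tests/unit/test_backups.py below. A hedged sketch of the parsing (the JSON shape is copied from that test, not from live pgbackrest output):

import json

output = '[{"db": [{"system-id": "12345"}], "name": "[invalid]"}]'
for stanza in json.loads(output):
    name = stanza.get("name")
    print(name == "[invalid]")  # True -> repository is rejected early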

@@ -237,7 +242,7 @@ def _construct_endpoint(self, s3_parameters: dict) -> str:

# Construct the endpoint using the region.
resolver = EndpointResolver(data)
-        endpoint_data = resolver.construct_endpoint("s3", s3_parameters["region"])
+        endpoint_data = resolver.construct_endpoint("s3", s3_parameters.get("region"))

# Use the built endpoint if it is an AWS endpoint.
if endpoint_data and endpoint.endswith(endpoint_data["dnsSuffix"]):
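A rough check of the fallback behaviour (a sketch assuming botocore's bundled endpoints.json: with region_name=None, s3 can still resolve through the partition's global endpoint, so .get("region") avoids a KeyError when no region is configured):

from botocore.loaders import create_loader
from botocore.regions import EndpointResolver

data = create_loader().load_data("endpoints")
resolver = EndpointResolver(data)
endpoint_data = resolver.construct_endpoint("s3", None)  # region omitted
print(endpoint_data["dnsSuffix"] if endpoint_data else "no match")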
@@ -251,7 +256,7 @@ def _create_bucket_if_not_exists(self) -> None:
return

bucket_name = s3_parameters["bucket"]
-        region = s3_parameters.get("region")
+        region = s3_parameters.get("region", "")

try:
s3 = self._get_s3_session_resource(s3_parameters)
@@ -570,8 +575,8 @@ def _initialise_stanza(self, event: HookEvent) -> bool:
f"--stanza={self.stanza_name}",
"stanza-create",
])
-        except ExecError as e:
-            logger.exception(e)
+        except ExecError:
+            logger.exception("Failed to initialise stanza:")
self._s3_initialization_set_failure(FAILED_TO_INITIALIZE_STANZA_ERROR_MESSAGE)
return False

@@ -610,8 +615,8 @@ def check_stanza(self) -> bool:
with attempt:
self._execute_command(["pgbackrest", f"--stanza={self.stanza_name}", "check"])
self.charm._set_active_status()
-        except Exception as e:
-            logger.exception(e)
+        except Exception:
+            logger.exception("Failed to check stanza:")
self._s3_initialization_set_failure(FAILED_TO_INITIALIZE_STANZA_ERROR_MESSAGE)
return False

@@ -1269,7 +1274,6 @@ def _retrieve_s3_parameters(self) -> tuple[dict, list[str]]:

# Add some sensible defaults (as expected by the code) for missing optional parameters
s3_parameters.setdefault("endpoint", "https://s3.amazonaws.com")
-        s3_parameters.setdefault("region")  # type: ignore
s3_parameters.setdefault("path", "")
s3_parameters.setdefault("s3-uri-style", "host")
s3_parameters.setdefault("delete-older-than-days", "9999999")
4 changes: 4 additions & 0 deletions templates/pgbackrest.conf.j2
@@ -6,7 +6,11 @@ repo1-retention-full={{ retention_full }}
repo1-retention-history=365
repo1-type=s3
repo1-path={{ path }}
{%- if region %}
repo1-s3-region={{ region }}
{% else %}
repo1-s3-region=""
{%- endif %}
repo1-s3-endpoint={{ endpoint }}
repo1-s3-bucket={{ bucket }}
repo1-s3-uri-style={{ s3_uri_style }}
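A minimal render check for the new conditional (a sketch assuming jinja2 is installed; output shown modulo surrounding newlines from the whitespace-control markers):

from jinja2 import Template

snippet = (
    "{%- if region %}\nrepo1-s3-region={{ region }}\n"
    '{% else %}\nrepo1-s3-region=""\n{%- endif %}'
)
print(Template(snippet).render(region="us-east-1"))  # repo1-s3-region=us-east-1
print(Template(snippet).render(region=""))           # repo1-s3-region=""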
2 changes: 1 addition & 1 deletion tests/integration/conftest.py
@@ -42,7 +42,7 @@ def get_cloud_config(cloud: str) -> tuple[dict[str, str], dict[str, str]]:
"endpoint": "https://storage.googleapis.com",
"bucket": "data-charms-testing",
"path": f"/postgresql-k8s/{uuid.uuid1()}",
"region": "",
"region": "us-east-1",
}, {
"access-key": os.environ["GCP_ACCESS_KEY"],
"secret-key": os.environ["GCP_SECRET_KEY"],
177 changes: 177 additions & 0 deletions tests/integration/test_backups_ceph.py
@@ -0,0 +1,177 @@
#!/usr/bin/env python3
# Copyright 2024 Canonical Ltd.
# See LICENSE file for licensing details.
import dataclasses
import json
import logging
import os
import socket
import subprocess

import pytest
from pytest_operator.plugin import OpsTest

from .helpers import backup_operations

logger = logging.getLogger(__name__)

S3_INTEGRATOR_APP_NAME = "s3-integrator"
tls_certificates_app_name = "self-signed-certificates"
tls_channel = "latest/stable"
tls_config = {"ca-common-name": "Test CA"}

backup_id, value_before_backup, value_after_backup = "", None, None


@dataclasses.dataclass(frozen=True)
class ConnectionInformation:
access_key_id: str
secret_access_key: str
bucket: str


@pytest.fixture(scope="session")
def microceph():
    if os.environ.get("CI") != "true":
raise Exception("Not running on CI. Skipping microceph installation")
logger.info("Setting up TLS certificates")
subprocess.run(["openssl", "genrsa", "-out", "./ca.key", "2048"], check=True)
subprocess.run(
[
"openssl",
"req",
"-x509",
"-new",
"-nodes",
"-key",
"./ca.key",
"-days",
"1024",
"-out",
"./ca.crt",
"-outform",
"PEM",
"-subj",
"/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com",
],
check=True,
)
subprocess.run(["openssl", "genrsa", "-out", "./server.key", "2048"], check=True)
subprocess.run(
[
"openssl",
"req",
"-new",
"-key",
"./server.key",
"-out",
"./server.csr",
"-subj",
"/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com",
],
check=True,
)
host_ip = socket.gethostbyname(socket.gethostname())
subprocess.run(
f'echo "subjectAltName = IP:{host_ip}" > ./extfile.cnf',
shell=True,
check=True,
)
subprocess.run(
[
"openssl",
"x509",
"-req",
"-in",
"./server.csr",
"-CA",
"./ca.crt",
"-CAkey",
"./ca.key",
"-CAcreateserial",
"-out",
"./server.crt",
"-days",
"365",
"-extfile",
"./extfile.cnf",
],
check=True,
)

logger.info("Setting up microceph")
subprocess.run(
["sudo", "snap", "install", "microceph", "--channel", "squid/stable"], check=True
)
subprocess.run(["sudo", "microceph", "cluster", "bootstrap"], check=True)
subprocess.run(["sudo", "microceph", "disk", "add", "loop,1G,3"], check=True)
subprocess.run(
'sudo microceph enable rgw --ssl-certificate="$(sudo base64 -w0 ./server.crt)" --ssl-private-key="$(sudo base64 -w0 ./server.key)"',
shell=True,
check=True,
)
output = subprocess.run(
[
"sudo",
"microceph.radosgw-admin",
"user",
"create",
"--uid",
"test",
"--display-name",
"test",
],
capture_output=True,
check=True,
encoding="utf-8",
).stdout
key = json.loads(output)["keys"][0]
key_id = key["access_key"]
secret_key = key["secret_key"]
logger.info("Set up microceph")
return ConnectionInformation(key_id, secret_key, _BUCKET)


_BUCKET = "testbucket"


@pytest.fixture(scope="session")
def cloud_credentials(microceph: ConnectionInformation) -> dict[str, str]:
"""Read cloud credentials."""
return {
"access-key": microceph.access_key_id,
"secret-key": microceph.secret_access_key,
}


@pytest.fixture(scope="session")
def cloud_configs(microceph: ConnectionInformation):
host_ip = socket.gethostbyname(socket.gethostname())
result = subprocess.run(
"sudo base64 -w0 ./ca.crt", shell=True, check=True, stdout=subprocess.PIPE, text=True
)
base64_output = result.stdout
return {
"endpoint": f"https://{host_ip}",
"bucket": microceph.bucket,
"path": "/pg",
"region": "",
"s3-uri-style": "path",
"tls-ca-chain": f"{base64_output}",
}


async def test_backup_ceph(ops_test: OpsTest, cloud_configs, cloud_credentials, charm) -> None:
"""Build and deploy two units of PostgreSQL in microceph, test backup and restore actions."""
await backup_operations(
ops_test,
charm,
S3_INTEGRATOR_APP_NAME,
tls_certificates_app_name,
tls_config,
tls_channel,
cloud_credentials,
"ceph",
cloud_configs,
)
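Once the microceph fixture has run, the RGW endpoint can also be smoke-tested directly with boto3 before involving the charm (a sketch; the placeholder credentials come from the fixture's radosgw-admin output, and ./ca.crt is the CA it generated):

import socket

import boto3

host_ip = socket.gethostbyname(socket.gethostname())  # the IP the server cert was issued for
s3 = boto3.resource(
    "s3",
    endpoint_url=f"https://{host_ip}",
    aws_access_key_id="<access key from the microceph fixture>",
    aws_secret_access_key="<secret key from the microceph fixture>",
    verify="./ca.crt",  # trust the fixture's self-signed CA
)
s3.create_bucket(Bucket="testbucket")  # the charm normally creates this itself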
Comment on lines +167 to +177

Contributor (author): Copied from VM. The only difference is the order of backup_operations() args.

Member: Thanks for porting this test!

15 changes: 15 additions & 0 deletions tests/integration/test_backups_gcp.py
@@ -242,3 +242,18 @@ async def test_delete_pod(ops_test: OpsTest, gcp_cloud_configs: tuple[dict, dict
ops_test, "/etc/pgbackrest.conf", f"{database_app_name}/0"
)
assert original_pgbackrest_config == new_pgbackrest_config, "Pgbackrest config not rerendered"


async def test_block_on_missing_region(
ops_test: OpsTest, gcp_cloud_configs: tuple[dict, dict]
) -> None:
await ops_test.model.applications[S3_INTEGRATOR_APP_NAME].set_config({
**gcp_cloud_configs[0],
"region": "",
})
database_app_name = f"new-{DATABASE_APP_NAME}"
logger.info("waiting for the database charm to become blocked")
unit = ops_test.model.units.get(f"{database_app_name}/0")
await ops_test.model.block_until(
lambda: unit.workload_status_message == FAILED_TO_INITIALIZE_STANZA_ERROR_MESSAGE
)
7 changes: 7 additions & 0 deletions tests/spread/test_backups_ceph.py/task.yaml
@@ -0,0 +1,7 @@
summary: test_backups_ceph.py
environment:
TEST_MODULE: test_backups_ceph.py
execute: |
tox run -e integration -- "tests/integration/$TEST_MODULE" --model testing --alluredir="$SPREAD_TASK/allure-results"
artifacts:
- allure-results
21 changes: 20 additions & 1 deletion tests/unit/test_backups.py
@@ -234,6 +234,26 @@ def test_can_use_s3_repository(harness):
None,
)

# Invalid stanza name
pgbackrest_info_other_cluster_name_backup_output = (
0,
'[{"db": [{"system-id": "12345"}], "name": "[invalid]"}]',
"",
)
same_instance_system_identifier_output = (
0,
"Database system identifier: 12345",
"",
)
_execute_command.side_effect = [
pgbackrest_info_other_cluster_name_backup_output,
same_instance_system_identifier_output,
]
assert harness.charm.backup.can_use_s3_repository() == (
False,
FAILED_TO_INITIALIZE_STANZA_ERROR_MESSAGE,
)

# Test when the cluster system id can be retrieved, but it's different from the stanza system id.
pgbackrest_info_other_cluster_system_id_backup_output = (
f'[{{"db": [{{"system-id": "12345"}}], "name": "{harness.charm.backup.stanza_name}"}}]',
@@ -1872,7 +1892,6 @@ def test_retrieve_s3_parameters(
"delete-older-than-days": "9999999",
"endpoint": "https://s3.amazonaws.com",
"path": "/",
"region": None,
"s3-uri-style": "host",
"secret-key": "test-secret-key",
},
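The side_effect list in the new unit-test block above is what sequences the two mocked pgbackrest calls: each call to the mock consumes the next tuple in order. A generic sketch of the mechanism (plain unittest.mock, not charm code):

from unittest.mock import MagicMock

cmd = MagicMock(side_effect=[(0, "first", ""), (0, "second", "")])
print(cmd())  # (0, 'first', '')
print(cmd())  # (0, 'second', '')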