134 changes: 65 additions & 69 deletions tests/conftest.py
@@ -2,14 +2,11 @@
# SPDX-License-Identifier: Apache-2.0

from collections.abc import Generator
from contextlib import contextmanager
from pathlib import Path
from typing import Any, cast
from typing import Optional, cast

import pytest
import yaml
from sqlalchemy import Engine, create_engine
from sqlalchemy.orm import Session, sessionmaker

from tdp.core.constants import (
DAG_DIRECTORY_NAME,
@@ -18,38 +15,49 @@
PLAYBOOKS_DIRECTORY_NAME,
YML_EXTENSION,
)
from tdp.core.models import BaseModel, init_database


def pytest_addoption(parser: pytest.Parser) -> None:
"""Add custom command-line options for pytest.

This function adds the --database-dsn option that allows specifying multiple
database data source names (DSNs) for testing. The option can be used multiple
times to test against different database backends. The default value is "sqlite",
which will always be present in the list of DSNs.

Usage examples:
# Test only with sqlite (default behavior)
pytest tests
# The resulting list will be: ["sqlite"]

# Test with sqlite and postgresql
pytest tests --database-dsn postgresql://user:pass@localhost/testdb
# The resulting list will be: ["sqlite", "postgresql://user:pass@localhost/testdb"]
"""
parser.addoption(
"--database-dsn",
dest="database_dsn",
action="append",
default=["sqlite"],
help="Add database DSN.",
)


def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
"""Pytest hook to generate tests based on the database dsn option."""
"""Pytest hook to generate tests based on the --database-dsn option."""
if "db_dsn" in metafunc.fixturenames:
database_dsns = cast(list, metafunc.config.getoption("--database-dsn"))
metafunc.parametrize(
"db_dsn",
metafunc.config.getoption("database_dsn"),
indirect=True, # type: ignore
database_dsns,
indirect=True,
)
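
For illustration, assuming pytest's standard parametrization behavior, a test that simply requests the db_dsn fixture runs once per configured DSN. A hypothetical example (not part of this diff):

def test_runs_once_per_backend(db_dsn: str) -> None:
    # With `pytest tests --database-dsn postgresql://user:pass@localhost/testdb`,
    # this test is collected twice: once with db_dsn derived from "sqlite" (the
    # fixture expands it to a temporary file path) and once with the PostgreSQL DSN.
    assert db_dsn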


@pytest.fixture
def db_dsn(
request: pytest.FixtureRequest, tmp_path: Path
) -> Generator[str, None, None]:
def db_dsn(request, tmp_path) -> Generator[str, None, None]:
Collaborator (review comment): Why remove the types for the parameters?

"""Return a database dsn.

Ensure that the database is cleaned up after each test is done.

We create a temp path instead of the default in-memory sqlite database as some tests
Create a temp path instead of the default in-memory sqlite database as some tests
need to generate several engine instances (which would otherwise lose the data between them).
The affected tests are CLI tests that need to perform a `tdp init` at the beginning of
the test.
@@ -58,54 +66,26 @@ def db_dsn(
# Assign a temporary database for sqlite
if database_dsn == "sqlite":
database_dsn = f"sqlite:///{tmp_path / 'test.db'}"
yield database_dsn


@pytest.fixture()
def db_engine(
db_dsn: str, request: pytest.FixtureRequest
) -> Generator[Engine, None, None]:
"""Create a database engine and optionnally by default initialize the schema."""
engine = create_engine(db_dsn)
if request.param:
init_database(engine)
yield engine
if request.param:
BaseModel.metadata.drop_all(engine)
engine.dispose()

yield database_dsn

@contextmanager
def create_session(engine: Engine) -> Generator[Session, None, None]:
"""Utility function to create a session."""
Session = sessionmaker(bind=engine)
session = Session()
try:
yield session
finally:
session.close()

def init_dag_directory(path: Path, dag: dict[str, list]) -> None:
"""Create and populate the DAG directory with service DAG files."""
for service_name, operations in dag.items():
# Save the dag
with (path / (service_name + YML_EXTENSION)).open("w") as fd:
yaml.dump(operations, fd)

def generate_collection_at_path(
path: Path,
dag: dict[str, list],
vars: dict[str, dict[str, dict]],
) -> None:
"""Generate a collection at a given path."""
(dag_dir := path / DAG_DIRECTORY_NAME).mkdir()
(playbooks_dir := path / PLAYBOOKS_DIRECTORY_NAME).mkdir()
(tdp_vars_defaults_dir := path / DEFAULT_VARS_DIRECTORY_NAME).mkdir()

def init_playbooks_directory(path: Path, dag: dict[str, list]) -> None:
"""Create and populate the playbooks directory with operation playbooks."""
# Minimal playbook which will be used for operations
minimal_playbook = [
{"hosts": "localhost"},
]

for service_name, operations in dag.items():
# Save the dag
with (dag_dir / (service_name + YML_EXTENSION)).open("w") as fd:
yaml.dump(operations, fd)

# Save playbooks
for operation in operations:
# Do not generate playbooks for noop operations
@@ -114,40 +94,56 @@ def generate_collection_at_path(
# Generate and save stop and restart playbooks for each start operation
if operation["name"].endswith("_start"):
with (
playbooks_dir
path
/ (operation["name"].rstrip("_start") + "_restart" + YML_EXTENSION)
).open("w") as fd:
yaml.dump(minimal_playbook, fd)
with (
playbooks_dir
path
/ (operation["name"].rstrip("_start") + "_stop" + YML_EXTENSION)
).open("w") as fd:
yaml.dump(minimal_playbook, fd)
# Save the playbook
with (playbooks_dir / (operation["name"] + YML_EXTENSION)).open("w") as fd:
with (path / (operation["name"] + YML_EXTENSION)).open("w") as fd:
yaml.dump(minimal_playbook, fd)

# Save the sleep playbook
with (playbooks_dir / (OPERATION_SLEEP_NAME + YML_EXTENSION)).open("w") as fd:
with (path / (OPERATION_SLEEP_NAME + YML_EXTENSION)).open("w") as fd:
yaml.dump(minimal_playbook, fd)

# Save the vars

def init_default_vars_directory(path: Path, vars: dict[str, dict[str, dict]]) -> None:
"""Create and populate the default vars directory with service variables."""
for service_name, file_vars in vars.items():
service_dir = tdp_vars_defaults_dir / service_name
service_dir = path / service_name
service_dir.mkdir()
for filename, vars in file_vars.items():
if not filename.endswith(YML_EXTENSION):
filename += YML_EXTENSION
with (service_dir / filename).open("w") as fd:
yaml.dump(vars, fd)
yaml.dump(vars, fd, sort_keys=False)


def assert_equal_values_in_model(model1: Any, model2: Any) -> bool:
"""SQLAlchemy asserts that two identical objects of type DeclarativeBase parent of the BaseModel class,
which is used in TDP as pattern for the table models, are identical if they are compared in the same session,
but different if compared in two different sessions.
def generate_collection_at_path(
path: Path,
dag: Optional[dict[str, list]] = None,
vars: Optional[dict[str, dict[str, dict]]] = None,
) -> Path:
"""Generate a collection at a given path."""
path.mkdir(parents=True, exist_ok=True)

This function therefore converts the models to dictionaries and compares their column values.
"""
if isinstance(model1, BaseModel) and isinstance(model2, BaseModel):
return model1.to_dict() == model2.to_dict()
else:
return False
# Dag
(dag_dir := path / DAG_DIRECTORY_NAME).mkdir(parents=True)
if dag:
init_dag_directory(dag_dir, dag)

# Playbooks
(playbooks_dir := path / PLAYBOOKS_DIRECTORY_NAME).mkdir()
if dag:
init_playbooks_directory(playbooks_dir, dag)

# Default vars
(tdp_vars_defaults_dir := path / DEFAULT_VARS_DIRECTORY_NAME).mkdir(parents=True)
if vars:
init_default_vars_directory(tdp_vars_defaults_dir, vars)
return path
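
A minimal sketch of how the refactored helper might be exercised, assuming YML_EXTENSION is the usual ".yml" suffix (a hypothetical test, not part of this diff):

from pathlib import Path

from tdp.core.constants import DAG_DIRECTORY_NAME, YML_EXTENSION
from tests.conftest import generate_collection_at_path


def test_generate_minimal_collection(tmp_path: Path) -> None:
    dag = {"service": [{"name": "service_install"}]}
    default_vars = {"service": {"service.yml": {"key": "value"}}}
    collection = generate_collection_at_path(tmp_path / "collection", dag, default_vars)
    # The helper returns the collection path and writes the service DAG file
    # under the DAG directory.
    assert collection == tmp_path / "collection"
    assert (collection / DAG_DIRECTORY_NAME / ("service" + YML_EXTENSION)).is_file()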
134 changes: 88 additions & 46 deletions tests/e2e/conftest.py
@@ -3,63 +3,105 @@

from collections.abc import Generator
from pathlib import Path
from typing import NamedTuple
from typing import Callable

import pytest
from click.testing import CliRunner
from sqlalchemy import create_engine
from click.testing import CliRunner, Result

from tdp.cli.commands.init import init
from tdp.core.models import BaseModel
from tests.conftest import generate_collection_at_path
from tdp.cli.__main__ import cli
from tdp.core.constants import (
DAG_DIRECTORY_NAME,
DEFAULT_VARS_DIRECTORY_NAME,
PLAYBOOKS_DIRECTORY_NAME,
)
from tests.conftest import (
generate_collection_at_path,
init_dag_directory,
init_default_vars_directory,
init_playbooks_directory,
)


class TDPInitArgs(NamedTuple):
collection_path: Path
db_dsn: str
vars: Path
@pytest.fixture
def runner() -> Generator[CliRunner, None, None]:
"""Fixture to provide a Click test runner."""
runner = CliRunner(env={"TDP_MOCK_DEPLOY": "True"})
# Run tests in an isolated filesystem to avoid side effects (e.g. local .env file)
with runner.isolated_filesystem():
yield runner


@pytest.fixture
def tdp_init(
collection_path: Path, db_dsn: str, vars: Path
) -> Generator[TDPInitArgs, None, None]:
base_args = [
"--collection-path",
str(collection_path),
"--database-dsn",
db_dsn,
"--vars",
str(vars),
]
runner = CliRunner()
runner.invoke(init, base_args)
yield TDPInitArgs(collection_path, db_dsn, vars)
engine = create_engine(db_dsn)
BaseModel.metadata.drop_all(engine)
engine.dispose()
def tdp(runner):
"""Fixture to provide a function that invokes the TDP CLI with given arguments."""

def invoke(args: str) -> Result:
return runner.invoke(cli, args.split())

return invoke


@pytest.fixture
def vars(tmp_path) -> Path:
"""Fixture to create a temporary directory for storing variable files."""
vars_path = tmp_path / "vars"
vars_path.mkdir(parents=True)
return vars_path


class CollectionPath:
"""A collection path object that provides methods to create collection directories."""

def __init__(self, path: Path):
self.path = path
generate_collection_at_path(self.path)

def __str__(self) -> str:
"""Return the path as a string."""
return str(self.path)

def init_dag_directory(self, dag: dict[str, list]) -> None:
"""Create and populate the DAG directory with service DAG files."""
init_dag_directory(self.path / DAG_DIRECTORY_NAME, dag)

def init_playbooks_directory(self, dag: dict[str, list]) -> None:
"""Create and populate the playbooks directory with operation playbooks."""
init_playbooks_directory(self.path / PLAYBOOKS_DIRECTORY_NAME, dag)

def init_default_vars_directory(self, vars: dict[str, dict[str, dict]]) -> None:
"""Create and populate the default vars directory with service variables."""
init_default_vars_directory(self.path / DEFAULT_VARS_DIRECTORY_NAME, vars)


@pytest.fixture
def collection_path(tmp_path_factory: pytest.TempPathFactory) -> Path:
collection_path = tmp_path_factory.mktemp("collection")
dag_service_operations = {
"service": [
{"name": "service_install"},
{"name": "service_config", "depends_on": ["service_install"]},
{"name": "service_start", "depends_on": ["service_config"]},
{"name": "service_init", "depends_on": ["service_start"]},
],
}
service_vars = {
"service": {
"service.yml": {},
},
}
generate_collection_at_path(collection_path, dag_service_operations, service_vars)
return collection_path
def collection_path_factory(
tmp_path,
) -> Generator[Callable[[], CollectionPath], None, None]:
"""Fixture that provides a factory function for creating CollectionPath objects.

The factory function can be called multiple times within a test to create multiple
collections. Each call will create a new temporary directory with a unique name.

Returns:
A factory function that, when called, returns a new CollectionPath object.

Example usage:
def test_something(collection_path_factory):
collection1 = collection_path_factory()
collection2 = collection_path_factory()
# Both collections are separate with unique paths
"""
collection_counter = 0

def _create_collection_path() -> CollectionPath:
nonlocal collection_counter
collection_counter += 1
collection_dir = tmp_path / f"collection_{collection_counter}"
return CollectionPath(collection_dir)

yield _create_collection_path


@pytest.fixture
def vars(tmp_path_factory: pytest.TempPathFactory) -> Path:
return tmp_path_factory.mktemp("collection")
def collection_path(collection_path_factory) -> CollectionPath:
return collection_path_factory()
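
A rough sketch of an end-to-end test built on these fixtures, assuming the init subcommand still accepts the --collection-path, --database-dsn and --vars options used by the removed tdp_init fixture (a hypothetical test, not part of this diff):

def test_tdp_init(tdp, collection_path, vars, db_dsn):
    # Populate the generated collection with a minimal DAG and its playbooks.
    dag = {"service": [{"name": "service_install"}]}
    collection_path.init_dag_directory(dag)
    collection_path.init_playbooks_directory(dag)
    collection_path.init_default_vars_directory({"service": {"service.yml": {}}})
    result = tdp(
        f"init --collection-path {collection_path} --database-dsn {db_dsn} --vars {vars}"
    )
    assert result.exit_code == 0, result.output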