From 41375a0f23bb1b3e66a6c688721ae421144d46f2 Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Mon, 27 Apr 2026 15:27:30 -0500 Subject: [PATCH 1/7] docs: add Prerequisites section, fix CHANGELOG links, remove internal session-recall entry - README: add Prerequisites block listing Docker, pgpkg[diff], and libpq requirements before Quickstart so first-run failures are predictable - CHANGELOG: link to keepachangelog.com and semver.org per the standard format - troubleshooting.md: remove session-recall entry (internal dev tooling, not user-facing pgpkg troubleshooting) Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- CHANGELOG.md | 3 ++- README.md | 12 ++++++++++++ docs/troubleshooting.md | 17 ----------------- 3 files changed, 14 insertions(+), 18 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4fb1446..4b19c2a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,8 @@ All notable changes to this project will be documented in this file. -The format is based on Keep a Changelog and this project follows Semantic Versioning. +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [0.1.0] - 2026-04-27 diff --git a/README.md b/README.md index ace9fa0..481f08a 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,18 @@ ordered `sql/` directory. `pgpkg` does everything else: `migrations/` plus `sql/pre/` and `sql/post/` for automation or custom packaging flows. 
+## Prerequisites + +- **Python ≥ 3.11** and **pip** (or **uv**) +- **`pgpkg[diff]`** — the `makemigration` command requires the optional + [results](https://github.com/djrobstep/results) dependency: + `pip install 'pgpkg[diff]'` +- **Docker** — `makemigration` and `verify` spin up throwaway PostgreSQL + containers via `testcontainers`; Docker must be running +- **libpq** — `migrate` connects to a live database using standard libpq + environment variables (`PGHOST`, `PGPORT`, `PGDATABASE`, `PGUSER`, + `PGPASSWORD`) or explicit `-h/-p/-d/-U/--dsn` flags + ## Quickstart ```bash diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 48c304a..1541080 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -49,23 +49,6 @@ pip install 'pgpkg[diff]' uv sync --extra diff ``` -## Session-recall shows little or no context - -Symptom: -- `session-recall files --json --limit 10` returns zero files. - -Cause: -- Cold-start corpus (not enough historical sessions yet). - -Fix: -- Keep using baseline recall commands each session. -- Complete several real edit sessions; corpus quality improves over time. -- Verify health: - -```bash -session-recall health -``` - ## Release workflow version mismatch Symptom: From aecaf57aa88cfd5b62a86030b9493a07070c1ed5 Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Mon, 27 Apr 2026 15:31:22 -0500 Subject: [PATCH 2/7] chore: gitignore .claude/*.local.md hookify rules Personal dev tooling (hookify rules) must never land in the repo. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 1596b0a..53b05e0 100644 --- a/.gitignore +++ b/.gitignore @@ -34,3 +34,6 @@ pip-wheel-metadata/ # local-only markdown notes *_SETUP.md *_PLAN.md + +# hookify rules (personal dev tooling, never commit) +.claude/*.local.md From 6f9b58c44c6b215134691291c7d917be06a2d388 Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Tue, 5 May 2026 15:50:25 -0500 Subject: [PATCH 3/7] feat: add configurable version sources --- src/pgpkg/__init__.py | 2 + src/pgpkg/api.py | 64 +++++--- src/pgpkg/artifact.py | 34 +++- src/pgpkg/cli.py | 43 ++++- src/pgpkg/config.py | 17 +- src/pgpkg/diff.py | 51 +++++- src/pgpkg/executor.py | 79 +++++++-- src/pgpkg/staging.py | 16 +- src/pgpkg/tracking.py | 182 ++++++++++++++++++++- src/pgpkg/wrapper.py | 8 + tests/integration/test_diff.py | 33 ++++ tests/integration/test_executor.py | 253 ++++++++++++++++++++++++++++- tests/unit/test_artifact.py | 67 +++++++- tests/unit/test_cli.py | 25 +++ tests/unit/test_config.py | 31 ++++ tests/unit/test_staging.py | 18 ++ tests/unit/test_tracking.py | 100 ++++++++++++ tests/unit/test_wrapper.py | 31 ++++ 18 files changed, 986 insertions(+), 68 deletions(-) create mode 100644 tests/unit/test_tracking.py create mode 100644 tests/unit/test_wrapper.py diff --git a/src/pgpkg/__init__.py b/src/pgpkg/__init__.py index 6a8319a..f13450c 100644 --- a/src/pgpkg/__init__.py +++ b/src/pgpkg/__init__.py @@ -2,6 +2,7 @@ from .api import ( apply_migrations, + bundle_project, generate_incremental, list_versions, load_project, @@ -15,6 +16,7 @@ __all__ = [ "PgpkgError", "apply_migrations", + "bundle_project", "generate_incremental", "list_versions", "load_project", diff --git a/src/pgpkg/api.py b/src/pgpkg/api.py index 7769421..f4d9f25 100644 --- a/src/pgpkg/api.py +++ b/src/pgpkg/api.py @@ -6,8 +6,6 @@ import shutil from pathlib import Path -from psycopg import 
sql - from . import _conn from .artifact import LoadedArtifact, build_artifact, load_artifact from .catalog import Catalog, build_catalog @@ -17,6 +15,7 @@ from .executor import ApplyResult, apply_plan, make_default_plan from .planner import MigrationPlan, plan, render_graph_dot, render_graph_text from .staging import read_pre_post, write_staged_file +from .tracking import resolve_version_source __all__ = [ "PgpkgError", @@ -25,6 +24,7 @@ "load_config", "load_project", "list_versions", + "bundle_project", "stage_version", "generate_incremental", "plan_path", @@ -44,16 +44,29 @@ def list_versions(project_root: str | Path) -> list[str]: return project.catalog.versions +def bundle_project(project_root: str | Path, output_path: Path) -> Path: + """Bundle a project's staged migrations and pre/post SQL into a tar.zst artifact.""" + project = load_project(project_root) + return build_artifact(project.config, output_path) + + def stage_version( project_root: str | Path, version: str, *, output_path: Path | None = None, + also_write: Path | None = None, overwrite: bool = True, ) -> Path: """Render and write `--.sql` from the project's sql/ tree.""" config = load_config(project_root) - return write_staged_file(config, version, output_path=output_path, overwrite=overwrite) + return write_staged_file( + config, + version, + output_path=output_path, + also_write=also_write, + overwrite=overwrite, + ) def generate_incremental( @@ -63,6 +76,9 @@ def generate_incremental( to_version: str, base_url: str = "postgresql:///postgres", output_path: Path | None = None, + prepend_files: list[Path] | None = None, + append_files: list[Path] | None = None, + append_sql: list[str] | None = None, ) -> Path: """Generate `----.sql` by diffing two staged base files.""" config = load_config(project_root) @@ -72,6 +88,9 @@ def generate_incremental( to_version=to_version, base_url=base_url, output_path=output_path, + prepend_files=prepend_files, + append_files=append_files, + append_sql=append_sql, 
) assert res.path is not None return res.path @@ -106,6 +125,7 @@ def apply_migrations( dbname: str | None = None, user: str | None = None, password: str | None = None, + version_source=None, ) -> ApplyResult: """Apply migrations to a live DB. Mirrors `pgpkg.migrate` but uses project source tree.""" project = load_project(project_root) @@ -121,6 +141,7 @@ def apply_migrations( dbname=dbname, user=user, password=password, + version_source=version_source, ) @@ -139,6 +160,7 @@ def migrate_from_artifact( dbname: str | None = None, user: str | None = None, password: str | None = None, + version_source=None, ) -> ApplyResult: """Apply migrations from a prebuilt tar.zst artifact (used by wrappers).""" artifact = load_artifact(Path(artifact_path)) @@ -155,6 +177,7 @@ def migrate_from_artifact( dbname=dbname, user=user, password=password, + version_source=version_source, ) @@ -171,8 +194,10 @@ def _migrate_with_catalog( dbname: str | None, user: str | None, password: str | None, + version_source, ) -> ApplyResult: pre_sql, post_sql = pre_post + resolved_version_source = resolve_version_source(config, override=version_source) with _conn.connect( conninfo, host=host, @@ -183,7 +208,7 @@ def _migrate_with_catalog( ) as conn: # Need the live version BEFORE planning so we can build the right plan. # Use a tiny autocommit-friendly read inside the same connection (pre-txn). 
- live_version = _read_live_version_safe(conn, config) + live_version = _read_live_version_safe(conn, config, resolved_version_source) plan_obj = make_default_plan(catalog, live_version=live_version, target=target) return apply_plan( conn, @@ -193,32 +218,22 @@ def _migrate_with_catalog( pre_sql=pre_sql, post_sql=post_sql, dry_run=dry_run, + version_source=resolved_version_source, ) -def _read_live_version_safe(conn, config: ProjectConfig) -> str | None: # type: ignore[no-untyped-def] +def _read_live_version_safe( # type: ignore[no-untyped-def] + conn, + config: ProjectConfig, + version_source, +) -> str | None: """Read current version without breaking the in-progress transaction.""" # psycopg by default starts a txn on first execute. We rollback after the read # so the apply_plan transaction starts clean. try: - with conn.cursor() as cur: - cur.execute( - "SELECT to_regclass(%s)", - (f"{config.tracking_schema}.{config.tracking_table}",), - ) - exists = cur.fetchone()[0] - if exists is None: - conn.rollback() - return None - cur.execute( - sql.SQL("SELECT version FROM {schema}.{table} ORDER BY id DESC LIMIT 1").format( - schema=sql.Identifier(config.tracking_schema), - table=sql.Identifier(config.tracking_table), - ) - ) - row = cur.fetchone() - conn.rollback() - return row[0] if row else None + live_version = version_source.read_live_version(conn, config) + conn.rollback() + return live_version except Exception: conn.rollback() raise @@ -305,6 +320,9 @@ def _config_and_catalog_from_artifact( pre_dir=pre_dir, post_dir=post_dir, project_root=tmp_root, + version_source=artifact.manifest.version_source, + tracking_schema=artifact.manifest.tracking_schema, + tracking_table=artifact.manifest.tracking_table, ) catalog = build_catalog(config) return config, catalog diff --git a/src/pgpkg/artifact.py b/src/pgpkg/artifact.py index 7b353bb..8f95b16 100644 --- a/src/pgpkg/artifact.py +++ b/src/pgpkg/artifact.py @@ -30,12 +30,18 @@ class ArtifactManifest: project_name: str 
prefix: str entries: list[ArtifactEntry] + tracking_schema: str = "pgpkg" + tracking_table: str = "migrations" + version_source: str | None = None def to_json(self) -> str: return json.dumps( { "project_name": self.project_name, "prefix": self.prefix, + "tracking_schema": self.tracking_schema, + "tracking_table": self.tracking_table, + "version_source": self.version_source, "entries": [ {"name": e.name, "sha256": e.sha256, "size": e.size} for e in self.entries ], @@ -51,11 +57,14 @@ def from_json(cls, text: str) -> ArtifactManifest: project_name=data["project_name"], prefix=data["prefix"], entries=[ArtifactEntry(**e) for e in data["entries"]], + tracking_schema=data.get("tracking_schema", "pgpkg"), + tracking_table=data.get("tracking_table", "migrations"), + version_source=data.get("version_source"), ) def build_artifact(config: ProjectConfig, output_path: Path) -> Path: - """Bundle migrations/ + sql/pre + sql/post + a manifest into a tar.zst. + """Bundle migrations/ + sql/pre + sql/post + runtime config into a tar.zst. 
Layout inside the archive: MANIFEST.json @@ -92,18 +101,17 @@ def build_artifact(config: ProjectConfig, output_path: Path) -> Path: project_name=config.project_name, prefix=config.prefix, entries=entries, + tracking_schema=config.tracking_schema, + tracking_table=config.tracking_table, + version_source=config.version_source, ) tar_buf = io.BytesIO() with tarfile.open(fileobj=tar_buf, mode="w") as tar: manifest_bytes = manifest.to_json().encode("utf-8") - info = tarfile.TarInfo(name=MANIFEST_NAME) - info.size = len(manifest_bytes) - tar.addfile(info, io.BytesIO(manifest_bytes)) + tar.addfile(_tar_info(MANIFEST_NAME, manifest_bytes), io.BytesIO(manifest_bytes)) for name, data in files: - info = tarfile.TarInfo(name=name) - info.size = len(data) - tar.addfile(info, io.BytesIO(data)) + tar.addfile(_tar_info(name, data), io.BytesIO(data)) output_path.parent.mkdir(parents=True, exist_ok=True) cctx = zstd.ZstdCompressor(level=19) @@ -111,6 +119,18 @@ def build_artifact(config: ProjectConfig, output_path: Path) -> Path: return output_path +def _tar_info(name: str, data: bytes) -> tarfile.TarInfo: + info = tarfile.TarInfo(name=name) + info.size = len(data) + info.mode = 0o644 + info.mtime = 0 + info.uid = 0 + info.gid = 0 + info.uname = "" + info.gname = "" + return info + + @dataclass(frozen=True) class LoadedArtifact: """Decompressed view of a tar.zst artifact, kept entirely in memory.""" diff --git a/src/pgpkg/cli.py b/src/pgpkg/cli.py index f7de59e..ca772ad 100644 --- a/src/pgpkg/cli.py +++ b/src/pgpkg/cli.py @@ -71,6 +71,11 @@ def build_parser() -> argparse.ArgumentParser: _project_root_arg(p_stage) p_stage.add_argument("version", help="Version to stage (PEP 440 or 'unreleased')") p_stage.add_argument("--output", type=Path, help="Override output path") + p_stage.add_argument( + "--also-write", + type=Path, + help="Write the same staged base file to a second path", + ) p_stage.add_argument( "--no-overwrite", action="store_true", @@ -88,6 +93,29 @@ def build_parser() -> 
argparse.ArgumentParser: ) p_make.add_argument("--to", dest="to_version", help=f"Target version (default: '{UNRELEASED}')") p_make.add_argument("--output", type=Path, help="Override output path") + p_make.add_argument( + "--prepend-file", + dest="prepend_files", + action="append", + type=Path, + default=[], + help="SQL file to prepend before the generated diff (may be repeated)", + ) + p_make.add_argument( + "--append-file", + dest="append_files", + action="append", + type=Path, + default=[], + help="SQL file to append after the generated diff (may be repeated)", + ) + p_make.add_argument( + "--append-sql", + dest="append_sql", + action="append", + default=[], + help="Literal SQL to append after the generated diff (may be repeated)", + ) p_make.add_argument( "--base-url", default="postgresql:///postgres", @@ -132,7 +160,10 @@ def build_parser() -> argparse.ArgumentParser: ) _project_root_arg(p_wheel) p_wheel.add_argument( - "--output-dir", type=Path, required=True, help="Where to write the wrapper project" + "--output-dir", + type=Path, + required=True, + help="Where to write the wrapper project", ) p_wheel.add_argument( "--cli-name", @@ -192,6 +223,9 @@ def _cmd_info(args: argparse.Namespace) -> int: "prefix": project.config.prefix, "sql_dir": str(project.config.sql_dir), "migrations_dir": str(project.config.migrations_dir), + "tracking_schema": project.config.tracking_schema, + "tracking_table": project.config.tracking_table, + "version_source": project.config.version_source, "versions": project.catalog.versions, "base_files": {v: str(p) for v, p in project.catalog.base_files.items()}, "edges": [{"from": f, "to": t, "file": str(p)} for f, t, p in project.catalog.edges], @@ -215,6 +249,7 @@ def _cmd_stageversion(args: argparse.Namespace) -> int: args.project_root, args.version, output_path=args.output, + also_write=args.also_write, overwrite=not args.no_overwrite, ) print(f"wrote {path}") @@ -239,6 +274,9 @@ def _cmd_makemigration(args: argparse.Namespace) -> int: 
to_version=to_v, base_url=args.base_url, output_path=args.output, + prepend_files=args.prepend_files, + append_files=args.append_files, + append_sql=args.append_sql, ) print(f"wrote {path}") return 0 @@ -318,8 +356,7 @@ def _cmd_wheel(args: argparse.Namespace) -> int: def _cmd_bundle(args: argparse.Namespace) -> int: - project = api.load_project(args.project_root) - path = api.build_artifact(project.config, args.output) + path = api.bundle_project(args.project_root, args.output) print(f"wrote {path}") return 0 diff --git a/src/pgpkg/config.py b/src/pgpkg/config.py index 1c692ba..db530e4 100644 --- a/src/pgpkg/config.py +++ b/src/pgpkg/config.py @@ -20,6 +20,7 @@ class ProjectConfig: pre_dir: Path post_dir: Path project_root: Path + version_source: str | None = None tracking_schema: str = "pgpkg" tracking_table: str = "migrations" @@ -29,7 +30,8 @@ def load_config(project_root: str | Path) -> ProjectConfig: Required keys: project_name (or fallback to [project].name). Optional keys: prefix (default = project_name), sql_dir (default 'sql'), - migrations_dir (default 'migrations'), tracking.schema, tracking.table. + migrations_dir (default 'migrations'), version_source, tracking.schema, + tracking.table. """ root = Path(project_root).resolve() pyproject = root / "pyproject.toml" @@ -59,6 +61,18 @@ def load_config(project_root: str | Path) -> ProjectConfig: pre_dir = sql_dir / "pre" post_dir = sql_dir / "post" + if "pre_post_in_base" in pgpkg_cfg: + raise ConfigError( + "[tool.pgpkg].pre_post_in_base is not supported in pgpkg 0.1.x. " + "Keep baked pre/post handling in project-specific staging or apply wrappers instead." 
+ ) + + version_source = pgpkg_cfg.get("version_source") + if version_source is not None and ( + not isinstance(version_source, str) or not version_source.strip() + ): + raise ConfigError("[tool.pgpkg].version_source must be a non-empty string.") + tracking = pgpkg_cfg.get("tracking", {}) or {} tracking_schema = tracking.get("schema", "pgpkg") tracking_table = tracking.get("table", "migrations") @@ -71,6 +85,7 @@ def load_config(project_root: str | Path) -> ProjectConfig: pre_dir=pre_dir, post_dir=post_dir, project_root=root, + version_source=version_source, tracking_schema=tracking_schema, tracking_table=tracking_table, ) diff --git a/src/pgpkg/diff.py b/src/pgpkg/diff.py index 9dcaf2d..4e0278d 100644 --- a/src/pgpkg/diff.py +++ b/src/pgpkg/diff.py @@ -64,6 +64,9 @@ def write_incremental( base_url: str = "postgresql:///postgres", output_path: Path | None = None, overwrite: bool = True, + prepend_files: list[Path] | None = None, + append_files: list[Path] | None = None, + append_sql: list[str] | None = None, ) -> IncrementalResult: """Generate and write an incremental migration file.""" diff_sql = generate_incremental_sql( @@ -75,14 +78,14 @@ def write_incremental( target.parent.mkdir(parents=True, exist_ok=True) if target.exists() and not overwrite: raise PgpkgError(f"{target} already exists and overwrite=False", code="E_DIFF") - body = ( - f"-- Generated by pgpkg makemigration\n" - f"-- Project: {config.project_name}\n" - f"-- From: {from_version}\n" - f"-- To: {to_version}\n" - f"-- Review the diff before applying.\n" - f"\n" - f"{diff_sql}" + body = _render_incremental_body( + config=config, + from_version=from_version, + to_version=to_version, + diff_sql=diff_sql, + prepend_files=prepend_files or [], + append_files=append_files or [], + append_sql=append_sql or [], ) if not body.endswith("\n"): body += "\n" @@ -92,6 +95,38 @@ def write_incremental( ) +def _render_incremental_body( + *, + config: ProjectConfig, + from_version: str, + to_version: str, + 
diff_sql: str, + prepend_files: list[Path], + append_files: list[Path], + append_sql: list[str], +) -> str: + parts = [ + "-- Generated by pgpkg makemigration", + f"-- Project: {config.project_name}", + f"-- From: {from_version}", + f"-- To: {to_version}", + "-- Review the diff before applying.", + "", + ] + parts.extend(_read_sql_block(path) for path in prepend_files) + if diff_sql.strip(): + parts.append(diff_sql.rstrip("\n")) + parts.extend(_read_sql_block(path) for path in append_files) + parts.extend(sql.rstrip("\n") for sql in append_sql if sql.strip()) + return "\n".join(parts) + + +def _read_sql_block(path: Path) -> str: + if not path.is_file(): + raise PgpkgError(f"SQL wrapper file not found: {path}", code="E_DIFF") + return path.read_text(encoding="utf-8").rstrip("\n") + + def _staged_path(config: ProjectConfig, version: str) -> Path: from .layout import base_filename diff --git a/src/pgpkg/executor.py b/src/pgpkg/executor.py index 1c75cd2..7a0a1af 100644 --- a/src/pgpkg/executor.py +++ b/src/pgpkg/executor.py @@ -10,10 +10,11 @@ from .errors import ExecutionError from .planner import MigrationPlan, plan from .tracking import ( + DefaultVersionSource, + _session_role, acquire_advisory_lock, - current_version, ensure_tracking, - record_applied, + resolve_version_source, sha256_text, ) from .versioning import default_target @@ -52,6 +53,7 @@ def apply_plan( pre_sql: str = "", post_sql: str = "", dry_run: bool = False, + version_source=None, ) -> ApplyResult: """Apply a precomputed plan inside a single transaction with an advisory lock. @@ -65,13 +67,15 @@ def apply_plan( result = ApplyResult() schema = config.tracking_schema table = config.tracking_table + resolved_version_source = resolve_version_source(config, override=version_source) + default_version_source = DefaultVersionSource() try: acquire_advisory_lock(conn, config.project_name) ensure_tracking(conn, schema=schema, table=table) # Re-check the live version inside the locked txn, in case it changed. 
- live_version = current_version(conn, schema=schema, table=table) + live_version = resolved_version_source.read_live_version(conn, config) with conn.cursor() as cur: # Bootstrap if requested by the plan AND nothing is installed. @@ -82,13 +86,14 @@ def apply_plan( # The base file IS for `target` if target is in catalog.base_files, # otherwise it's for the highest reachable base. bootstrap_version = _infer_bootstrap_version(catalog, plan_obj) - record_applied( + _record_version_state( conn, - bootstrap_version, - sha256_text(base_sql), - plan_obj.bootstrap_base.name, - schema=schema, - table=table, + config, + resolved_version_source, + default_version_source, + version=bootstrap_version, + sha256=sha256_text(base_sql), + filename=plan_obj.bootstrap_base.name, ) result.bootstrapped_from = bootstrap_version result.final_version = bootstrap_version @@ -100,13 +105,14 @@ def apply_plan( for step in plan_obj.steps: inc_sql = step.file.read_text(encoding="utf-8") _execute_step(cur, pre_sql, inc_sql, post_sql) - record_applied( + _record_version_state( conn, - step.to_version, - sha256_text(inc_sql), - step.file.name, - schema=schema, - table=table, + config, + resolved_version_source, + default_version_source, + version=step.to_version, + sha256=sha256_text(inc_sql), + filename=step.file.name, ) result.applied_steps.append((step.from_version, step.to_version)) result.final_version = step.to_version @@ -144,6 +150,49 @@ def _infer_bootstrap_version(catalog: Catalog, plan_obj: MigrationPlan) -> str: raise ExecutionError(f"Bootstrap base file {base_path} not found in catalog") +def _record_version_state( + conn: psycopg.Connection, + config: ProjectConfig, + resolved_version_source, + default_version_source: DefaultVersionSource, + *, + version: str, + sha256: str, + filename: str, +) -> None: + with _session_role(conn): + source_manages_default_tracking = bool( + getattr(resolved_version_source, "writes_default_tracking", False) + ) + + if type(resolved_version_source) is 
DefaultVersionSource: + resolved_version_source.record_applied( + conn, + config, + version=version, + sha256=sha256, + filename=filename, + ) + return + + if not source_manages_default_tracking: + default_version_source.record_applied( + conn, + config, + version=version, + sha256=sha256, + filename=filename, + ) + + resolved_version_source.record_applied( + conn, + config, + version=version, + sha256=sha256, + filename=filename, + ) + + def make_default_plan( catalog: Catalog, *, diff --git a/src/pgpkg/staging.py b/src/pgpkg/staging.py index 6607372..8b24871 100644 --- a/src/pgpkg/staging.py +++ b/src/pgpkg/staging.py @@ -46,15 +46,23 @@ def write_staged_file( version: str, *, output_path: Path | None = None, + also_write: Path | None = None, overwrite: bool = True, ) -> Path: """Render and write the staged base file. Returns the path written.""" body = render_staged_sql(config, version) target = output_path or (config.migrations_dir / base_filename(config.prefix, version)) - target.parent.mkdir(parents=True, exist_ok=True) - if target.exists() and not overwrite: - raise LayoutError(f"{target} already exists and overwrite=False") - target.write_text(body, encoding="utf-8") + destinations = [path for path in dict.fromkeys([target, also_write]) if path is not None] + for path in destinations: + assert path is not None + path.parent.mkdir(parents=True, exist_ok=True) + if path.exists() and not overwrite: + raise LayoutError(f"{path} already exists and overwrite=False") + + for path in destinations: + if path is None: + continue + path.write_text(body, encoding="utf-8") return target diff --git a/src/pgpkg/tracking.py b/src/pgpkg/tracking.py index 850c19c..a6ab6f1 100644 --- a/src/pgpkg/tracking.py +++ b/src/pgpkg/tracking.py @@ -3,10 +3,19 @@ from __future__ import annotations import hashlib +import sys +from contextlib import contextmanager, suppress +from importlib import import_module +from typing import TYPE_CHECKING, Protocol, cast, runtime_checkable import 
psycopg from psycopg import sql +from .errors import ConfigError + +if TYPE_CHECKING: + from .config import ProjectConfig + def tracking_ddl(schema: str = "pgpkg", table: str = "migrations") -> sql.Composed: """Return the SQL to install the tracking schema/table (idempotent).""" @@ -24,19 +33,50 @@ def tracking_ddl(schema: str = "pgpkg", table: str = "migrations") -> sql.Compos ).format(schema=schema_ident, table=table_ident) +@contextmanager +def _session_role(conn: psycopg.Connection): + """Temporarily reset to the session user when SQL changed the effective role. + + Some migration frameworks intentionally `SET ROLE` during apply. pgpkg's + tracking writes should still run as the original session user that started + the migration transaction. + """ + with conn.cursor() as cur: + cur.execute("SELECT session_user, current_user") + row = cur.fetchone() + + if row is None: + yield + return + + session_user, current_user = row + if session_user == current_user: + yield + return + + with conn.cursor() as cur: + cur.execute("RESET ROLE") + + try: + yield + finally: + with conn.cursor() as cur: + cur.execute(sql.SQL("SET ROLE {}").format(sql.Identifier(current_user))) + + def ensure_tracking( conn: psycopg.Connection, *, schema: str = "pgpkg", table: str = "migrations" ) -> None: """Install the tracking schema/table if missing. 
Idempotent.""" - with conn.cursor() as cur: + with _session_role(conn), conn.cursor() as cur: cur.execute(tracking_ddl(schema, table)) -def current_version( +def current_tracking_version( conn: psycopg.Connection, *, schema: str = "pgpkg", table: str = "migrations" ) -> str | None: """Return the most recently applied version, or None if nothing applied.""" - with conn.cursor() as cur: + with _session_role(conn), conn.cursor() as cur: cur.execute("SELECT to_regclass(%s)", (f"{schema}.{table}",)) exists_row = cur.fetchone() if exists_row is None: @@ -54,7 +94,7 @@ def current_version( return row[0] if row else None -def record_applied( +def record_tracking_applied( conn: psycopg.Connection, version: str, sha256: str, @@ -64,7 +104,7 @@ def record_applied( table: str = "migrations", ) -> None: """Insert a row noting that `version` was just applied.""" - with conn.cursor() as cur: + with _session_role(conn), conn.cursor() as cur: cur.execute( sql.SQL( "INSERT INTO {schema}.{table} (version, sha256, filename) VALUES (%s, %s, %s)" @@ -76,6 +116,138 @@ def record_applied( ) +current_version = current_tracking_version +record_applied = record_tracking_applied + + +@runtime_checkable +class VersionSource(Protocol): + """Read the user-visible installed version and record successful applies.""" + + def read_live_version( + self, + conn: psycopg.Connection, + config: ProjectConfig, + ) -> str | None: ... + + def record_applied( + self, + conn: psycopg.Connection, + config: ProjectConfig, + *, + version: str, + sha256: str, + filename: str, + ) -> None: ... 
+ + +class DefaultVersionSource: + """Use pgpkg's own tracking table as the authoritative installed version.""" + + writes_default_tracking = True + + def read_live_version( + self, + conn: psycopg.Connection, + config: ProjectConfig, + ) -> str | None: + return current_tracking_version( + conn, + schema=config.tracking_schema, + table=config.tracking_table, + ) + + def record_applied( + self, + conn: psycopg.Connection, + config: ProjectConfig, + *, + version: str, + sha256: str, + filename: str, + ) -> None: + record_tracking_applied( + conn, + version=version, + sha256=sha256, + filename=filename, + schema=config.tracking_schema, + table=config.tracking_table, + ) + + +@contextmanager +def _project_import_path(project_root): # type: ignore[no-untyped-def] + path = str(project_root) + added = False + if path and path not in sys.path: + sys.path.insert(0, path) + added = True + try: + yield + finally: + if added: + with suppress(ValueError): + sys.path.remove(path) + + +def resolve_version_source( + config: ProjectConfig, + override: VersionSource | None = None, +) -> VersionSource: + """Return the configured version source instance. + + `override` wins over `[tool.pgpkg].version_source`. The configured string must + use `module:attribute` syntax and resolve to either an instance or a zero-arg + class that implements the VersionSource protocol. + """ + if override is not None: + return _validate_version_source_instance(override) + + if config.version_source is None: + return DefaultVersionSource() + + module_name, sep, attr_name = config.version_source.partition(":") + if not sep or not module_name or not attr_name: + raise ConfigError("[tool.pgpkg].version_source must use 'module:attribute' syntax.") + + try: + with _project_import_path(config.project_root): + module = import_module(module_name) + except Exception as exc: + raise ConfigError( + f"Could not import version source module {module_name!r} from project root " + f"{config.project_root}: {exc}. 
Install the module in the runtime environment " + "or pass version_source=... explicitly." + ) from exc + + try: + source_obj = getattr(module, attr_name) + except AttributeError as exc: + raise ConfigError( + f"Version source attribute {attr_name!r} not found in module {module_name!r}." + ) from exc + + if isinstance(source_obj, type): + source_obj = source_obj() + + return _validate_version_source_instance(source_obj) + + +def _validate_version_source_instance(source: object) -> VersionSource: + missing = [ + attr + for attr in ("read_live_version", "record_applied") + if not callable(getattr(source, attr, None)) + ] + if missing: + missing_str = ", ".join(missing) + raise ConfigError( + f"Configured version source is missing required callable(s): {missing_str}." + ) + return cast(VersionSource, source) + + def acquire_advisory_lock(conn: psycopg.Connection, project_name: str) -> None: """Acquire an xact-scoped advisory lock keyed deterministically on project_name.""" digest = hashlib.sha256(project_name.encode("utf-8")).digest() diff --git a/src/pgpkg/wrapper.py b/src/pgpkg/wrapper.py index f788d09..90c00c5 100644 --- a/src/pgpkg/wrapper.py +++ b/src/pgpkg/wrapper.py @@ -14,6 +14,7 @@ from .api import build_artifact from .config import load_project +from .errors import PgpkgError _PYPROJECT_TMPL = """\ [project] @@ -126,6 +127,13 @@ def scaffold_wrapper( Returns the output_dir path. """ project = load_project(project_root) + if project.config.version_source is not None: + raise PgpkgError( + "pgpkg wheel cannot scaffold a generic wrapper for projects using " + "[tool.pgpkg].version_source. Ship a custom wrapper package and call " + "pgpkg.api.migrate_from_artifact(..., version_source=...) 
explicitly.", + code="E_WRAP", + ) output_dir = Path(output_dir).resolve() output_dir.mkdir(parents=True, exist_ok=True) diff --git a/tests/integration/test_diff.py b/tests/integration/test_diff.py index ece2521..f31acd5 100644 --- a/tests/integration/test_diff.py +++ b/tests/integration/test_diff.py @@ -47,3 +47,36 @@ def test_makemigration_empty_when_identical(staged_project: Path, pg_url: str): line for line in body.splitlines() if line.strip() and not line.strip().startswith("--") ).strip() assert non_comment == "" + + +def test_makemigration_wraps_diff_with_files_and_sql( + staged_project: Path, + pg_url: str, + tmp_path: Path, +): + from pgpkg.api import stage_version + + (staged_project / "sql" / "030_newtable.sql").write_text( + "CREATE TABLE IF NOT EXISTS sampleext.extra (id int PRIMARY KEY);\n" + ) + stage_version(staged_project, "unreleased") + + prepend = tmp_path / "000_pre.sql" + prepend.write_text("SET search_path TO sampleext, public;\n") + append = tmp_path / "999_post.sql" + append.write_text("SELECT 42;\n") + + path = generate_incremental( + staged_project, + from_version="0.2.0", + to_version="unreleased", + base_url=pg_url, + prepend_files=[prepend], + append_files=[append], + append_sql=["SELECT 'done';"], + ) + body = path.read_text() + assert "SET search_path TO sampleext, public;" in body + assert '"sampleext"."extra"' in body + assert "SELECT 42;" in body + assert "SELECT 'done';" in body diff --git a/tests/integration/test_executor.py b/tests/integration/test_executor.py index ab9aff8..5592f02 100644 --- a/tests/integration/test_executor.py +++ b/tests/integration/test_executor.py @@ -5,7 +5,14 @@ import pytest from pgpkg._conn import connect -from pgpkg.api import apply_migrations, migrate, verify_round_trip +from pgpkg.api import ( + apply_migrations, + bundle_project, + migrate, + migrate_from_artifact, + verify_round_trip, +) +from pgpkg.tracking import DefaultVersionSource, current_tracking_version pytestmark = 
pytest.mark.integration @@ -16,6 +23,118 @@ def _drop_everything(pg_url: str) -> None: cur.execute("DROP SCHEMA IF EXISTS sampleext CASCADE") +class ProjectVersionSource: + def read_live_version(self, conn, config): # type: ignore[no-untyped-def] + with conn.cursor() as cur: + cur.execute("SELECT to_regclass('sampleext.project_migrations')") + row = cur.fetchone() + if row is None or row[0] is None: + return None + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id DESC LIMIT 1") + version_row = cur.fetchone() + return version_row[0] if version_row else None + + def record_applied(self, conn, config, *, version, sha256, filename): # type: ignore[no-untyped-def] + with conn.cursor() as cur: + cur.execute( + """ + CREATE TABLE IF NOT EXISTS sampleext.project_migrations ( + id serial PRIMARY KEY, + version text NOT NULL, + filename text NOT NULL + ) + """ + ) + cur.execute( + "INSERT INTO sampleext.project_migrations (version, filename) VALUES (%s, %s)", + (version, filename), + ) + + +class ExtendingDefaultVersionSource(DefaultVersionSource): + def record_applied(self, conn, config, *, version, sha256, filename): # type: ignore[no-untyped-def] + super().record_applied( + conn, + config, + version=version, + sha256=sha256, + filename=filename, + ) + with conn.cursor() as cur: + cur.execute( + """ + CREATE TABLE IF NOT EXISTS sampleext.project_migrations ( + id serial PRIMARY KEY, + version text NOT NULL, + filename text NOT NULL + ) + """ + ) + cur.execute( + "INSERT INTO sampleext.project_migrations (version, filename) VALUES (%s, %s)", + (version, filename), + ) + + +class ValidatingProjectVersionSource: + def read_live_version(self, conn, config): # type: ignore[no-untyped-def] + with conn.cursor() as cur: + cur.execute("SELECT to_regclass('sampleext.project_migrations')") + row = cur.fetchone() + if row is None or row[0] is None: + return None + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id DESC LIMIT 1") + 
version_row = cur.fetchone() + return version_row[0] if version_row else None + + def record_applied(self, conn, config, *, version, sha256, filename): # type: ignore[no-untyped-def] + tracking_version = current_tracking_version( + conn, + schema=config.tracking_schema, + table=config.tracking_table, + ) + if tracking_version != version: + raise RuntimeError( + f"tracking version mismatch: expected {version!r}, got {tracking_version!r}" + ) + + with conn.cursor() as cur: + cur.execute( + """ + CREATE TABLE IF NOT EXISTS sampleext.project_migrations ( + id serial PRIMARY KEY, + version text NOT NULL, + filename text NOT NULL + ) + """ + ) + cur.execute( + "INSERT INTO sampleext.project_migrations (version, filename) VALUES (%s, %s)", + (version, filename), + ) + + +def _configure_role_switch_sql(project_root: Path) -> None: + pre_dir = project_root / "sql" / "pre" + post_dir = project_root / "sql" / "post" + pre_dir.mkdir(parents=True, exist_ok=True) + post_dir.mkdir(parents=True, exist_ok=True) + + (pre_dir / "001_runtime_role.sql").write_text( + """ + DO $$ + BEGIN + CREATE ROLE sample_runtime_role; + EXCEPTION WHEN duplicate_object THEN + NULL; + END + $$; + GRANT sample_runtime_role TO CURRENT_USER; + """ + ) + (post_dir / "999_runtime_role.sql").write_text("SET ROLE sample_runtime_role;\n") + + def test_fresh_install_then_incremental(staged_project: Path, pg_url: str): _drop_everything(pg_url) result = apply_migrations(staged_project, target="0.1.0", conninfo=pg_url) @@ -50,3 +169,135 @@ def test_verify_round_trip_passes(staged_project: Path, pg_url: str): # Our hand-crafted incremental is exactly equivalent to the 0.2.0 base, # so there should be no diff. 
assert problems == [] + + +def test_custom_version_source_reads_and_records_project_version( + staged_project: Path, + pg_url: str, +): + _drop_everything(pg_url) + source = ProjectVersionSource() + + first = apply_migrations( + staged_project, + target="0.1.0", + conninfo=pg_url, + version_source=source, + ) + assert first.applied == ["0.1.0"] + + with connect(pg_url, autocommit=True) as conn, conn.cursor() as cur: + cur.execute("SELECT version FROM pgpkg.migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.1.0"] + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.1.0"] + + second = apply_migrations( + staged_project, + target="0.2.0", + conninfo=pg_url, + version_source=source, + ) + assert second.applied == ["0.2.0"] + + with connect(pg_url, autocommit=True) as conn, conn.cursor() as cur: + cur.execute("SELECT version FROM pgpkg.migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.1.0", "0.2.0"] + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.1.0", "0.2.0"] + + +def test_default_version_source_subclass_does_not_duplicate_tracking_rows( + staged_project: Path, + pg_url: str, +): + _drop_everything(pg_url) + source = ExtendingDefaultVersionSource() + + result = apply_migrations( + staged_project, + target="0.2.0", + conninfo=pg_url, + version_source=source, + ) + assert "0.2.0" in result.applied + + with connect(pg_url, autocommit=True) as conn, conn.cursor() as cur: + cur.execute("SELECT version FROM pgpkg.migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] + + +def test_migrate_from_artifact_uses_override_version_source( + staged_project: Path, + pg_url: str, + tmp_path: Path, +): + _drop_everything(pg_url) + 
(staged_project / "pyproject.toml").write_text( + "\n".join( + [ + "[tool.pgpkg]", + 'project_name = "sampleext"', + 'prefix = "sampleext"', + 'version_source = "does.not.exist:VersionSource"', + ] + ) + ) + artifact = tmp_path / "sampleext.tar.zst" + bundle_project(staged_project, artifact) + + result = migrate_from_artifact( + artifact, + target="0.2.0", + conninfo=pg_url, + version_source=ProjectVersionSource(), + ) + + assert result.applied == ["0.2.0"] + + with connect(pg_url, autocommit=True) as conn, conn.cursor() as cur: + cur.execute("SELECT version FROM pgpkg.migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] + + +def test_default_tracking_survives_set_role_in_migration_sql( + staged_project: Path, + pg_url: str, +): + _drop_everything(pg_url) + _configure_role_switch_sql(staged_project) + + result = apply_migrations(staged_project, target="0.2.0", conninfo=pg_url) + + assert result.applied == ["0.2.0"] + + with connect(pg_url, autocommit=True) as conn, conn.cursor() as cur: + cur.execute("SELECT version FROM pgpkg.migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] + + +def test_custom_version_source_can_validate_tracking_after_set_role( + staged_project: Path, + pg_url: str, +): + _drop_everything(pg_url) + _configure_role_switch_sql(staged_project) + + result = apply_migrations( + staged_project, + target="0.2.0", + conninfo=pg_url, + version_source=ValidatingProjectVersionSource(), + ) + + assert result.applied == ["0.2.0"] + + with connect(pg_url, autocommit=True) as conn, conn.cursor() as cur: + cur.execute("SELECT version FROM pgpkg.migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] + cur.execute("SELECT version FROM sampleext.project_migrations ORDER BY id") + assert [r[0] for r in cur.fetchall()] == ["0.2.0"] diff --git 
a/tests/unit/test_artifact.py b/tests/unit/test_artifact.py index 2d01f65..f296100 100644 --- a/tests/unit/test_artifact.py +++ b/tests/unit/test_artifact.py @@ -4,7 +4,7 @@ import pytest -from pgpkg.api import stage_version +from pgpkg.api import bundle_project, stage_version from pgpkg.artifact import build_artifact, load_artifact from pgpkg.config import load_config from pgpkg.errors import PgpkgError @@ -22,6 +22,10 @@ def test_build_and_load_artifact(sample_project: Path, tmp_path: Path): loaded = load_artifact(out) assert loaded.manifest.project_name == "sampleext" + assert loaded.manifest.prefix == "sampleext" + assert loaded.manifest.tracking_schema == "pgpkg" + assert loaded.manifest.tracking_table == "migrations" + assert loaded.manifest.version_source is None migs = loaded.migrations_files() assert "migrations/sampleext--0.1.0.sql" in migs assert "-- pre" in loaded.pre_sql() @@ -45,3 +49,64 @@ def test_empty_migrations_raises(sample_project: Path, tmp_path: Path): (sample_project / "migrations").mkdir() # empty dir with pytest.raises(PgpkgError): build_artifact(load_config(sample_project), tmp_path / "x.tar.zst") + + +def test_bundle_project_uses_project_root(sample_project: Path, tmp_path: Path): + stage_version(sample_project, "0.1.0") + + out = tmp_path / "project-artifact.tar.zst" + bundle_project(sample_project, out) + + loaded = load_artifact(out) + assert loaded.manifest.project_name == "sampleext" + assert "migrations/sampleext--0.1.0.sql" in loaded.migrations_files() + + +def test_artifact_manifest_preserves_runtime_config(sample_project: Path, tmp_path: Path): + (sample_project / "pyproject.toml").write_text( + "\n".join( + [ + "[tool.pgpkg]", + 'project_name = "sampleext"', + 'prefix = "sampleext"', + 'version_source = "sampleext.migrate:VersionSource"', + "", + "[tool.pgpkg.tracking]", + 'schema = "sample_tracking"', + 'table = "schema_versions"', + ] + ) + ) + stage_version(sample_project, "0.1.0") + + out = tmp_path / 
"project-artifact.tar.zst" + bundle_project(sample_project, out) + + loaded = load_artifact(out) + assert loaded.manifest.tracking_schema == "sample_tracking" + assert loaded.manifest.tracking_table == "schema_versions" + assert loaded.manifest.version_source == "sampleext.migrate:VersionSource" + + +def test_build_artifact_is_deterministic(sample_project: Path, tmp_path: Path): + stage_version(sample_project, "0.1.0") + + first = tmp_path / "first.tar.zst" + second = tmp_path / "second.tar.zst" + + build_artifact(load_config(sample_project), first) + build_artifact(load_config(sample_project), second) + + assert first.read_bytes() == second.read_bytes() + + +def test_bundle_project_rejects_invalid_migration_filenames( + sample_project: Path, + tmp_path: Path, +): + migrations_dir = sample_project / "migrations" + migrations_dir.mkdir() + (migrations_dir / "sampleext.0.1.0.sql").write_text("-- invalid name\n") + + with pytest.raises(PgpkgError): + bundle_project(sample_project, tmp_path / "invalid-artifact.tar.zst") diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py index 5c5b75b..0ff621f 100644 --- a/tests/unit/test_cli.py +++ b/tests/unit/test_cli.py @@ -40,6 +40,20 @@ def test_stageversion(sample_project: Path): assert (sample_project / "migrations" / "sampleext--0.1.0.sql").exists() +def test_stageversion_also_write(sample_project: Path, tmp_path: Path): + extra = tmp_path / "sampleext.sql" + cp = _run( + "stageversion", + "0.1.0", + "--also-write", + str(extra), + "--project-root", + str(sample_project), + ) + assert cp.returncode == 0, cp.stderr + assert extra.exists() + + def test_graph_text(staged_project: Path): cp = _run("graph", "--project-root", str(staged_project)) assert cp.returncode == 0 @@ -88,3 +102,14 @@ def test_bundle_command(staged_project: Path, tmp_path: Path): cp = _run("bundle", "--output", str(out), "--project-root", str(staged_project)) assert cp.returncode == 0, cp.stderr assert out.exists() and out.stat().st_size > 0 + + +def 
test_info_json_includes_runtime_flags(sample_project: Path): + cp = _run("info", "--json", "--project-root", str(sample_project)) + assert cp.returncode == 0, cp.stderr + import json + + data = json.loads(cp.stdout) + assert data["tracking_schema"] == "pgpkg" + assert data["tracking_table"] == "migrations" + assert data["version_source"] is None diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 4cf8312..3abf206 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -16,6 +16,7 @@ def test_load_config_sample(sample_project: Path): assert c.migrations_dir == sample_project / "migrations" assert c.pre_dir == sample_project / "sql" / "pre" assert c.post_dir == sample_project / "sql" / "post" + assert c.version_source is None assert c.tracking_schema == "pgpkg" @@ -40,3 +41,33 @@ def test_project_name_fallback_to_project(tmp_path: Path): def test_load_project_empty_migrations(sample_project: Path): project = load_project(sample_project) assert project.catalog.versions == [] + + +def test_load_config_with_version_source(tmp_path: Path): + (tmp_path / "pyproject.toml").write_text( + "\n".join( + [ + "[tool.pgpkg]", + 'project_name = "custom"', + 'version_source = "custom.module:Source"', + ] + ) + ) + c = load_config(tmp_path) + assert c.project_name == "custom" + assert c.version_source == "custom.module:Source" + + +def test_load_config_rejects_pre_post_in_base(tmp_path: Path): + (tmp_path / "pyproject.toml").write_text( + "\n".join( + [ + "[tool.pgpkg]", + 'project_name = "custom"', + "pre_post_in_base = true", + ] + ) + ) + + with pytest.raises(ConfigError, match="pre_post_in_base"): + load_config(tmp_path) diff --git a/tests/unit/test_staging.py b/tests/unit/test_staging.py index bce0336..48bb477 100644 --- a/tests/unit/test_staging.py +++ b/tests/unit/test_staging.py @@ -25,6 +25,24 @@ def test_stage_version_writes_file(sample_project: Path): assert "Version: 0.1.0" in path.read_text() +def 
test_stage_version_also_writes_second_copy(sample_project: Path, tmp_path: Path): + extra = tmp_path / "sampleext.sql" + path = stage_version(sample_project, "0.1.0", also_write=extra) + assert path.exists() + assert extra.exists() + assert extra.read_text() == path.read_text() + + +def test_stage_version_also_write_preflights_all_targets(sample_project: Path, tmp_path: Path): + extra = tmp_path / "sampleext.sql" + extra.write_text("existing") + + with pytest.raises(LayoutError): + stage_version(sample_project, "0.1.0", also_write=extra, overwrite=False) + + assert not (sample_project / "migrations" / "sampleext--0.1.0.sql").exists() + + def test_no_sql_dir(tmp_path: Path): (tmp_path / "pyproject.toml").write_text('[tool.pgpkg]\nproject_name = "x"\n') with pytest.raises(LayoutError): diff --git a/tests/unit/test_tracking.py b/tests/unit/test_tracking.py new file mode 100644 index 0000000..4abe52c --- /dev/null +++ b/tests/unit/test_tracking.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +from pathlib import Path +from types import SimpleNamespace +from typing import cast + +import pytest +import psycopg +from pgpkg.config import ProjectConfig +from pgpkg.errors import ConfigError +from pgpkg.tracking import DefaultVersionSource, resolve_version_source + + +class _DummyVersionSource: + def read_live_version(self, conn, config): # type: ignore[no-untyped-def] + return "1.2.3" + + def record_applied(self, conn, config, *, version, sha256, filename): # type: ignore[no-untyped-def] + return None + + +def _config(*, version_source: str | None = None) -> ProjectConfig: + return ProjectConfig( + project_name="sampleext", + prefix="sampleext", + sql_dir=Path("sql"), + migrations_dir=Path("migrations"), + pre_dir=Path("sql/pre"), + post_dir=Path("sql/post"), + project_root=Path("."), + version_source=version_source, + ) + + +def test_resolve_version_source_defaults_to_builtin(): + resolved = resolve_version_source(_config()) + assert isinstance(resolved, 
DefaultVersionSource) + + +def test_resolve_version_source_uses_override_instance(): + source = _DummyVersionSource() + resolved = resolve_version_source(_config(), override=source) + assert resolved is source + + +def test_resolve_version_source_imports_class(monkeypatch: pytest.MonkeyPatch): + fake_module = SimpleNamespace(CustomSource=_DummyVersionSource) + + def fake_import_module(name: str): + assert name == "demo.module" + return fake_module + + monkeypatch.setattr("pgpkg.tracking.import_module", fake_import_module) + resolved = resolve_version_source( + _config(version_source="demo.module:CustomSource") + ) + assert isinstance(resolved, _DummyVersionSource) + + +def test_resolve_version_source_imports_relative_to_project_root(tmp_path: Path): + (tmp_path / "custom_source.py").write_text( + "\n".join( + [ + "class VersionSource:", + " def read_live_version(self, conn, config):", + " return '1.2.3'", + "", + " def record_applied(self, conn, config, *, version, sha256, filename):", + " return None", + ] + ) + ) + config = ProjectConfig( + project_name="sampleext", + prefix="sampleext", + sql_dir=tmp_path / "sql", + migrations_dir=tmp_path / "migrations", + pre_dir=tmp_path / "sql" / "pre", + post_dir=tmp_path / "sql" / "post", + project_root=tmp_path, + version_source="custom_source:VersionSource", + ) + + resolved = resolve_version_source(config) + + assert ( + resolved.read_live_version(cast(psycopg.Connection, object()), config) + == "1.2.3" + ) + + +def test_resolve_version_source_rejects_missing_methods( + monkeypatch: pytest.MonkeyPatch, +): + fake_module = SimpleNamespace(BadSource=object) + + monkeypatch.setattr("pgpkg.tracking.import_module", lambda name: fake_module) + + with pytest.raises(ConfigError): + resolve_version_source(_config(version_source="demo.module:BadSource")) diff --git a/tests/unit/test_wrapper.py b/tests/unit/test_wrapper.py new file mode 100644 index 0000000..c9aa693 --- /dev/null +++ b/tests/unit/test_wrapper.py @@ -0,0 +1,31 @@ 
+from __future__ import annotations + +from pathlib import Path + +import pytest + +from pgpkg.errors import PgpkgError +from pgpkg.wrapper import scaffold_wrapper + + +def test_scaffold_wrapper_rejects_custom_version_source( + sample_project: Path, + tmp_path: Path, +): + (sample_project / "pyproject.toml").write_text( + "\n".join( + [ + "[tool.pgpkg]", + 'project_name = "sampleext"', + 'prefix = "sampleext"', + 'version_source = "sampleext.migrate:VersionSource"', + ] + ) + ) + + with pytest.raises(PgpkgError, match="version_source"): + scaffold_wrapper( + sample_project, + output_dir=tmp_path / "wrapper", + cli_name="sampleext-migrator", + ) \ No newline at end of file From bf42eeec7fdcecb71c94119f820b4736728825d8 Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Tue, 5 May 2026 15:50:32 -0500 Subject: [PATCH 4/7] ci: harden release validation workflows --- .github/workflows/ci.yml | 27 ++++++++++++++++++++++++++- .github/workflows/publish-pypi.yml | 16 ++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70b4803..a0939fd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,6 +39,7 @@ jobs: unit: runs-on: ubuntu-latest + name: unit (${{ matrix.python-version }}) strategy: fail-fast: false matrix: @@ -63,8 +64,9 @@ jobs: - name: Run unit tests run: uv run pytest tests/unit -q - integration-build-docs: + integration: runs-on: ubuntu-latest + name: integration (${{ matrix.postgres-image }}) strategy: fail-fast: false matrix: @@ -91,6 +93,29 @@ jobs: PGPKG_TEST_POSTGRES_IMAGE: ${{ matrix.postgres-image }} run: uv run pytest tests/integration -q + build-docs: + runs-on: ubuntu-latest + needs: + - quality + - unit + - integration + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + with: + enable-cache: 
true + + - name: Install dependencies + run: uv sync --extra dev --extra diff + - name: Build distributions run: uv build --out-dir dist diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index f95532e..21b2eca 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -49,10 +49,26 @@ jobs: - name: Smoke test wheel install run: | + set -euo pipefail uv venv .venv-smoke uv pip install --python .venv-smoke/bin/python dist/*.whl .venv-smoke/bin/pgpkg --help + - name: Smoke test generated wrapper + run: | + set -euo pipefail + smoke_python="$PWD/.venv-smoke/bin/python" + smoke_wrapper="$PWD/.venv-smoke/bin/sampleext-migrator" + tmpdir="$(mktemp -d)" + cp -R tests/fixtures/sample_project/. "$tmpdir/" + uv run pgpkg stageversion 0.1.0 --project-root "$tmpdir" + uv run pgpkg wheel --project-root "$tmpdir" --output-dir "$tmpdir/wrapper" + pushd "$tmpdir/wrapper" + uv build --out-dir dist + uv pip install --python "$smoke_python" dist/*.whl + "$smoke_wrapper" info + popd + - name: Upload distributions uses: actions/upload-artifact@v4 with: From 21c08fd9e1c2e01f311797b3d05b67c9cc95cdc2 Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Tue, 5 May 2026 15:50:38 -0500 Subject: [PATCH 5/7] docs: refresh release and API guidance --- .gitignore | 2 + CHANGELOG.md | 10 +- CONTRIBUTING.md | 11 +- GITHUB_PYPI_SETUP.md | 149 ++++++++++++++++++++++++++ PROJECT_PLAN.md | 226 ++++++++++++++++++++++++++++++++++++++++ README.md | 27 ++++- docs/api.md | 67 +++++++++--- docs/architecture.md | 18 +++- docs/cli.md | 53 +++++++--- docs/index.md | 4 +- docs/layout.md | 45 ++++++-- docs/quickstart.md | 16 ++- docs/troubleshooting.md | 50 ++++++++- docs/wrapper.md | 33 ++++-- 14 files changed, 648 insertions(+), 63 deletions(-) create mode 100644 GITHUB_PYPI_SETUP.md create mode 100644 PROJECT_PLAN.md diff --git a/.gitignore b/.gitignore index 53b05e0..55e4108 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,8 @@ 
pip-wheel-metadata/ # local-only markdown notes *_SETUP.md *_PLAN.md +!GITHUB_PYPI_SETUP.md +!PROJECT_PLAN.md # hookify rules (personal dev tooling, never commit) .claude/*.local.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b19c2a..95f7d89 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,22 +5,30 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [0.1.0] - 2026-04-27 +## [0.1.0] - 2026-05-05 ### Added - Core migration toolkit commands: `stageversion`, `makemigration`, `graph`, `migrate`, `wheel`, `bundle`, `info`, `verify`. - Python API facade for staging, planning, migration, and verification flows. - Wrapper scaffold flow with bundled migration artifact (`tar.zst`) and sample wrapper project. +- Runtime tracking configuration via `[tool.pgpkg.tracking]` and pluggable `version_source` support for application-owned version tables. +- `stageversion --also-write` plus `makemigration --prepend-file`, `--append-file`, and `--append-sql` for custom packaging and wrapper migration flows. +- Deterministic bundle artifacts that preserve tracking schema/table and configured version-source metadata. - Unit and integration test suites plus wrapper end-to-end test. - MkDocs documentation site with architecture, API, CLI, layout, and quickstart guides. - CI workflows for quality/unit/integration+build/docs and release publishing. ### Changed +- Documentation now standardizes install and release examples on `uv` and documents custom tracking/runtime packaging constraints. - Publish workflow hardened with wheel install smoke test before publish. +- Publish workflow now smoke-tests a generated wrapper package before uploading distributions. - Publish workflow checks release tag/version parity before PyPI publish. 
- Docs deployment workflow aligned to main-branch release path and updated Pages action versions. - Integration tests now support configurable PostgreSQL image for CI matrix validation. +### Fixed +- Tracking writes now survive migration SQL that changes the active database role. + ### Security - Trusted publishing workflow configured for TestPyPI/PyPI environments. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d9f158b..e40cfef 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -44,11 +44,12 @@ uv run mkdocs build --strict 4. Verify TestPyPI install path in a clean venv: ```bash -python -m venv .venv-testpypi -. .venv-testpypi/bin/activate -python -m pip install --upgrade pip -python -m pip install -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple pgpkg -pgpkg --help +uv venv .venv-testpypi +uv pip install --python .venv-testpypi/bin/python \ + -i https://test.pypi.org/simple/ \ + --extra-index-url https://pypi.org/simple \ + pgpkg +.venv-testpypi/bin/pgpkg --help ``` 5. Create release tag `vX.Y.Z` matching `src/pgpkg/__init__.py::__version__`. diff --git a/GITHUB_PYPI_SETUP.md b/GITHUB_PYPI_SETUP.md new file mode 100644 index 0000000..793f9a1 --- /dev/null +++ b/GITHUB_PYPI_SETUP.md @@ -0,0 +1,149 @@ +# GitHub + PyPI Setup Guide + +This checklist takes a local `pgpkg` repository to a production-ready GitHub repository with automated TestPyPI/PyPI publishing. + +## 1) Create and push the GitHub repository + +1. Create an empty GitHub repo (for example, `pgpkg`). +2. Add the remote and push: + +```bash +git remote add origin git@github.com:/pgpkg.git +git push -u origin +``` + +3. Push all long-lived branches you want preserved. + +## 2) Repository settings (recommended hygiene) + +1. Enable branch protection on the default branch: + - Require pull request before merging + - Require status checks to pass + - Require branches to be up to date before merging +2. 
Require these checks from CI: + - `quality` + - `unit (3.11)` + - `unit (3.12)` + - `unit (3.13)` + - `integration (postgres:14-alpine)` + - `integration (postgres:15-alpine)` + - `integration (postgres:16-alpine)` + - `integration (postgres:17-alpine)` + - `build-docs` +3. Enable "Automatically delete head branches" after merge. +4. Enable Dependabot alerts and security updates. +5. In Settings -> Pages, set Source to `GitHub Actions` so `.github/workflows/docs-pages.yml` can publish the MkDocs site. + +## 3) PyPI/TestPyPI account and project prerequisites + +You need accounts on both sites: +- https://pypi.org +- https://test.pypi.org + +Create or claim the same project name on TestPyPI first, then PyPI. + +### Required access + +- You must be an Owner or Maintainer for the project on both services. +- You must have admin rights on the GitHub repository to configure environments and workflows. + +## 4) Preferred publishing model: Trusted Publishing (OIDC) + +This repository already includes `.github/workflows/publish-pypi.yml`, which is configured for trusted publishing. + +### 4a) Configure GitHub environments + +Create these environments in GitHub: +- `testpypi` +- `pypi` + +Optional hardening: +- Add required reviewers for `pypi` environment +- Restrict deployment branches/tags + +### 4b) Configure trusted publisher on TestPyPI + +In TestPyPI project settings, add a trusted publisher with: +- Owner: `` +- Repository: `pgpkg` +- Workflow filename: `publish-pypi.yml` +- Environment: `testpypi` + +### 4c) Configure trusted publisher on PyPI + +In PyPI project settings, add a trusted publisher with: +- Owner: `` +- Repository: `pgpkg` +- Workflow filename: `publish-pypi.yml` +- Environment: `pypi` + +## 5) Credentials you actually need + +With trusted publishing configured correctly: +- No PyPI API token is needed in GitHub secrets. +- No username/password is needed in CI. 
+- The only required "credential" in CI is GitHub's OIDC identity (`id-token: write`, already set in workflow). + +You still need: +- GitHub account with repo admin rights +- PyPI/TestPyPI account with project owner/maintainer rights + +## 6) Optional fallback model: API token publishing + +Only use this if trusted publishing cannot be enabled. + +1. Create project-scoped API tokens in PyPI and TestPyPI. +2. Add these repository secrets: + - `PYPI_API_TOKEN` + - `TEST_PYPI_API_TOKEN` +3. Update publish workflow to pass `password: ${{ secrets. }}` to `pypa/gh-action-pypi-publish`. + +## 7) First release flow + +1. Run local gate: + +```bash +uv run pre-commit run --all-files +uv run ty check src tests +uv run pytest -q +uv build --out-dir dist +uv run python -m twine check dist/* +uv run mkdocs build --strict +``` + +2. Smoke publish to TestPyPI: + - GitHub Actions -> `Publish` -> `Run workflow` -> `repository=testpypi` +3. Verify install from TestPyPI: + +```bash +uv venv /tmp/pgpkg-smoke +uv pip install --python /tmp/pgpkg-smoke/bin/python \ + -i https://test.pypi.org/simple/ \ + --extra-index-url https://pypi.org/simple \ + pgpkg +/tmp/pgpkg-smoke/bin/pgpkg --help +``` + +The `Publish` workflow's build job also smoke-tests a generated wrapper wheel, +so a passing TestPyPI run confirms both the base CLI wheel and the wrapper +packaging path. + +4. Create a GitHub Release (tag) to trigger production PyPI publish. + +## 8) Post-setup metadata cleanup + +Add canonical URLs in `pyproject.toml`: + +```toml +[project.urls] +Homepage = "https://github.com//pgpkg" +Repository = "https://github.com//pgpkg" +Documentation = "https://.github.io/pgpkg/" +Issues = "https://github.com//pgpkg/issues" +``` + +## 9) Quick troubleshooting + +- "invalid-publisher" or "publisher not trusted": trusted publisher fields do not exactly match repo/workflow/environment. For this repo, confirm `bitner/pgpkg`, workflow `publish-pypi.yml`, and environment `testpypi` or `pypi` exactly. 
+- Publish job not starting: verify environment name in workflow matches configured environment. +- Artifact missing in publish job: ensure build job used `uv build --out-dir dist` and uploaded `dist/*`. diff --git a/PROJECT_PLAN.md b/PROJECT_PLAN.md new file mode 100644 index 0000000..bb67f9f --- /dev/null +++ b/PROJECT_PLAN.md @@ -0,0 +1,226 @@ +# pgpkg Project Plan (Living) + +Last updated: 2026-05-05 +Owner: core maintainers +Status: Local release gate, docs refresh, and generated-wrapper smoke pass on docs/v0.1.0-release-prep; remaining external blocker is TestPyPI trusted-publisher configuration + +## 1) Mission + +Ship a reliable PostgreSQL migration toolkit (library + CLI) that: +- stages base SQL versions from `sql/` +- generates incrementals via `results` +- plans and applies migrations safely to live DBs +- bundles migrations for wrapper projects +- publishes cleanly to PyPI with reproducible release gates + +## 2) Source of Truth Rules (Read First) + +- This file is a living execution plan and must be updated during active work. +- Every task here must be in one of three states: `[ ]` not started, `[-]` in progress, `[x]` done. +- Do not leave ambiguous status text like "almost done". +- Update this plan in the same PR/commit where work is performed. +- When a task is done, add one brief proof note (test result, command, or file changed). +- Before asking another model to review the project, update this file first so the next model starts from accurate state. +- After any alternate-model review, merge accepted findings back into this file immediately and convert them into checklist items. +- If the working tree contains uncommitted release-critical changes, record them in this plan explicitly. 
+ +## 3) Session-Recall Operating Loop (Required) + +Before any substantial coding/review session, run: + +```bash +session-recall files --json --limit 10 +session-recall list --json --limit 5 +``` + +If those are low-signal, deepen selectively: + +```bash +session-recall search "" --json +session-recall show --json +session-recall health +``` + +Plan update protocol per session: +1. Run the two baseline `session-recall` commands. +2. If `session-recall files` is empty, note "cold start" rather than assuming recall is broken. +3. Update this plan's "Session Log" with date + one-line summary before substantial work. +4. If using another model, paste or summarize the current checklist/status into that session. +5. Execute work. +6. Mark checklist status changes immediately. +7. Record validation evidence under the task that changed. + +## 4) Current Scope (Release 0.1.x) + +In scope: +- `stageversion`, `makemigration`, `graph`, `migrate`, `wheel`, `bundle`, `info`, `verify` +- Python API facade in `src/pgpkg/api.py` +- Wrapper scaffolding and sample wrapper +- Unit/integration tests and docs site +- CI, docs deployment, trusted publishing workflows + +Out of scope: +- pg_tle/deb packaging and extension distro orchestration +- downgrade planning +- cross-database support +- plugin systems and custom adapter runtime hooks + +## 5) Codebase Reality Check (Synced) + +Implemented modules exist: +- `src/pgpkg/{api,artifact,catalog,cli,config,diff,errors,executor,layout,planner,staging,tracking,versioning,wrapper,_conn}.py` + +Implemented tests exist: +- Unit: `tests/unit/*` +- Integration: `tests/integration/{test_diff,test_executor,test_tracking,test_wrapper_end_to_end}.py` +- Shared fixtures: `tests/conftest.py`, `tests/fixtures/sample_project/*` + +Workflows exist: +- `ci.yml` +- `docs-pages.yml` +- `publish-pypi.yml` + +Docs exist: +- `docs/{index,quickstart,layout,cli,api,wrapper,architecture}.md` + +Current branch state: +- Branch: `docs/v0.1.0-release-prep` +- 
Divergence from `main`: ahead 2, behind 0 (`git rev-list --left-right --count main...HEAD` -> `0 2`) +- Working tree: clean (`git status --short --branch`) +- Remote tracking: none configured locally (`git branch -vv`) +- Branch-only files vs `main`: `.gitignore`, `CHANGELOG.md`, `README.md`, `docs/troubleshooting.md` + +## 6) Completed Work Log (Up To Date) + +Core implementation: +- [x] Project scaffolding and package layout complete. +- [x] Core migration pipeline complete (`stageversion` -> `makemigration` -> `migrate`). +- [x] Wrapper bundle flow complete (`wheel` + sample wrapper). +- [x] Tracking schema and migration execution path complete. +- [x] CLI command surface complete and tested. + +Quality and validation: +- [x] Unit and integration test suites implemented. +- [x] Local validation passes: `63 passed` on latest run. +- [x] Packaging checks pass: wheel/sdist build + `twine check`. +- [x] Docs strict build passes (`mkdocs build --strict`). +- [x] Branch-local packaging/docs checks rerun after post-`main` docs changes. + - Proof: `uv build --out-dir dist`, `uv run twine check dist/*`, and `uv run mkdocs build --strict` passed on 2026-04-27 on `docs/v0.1.0-release-prep`. + +Release hardening recently added: +- [x] Publish workflow wheel smoke test added. + - Proof: `.github/workflows/publish-pypi.yml` has "Smoke test wheel install". +- [x] Publish workflow release tag/version guard added. + - Proof: `.github/workflows/publish-pypi.yml` has "Verify tag matches built version". +- [x] Docs workflow cleaned to main-only branch deploy path. + - Proof: `.github/workflows/docs-pages.yml` push/deploy conditions target `main` only. +- [x] Docs workflow Pages action versions updated. + - Proof: `actions/configure-pages@v6`, `upload-pages-artifact@v5`, `deploy-pages@v5`. +- [x] Latest local release validation on current tree passes. + - Proof: `pytest`, `uv build`, `twine check`, and `mkdocs build --strict` succeeded on 2026-04-27 after workflow hardening edits. 
+- [x] Generated wrapper smoke path validated locally against the built `pgpkg` wheel. + - Proof: built a sample wrapper, installed repo `dist/pgpkg-0.1.0-py3-none-any.whl` into a clean venv, then installed the generated wrapper wheel and ran `sampleext-migrator info` on 2026-05-05. + +Session-recall setup: +- [x] `session-recall` installed and callable. +- [x] Copilot instructions include "Progressive Session Recall" block. +- [x] Session store now available (health command succeeds). +- [ ] Session-recall corpus warmed with useful repo-file history. + - Current state: `files --json --limit 10` still returns 0 files; `list --json --limit 5` now returns 5 recent repo sessions; `health` reports `22 sessions` with progressive disclosure still calibrating at `86/200`. + +## 7) Active Checklist (What Is Left) + +### A. Multi-direction review pass +- [x] Run a second-model release review using the baseline session-recall commands first. + - Proof: secondary reviewer run completed with actionable findings on workflow publish guardrails, metadata URLs, and supply-chain pinning risk. +- [x] Merge accepted second-model findings into this plan the same day. + - Accepted: manual-PyPI version guard added, project URLs added, CI postgres-version matrix added. +- [x] Record rejected findings with one-line rationale to avoid repeated churn. + - Rejected/deferred: full commit-SHA action pinning deferred to a separate supply-chain hardening pass to avoid mixing with release-candidate unblockers. + +### B. Commit and remote validation +- [x] Commit current release-hardening workflow edits with a conventional commit. + - Proof: `bff68c5 chore: harden release pipeline and release docs`, `579c2fc fix: make release smoke test uv-venv compatible`. +- [x] Push branch and verify CI/doc workflows succeed remotely. + - Proof: CI run `25015495681` success and Docs run `25015495690` success on head `579c2fcee7683672e1893330d740e3b6f1bd7f1c`. + +### C. 
Release candidate prep +- [x] Latest local release gate passes on current tree. + - Proof: validated on 2026-04-27. +- [x] Re-run local release gate immediately before tagging if any code or workflow changes occur after this plan update. + - Proof: pre-commit, ty, pytest, build, twine check, and mkdocs strict rerun passed on 2026-04-27 after metadata/docs/CI updates. +- [x] Re-ran the local release gate after runtime-config/docs refresh on this branch. + - Proof: `uv run pre-commit run --all-files`, `uv run ty check src tests`, `uv run pytest -q`, `uv build --out-dir dist`, `uv run python -m twine check dist/*`, and `uv run mkdocs build --strict` all passed on 2026-05-05. +- [x] Create/refresh `CHANGELOG.md` with 0.1.0 release notes. +- [-] Execute TestPyPI publish dry run via workflow_dispatch. + - Progress: workflow dispatched and build stage passes. + - Blocker: trusted publishing exchange fails with `invalid-publisher` for environment `testpypi`. + - Required user action: configure TestPyPI trusted publisher to match: + - repository: `bitner/pgpkg` + - workflow: `.github/workflows/publish-pypi.yml` + - ref: `refs/heads/main` + - environment: `testpypi` +- [ ] Verify install from TestPyPI in clean venv and run `pgpkg --help`. +- [ ] If dry run passes, create GitHub Release tag matching `src/pgpkg/__init__.py::__version__`. + +### D. Session-recall optimization +- [-] For the next 5-10 real work sessions, prepend baseline recall commands and ensure actual file edits happen in those sessions. + - Progress: baseline recall commands were rerun on 2026-04-27 before this plan update; this session includes a real file edit to keep warming the corpus. +- [-] After each substantial session, add one short human summary to the session log in this plan. + - Progress: this session's summary was added below; continue this habit until file recall becomes non-empty. 
+- [ ] Re-check after warmup: + - `session-recall files --json --limit 10` + - `session-recall list --json --limit 5` + - `session-recall health` +- [ ] Target: non-empty repo file recall and improved corpus-size/progressive-disclosure signal. + +### E. Nice-to-have (post-0.1.0) +- [x] Add PostgreSQL version matrix (14-17) for integration tests. + - Proof: `ci.yml` integration job now uses `postgres:14/15/16/17-alpine` matrix with `PGPKG_TEST_POSTGRES_IMAGE`. +- [x] Add release process doc section for version bump + release/tag policy. + - Proof: `CONTRIBUTING.md` release section updated with explicit version/tag parity and TestPyPI verification path. +- [x] Add troubleshooting section for top migration/connectivity failure modes. + - Proof: new `docs/troubleshooting.md` added and included in `mkdocs.yml` nav. + +## 8) Definition of Done for 0.1.0 + +All must be true: +- [-] All release gate commands pass locally and in CI. + - Progress: latest full gate passed locally on 2026-05-05, including `pre-commit`, `ty`, `pytest`, `uv build`, `twine check`, strict MkDocs, and generated-wrapper smoke; branch-specific remote CI has not run because this branch has no upstream. +- [x] Second-model review findings are triaged and merged into this plan. + - Proof: accepted and rejected findings are recorded in section 7A and summarized in the 2026-04-27 session log. +- [ ] TestPyPI publish + install smoke test verified. +- [ ] GitHub Release tag equals package version. +- [ ] PyPI publish succeeds from trusted publishing workflow. +- [-] Session-recall baseline commands are part of team runbook and this plan-update loop is being followed. + - Progress: section 3 defines the loop, and this session followed it before work started; keep using it through the remaining warmup sessions. + +## 9) Session Log (Keep Current) + +- [2026-04-27] Plan rewritten to living format; synced with current code/workflows; release-hardening tasks recorded as done. 
+- [2026-04-27] Added publish smoke test + tag/version guard + docs workflow cleanup; local validation re-run passed. +- [2026-04-27] Session-recall confirmed functional; corpus still cold-start and needs more sessions to become high-signal. +- [2026-04-27] Plan refreshed again for second-model review; current workflow hardening changes are local/uncommitted; baseline recall still shows cold-start with empty file recall. +- [2026-04-27] Second-model findings triaged: accepted release guardrail + metadata + matrix/doc improvements implemented; local full gate rerun passed; supply-chain SHA pinning deferred. +- [2026-04-27] Committed and pushed release hardening updates; remote CI+Docs runs are green on latest main; TestPyPI publish run failed at trusted publisher exchange (`invalid-publisher`) and requires account-level publisher config update. +- [2026-04-27] Baseline recall rerun on `docs/v0.1.0-release-prep`: repo sessions increased to 22 but file recall remains empty; current branch is clean, 2 commits ahead of `main`, has no upstream, and branch-local `uv build`, `twine check`, and strict MkDocs validation all pass. +- [2026-05-05] Baseline recall rerun for this session; docs were synced with runtime tracking/version-source behavior, the full local release gate passed (`85 passed`), and generated-wrapper smoke succeeded against the built `pgpkg` wheel. + +## 10) Update Template (Copy For Each Future Session) + +Use this block when updating the plan: + +```markdown +### Session YYYY-MM-DD +- Model used: +- Recall run: files/list (yes/no), health status +- Recall quality: cold-start / useful / high-signal +- Goal: +- Work done: +- Validation evidence: +- Accepted findings from other reviewers/models: +- Rejected findings from other reviewers/models: +- Checklist updates: + - [ ] / [-] / [x] items changed +- Next step: +``` diff --git a/README.md b/README.md index 481f08a..96cf7ff 100644 --- a/README.md +++ b/README.md @@ -27,10 +27,10 @@ ordered `sql/` directory. 
`pgpkg` does everything else: ## Prerequisites -- **Python ≥ 3.11** and **pip** (or **uv**) +- **Python ≥ 3.11** and **uv** - **`pgpkg[diff]`** — the `makemigration` command requires the optional [results](https://github.com/djrobstep/results) dependency: - `pip install 'pgpkg[diff]'` + `uv tool install --with 'pgpkg[diff]' pgpkg` - **Docker** — `makemigration` and `verify` spin up throwaway PostgreSQL containers via `testcontainers`; Docker must be running - **libpq** — `migrate` connects to a live database using standard libpq @@ -40,7 +40,7 @@ ordered `sql/` directory. `pgpkg` does everything else: ## Quickstart ```bash -pip install 'pgpkg[diff]' +uv tool install --with 'pgpkg[diff]' pgpkg mkdir -p sql/pre sql/post echo "CREATE TABLE foo (id int PRIMARY KEY);" > sql/010_schema.sql @@ -62,6 +62,27 @@ pgpkg makemigration --from 0.1.0 --to 0.2.0 pgpkg migrate -d mydb -h localhost --to 0.2.0 ``` +## Tracking and packaging + +By default, `pgpkg` records applied versions in `pgpkg.migrations`. You can +relocate that table with: + +```toml +[tool.pgpkg] +project_name = "myext" + +[tool.pgpkg.tracking] +schema = "ops" +table = "schema_versions" +``` + +If your application already owns the authoritative version table, set +`[tool.pgpkg].version_source = "module:attribute"` and provide an object with +`read_live_version(...)` and `record_applied(...)` methods. The generic +`pgpkg wheel` scaffold is intentionally limited to the default tracking path; +custom version sources should use a project-specific wrapper that calls +`pgpkg.api.migrate_from_artifact(..., version_source=...)`. + See [docs/](docs/) for the full manual and design details. ## Development diff --git a/docs/api.md b/docs/api.md index bc9b1c5..312b3c4 100644 --- a/docs/api.md +++ b/docs/api.md @@ -1,45 +1,67 @@ # Python API -Everything the CLI does is available as a Python API. 
Import from `pgpkg`: +The root package exposes the high-level project API: ```python from pgpkg import ( - list_versions, - stage_version, - generate_incremental, - plan_path, apply_migrations, + bundle_project, + generate_incremental, + list_versions, + load_project, migrate, + plan_path, + stage_version, verify_round_trip, ) + +from pgpkg.api import migrate_from_artifact ``` +`migrate_from_artifact()` stays in `pgpkg.api` because it is mainly intended +for wrapper packages and automation flows. + ## `list_versions(project_root) -> list[str]` All known versions (released + unreleased), sorted. `unreleased` always last. -## `stage_version(project_root, version, *, output_path=None, overwrite=True) -> Path` +## `stage_version(project_root, version, *, output_path=None, also_write=None, overwrite=True) -> Path` Render `sql/` into `--.sql`. Returns the path written. -## `generate_incremental(project_root, *, from_version, to_version, base_url, output_path=None) -> Path` +If `also_write` is set, the same rendered base file is written to that second +path after all destinations are validated. + +## `generate_incremental(project_root, *, from_version, to_version, base_url, output_path=None, prepend_files=None, append_files=None, append_sql=None) -> Path` Diff two staged base files through `results.temporary_local_db` and write `----.sql`. +Optional wrapper content is rendered around the generated diff in this order: +`prepend_files`, diff body, `append_files`, then literal `append_sql` strings. + ## `plan_path(project_root, *, source, target) -> MigrationPlan` Return the shortest chain of incrementals from `source` to `target`. If `source is None`, the plan starts with a bootstrap base file. -## `apply_migrations(project_root, *, target=None, dry_run=False, conninfo=None, host=None, port=None, dbname=None, user=None, password=None) -> ApplyResult` +## `bundle_project(project_root, output_path) -> Path` + +Build a reproducible `tar.zst` artifact from the project root. 
The manifest +includes migration entry checksums plus the resolved tracking schema, tracking +table, and configured `version_source` string. + +## `apply_migrations(project_root, *, target=None, dry_run=False, conninfo=None, host=None, port=None, dbname=None, user=None, password=None, version_source=None) -> ApplyResult` Apply the plan to a live database. Accepts psycopg-style kwargs; all default to libpq env vars (`PGHOST`, `PGPORT`, …). `migrate` is an alias for `apply_migrations`. +If `version_source` is passed, it overrides `[tool.pgpkg].version_source` for +that call only. + ### `ApplyResult` ```python @@ -59,12 +81,29 @@ For each incremental `(a, b)` where both have staged base files, confirm that `base(a) + a→b` produces the same schema as `base(b)`. Returns a list of problems (empty = OK). -## `build_artifact(config, output_path) -> Path` +## `migrate_from_artifact(artifact_path, *, target=None, dry_run=False, conninfo=None, host=None, port=None, dbname=None, user=None, password=None, version_source=None) -> ApplyResult` + +Apply migrations from a prebuilt `tar.zst` artifact. This is the runtime entry +point used by wrapper wheels. -Bundle `migrations/` + `sql/pre/` + `sql/post/` into a reproducible -`tar.zst` with a `MANIFEST.json` containing SHA-256 per entry. +If the artifact manifest contains tracking settings or a configured +`version_source`, those values become the defaults for the apply call. Passing +`version_source=` explicitly still wins. + +## Custom version sources + +A version source is any object implementing the two-method protocol below: + +```python +class VersionSource: + def read_live_version(self, conn, config) -> str | None: ... + def record_applied(self, conn, config, *, version, sha256, filename) -> None: ... +``` -## `migrate_from_artifact(artifact_path, *, target=None, ...) 
-> ApplyResult` +Set `[tool.pgpkg].version_source = "module:attribute"` to load one from the +project root, or pass an instance directly to `apply_migrations()` or +`migrate_from_artifact()`. -Like `migrate`, but reads every migration + pre/post from a baked -`tar.zst`. Used by wrapper wheels. +If your custom source fully replaces pgpkg's own tracking table, set +`writes_default_tracking = True` on the object or class. Otherwise `pgpkg` +will keep writing its default tracking row first, then call your source. diff --git a/docs/architecture.md b/docs/architecture.md index f12a09e..36e3bc3 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -11,32 +11,40 @@ A short tour of the modules: | `pgpkg.versioning` | PEP 440 + `unreleased`-last ordering | | `pgpkg.planner` | BFS over the catalog graph for shortest `source → target` paths | | `pgpkg.diff` | wrap `results.temporary_local_db` + `schemadiff_as_sql` | -| `pgpkg.tracking` | `pgpkg.migrations` DDL, advisory locks, sha256 bookkeeping | +| `pgpkg.tracking` | default tracking DDL, version-source protocol, role-safe bookkeeping | | `pgpkg.executor` | run a plan in one xact with `pg_advisory_xact_lock` | | `pgpkg._conn` | thin psycopg helper honoring libpq env vars | -| `pgpkg.artifact` | build and load the tar.zst artifact (MANIFEST with sha256) | +| `pgpkg.artifact` | build and load the tar.zst artifact (MANIFEST with checksums + runtime config) | | `pgpkg.api` | public facade | | `pgpkg.cli` | argparse CLI, psql-compatible DB flags | | `pgpkg.wrapper` | scaffold a wrapper Python project | ## Invariants -- The tracking table `pgpkg.migrations` is the source of truth for the - installed version. +- The installed version comes from the configured version source. By default + that is `pgpkg.migrations`, but projects can relocate the table or provide a + custom `module:attribute` implementation. - `sql/` is always "unreleased". Staging it writes an immutable base file. 
- Every applied step runs inside one transaction with a project-scoped advisory lock (`pg_advisory_xact_lock(sha256(project_name)[:8])`). - `sql/pre/*` runs before every step; `sql/post/*` runs after. Together they form the project's public boundary for side effects (roles, grants, GUCs). +- Tracking writes always run as the original session user, even if migration + SQL temporarily changes the active role. +- Artifacts preserve runtime tracking defaults so wrapper and bundle-based + execution see the same config as source-tree execution. ## What pgpkg deliberately does *not* do - No schema adoption (no "adopt an already-installed schema as a version" flow). If a DB was seeded out-of-band, you're responsible for inserting - the right row into `pgpkg.migrations`. + the right row into the configured tracking source. - No downgrades, no per-version `sql//` tree, no built-in pg_tle/control/deb/PGXN packaging. +- No generic wrapper generation for projects with custom version sources. + Those projects need a project-specific wrapper so they can construct and + pass the runtime `version_source` object explicitly. - No compiled C — `pgpkg` is pure Python. ## Dependencies diff --git a/docs/cli.md b/docs/cli.md index 7dce6a6..b2727b9 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -3,7 +3,7 @@ `pgpkg` uses argparse and accepts `--help` (the `-h` short flag is reserved for `--host`, matching `psql`). -``` +```text pgpkg [--help] [options] Commands: @@ -41,13 +41,19 @@ vars take over. ## `stageversion` -``` -pgpkg stageversion [--output PATH] [--no-overwrite] +```text +pgpkg stageversion [--output PATH] [--also-write PATH] [--no-overwrite] ``` Renders `sql/` into a single `--.sql`. Ignores `sql/pre/` and `sql/post/` (they run at apply time). +- `--output` overrides the default path under `migrations/`. +- `--also-write` writes the same rendered base file to a second location, + which is useful for review artifacts or custom packaging flows. 
+- `--no-overwrite` preflights every destination and fails before writing if + any target already exists. + ## `info` ``` @@ -55,7 +61,8 @@ pgpkg info [--json] ``` Prints resolved project metadata including the inferred prefix, SQL and -migrations directories, known versions, base files, and graph edges. +migrations directories, tracking schema/table, configured `version_source`, +known versions, base files, and graph edges. ## `versions` @@ -67,8 +74,10 @@ Prints known versions in sorted order, including `unreleased` when present. ## `makemigration` -``` +```text pgpkg makemigration [--from VERSION] [--to VERSION] [--base-url URL] [--output PATH] + [--prepend-file PATH]... [--append-file PATH]... + [--append-sql SQL]... ``` Writes `----.sql`. `--base-url` is the postgres URL used @@ -76,9 +85,16 @@ to spawn tempdbs via `results.temporary_local_db`. Defaults to `postgresql:///postgres`, i.e. a local admin connection through the peer socket. +When wrapper SQL is supplied, `pgpkg` renders the output in this order: + +1. Every `--prepend-file`. +2. The generated schema diff. +3. Every `--append-file`. +4. Every `--append-sql` literal. + ## `graph` -``` +```text pgpkg graph [--format text|dot] ``` @@ -86,7 +102,7 @@ Shows the version graph either as plain text or Graphviz DOT. ## `plan` -``` +```text pgpkg plan [--source VERSION] [--to VERSION] ``` @@ -95,16 +111,18 @@ a fresh install and may start with a bootstrap base file. ## `migrate` -``` +```text pgpkg migrate [--to VERSION] [--dry-run] ``` Runs inside one transaction with `pg_advisory_xact_lock`. `--dry-run` -executes the same SQL inside a transaction, then rolls back. +executes the same SQL inside a transaction, then rolls back. The live source +version comes from the configured version source, which defaults to +`pgpkg.migrations`. ## `verify` -``` +```text pgpkg verify [--base-url URL] ``` @@ -113,18 +131,23 @@ that applying `a -> b` produces the same resulting schema as loading base `b`. 
## `wheel` -``` +```text pgpkg wheel --output-dir PATH [--cli-name NAME] ``` -Scaffolds a wrapper Python project. See [Wrapping into a wheel](wrapper.md). +Scaffolds a wrapper Python project. Projects using +`[tool.pgpkg].version_source` must ship a custom wrapper instead; the generic +scaffold rejects that configuration so the wrapper can pass an explicit +`version_source=...` object at runtime. See [Wrapping into a wheel](wrapper.md). ## `bundle` -``` +```text pgpkg bundle --output PATH ``` Writes a compressed `tar.zst` artifact containing `migrations/`, `sql/pre/`, -and `sql/post/`. This is useful for automation or for shipping migration -artifacts separately from a full wrapper project. +and `sql/post/`. The manifest also records the resolved tracking schema, +tracking table, and configured `version_source` string so artifact-based +execution keeps the same runtime defaults. This is useful for automation or +for shipping migration artifacts separately from a full wrapper project. diff --git a/docs/index.md b/docs/index.md index 7ecaec6..ee26d9c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -29,8 +29,8 @@ code. 
You own: ## Install ```bash -pip install pgpkg # core -pip install 'pgpkg[diff]' # + results, required for makemigration/verify +uv tool install pgpkg # core +uv tool install --with 'pgpkg[diff]' pgpkg # + results for makemigration/verify ``` ## Look around diff --git a/docs/layout.md b/docs/layout.md index 45b1ca0..0f89389 100644 --- a/docs/layout.md +++ b/docs/layout.md @@ -26,10 +26,41 @@ | `prefix` | `project_name` | file prefix for `--.sql` | | `sql_dir` | `sql` | path to the source tree | | `migrations_dir` | `migrations` | where staged base files + incrementals live | -| `pre_dir` | `sql/pre` | pre hooks | -| `post_dir` | `sql/post` | post hooks | -| `tracking_schema` | `pgpkg` | reserved, stores `migrations` table | -| `tracking_table` | `migrations` | reserved | +| `version_source` | *(unset)* | `module:attribute` loader for a custom live-version source | + +Derived paths are not configurable separately: + +- `pre_dir` is always `/pre`. +- `post_dir` is always `/post`. + +Tracking settings live under a nested table: + +| key | default | purpose | +|---|---|---| +| `[tool.pgpkg.tracking].schema` | `pgpkg` | schema for pgpkg's default tracking table | +| `[tool.pgpkg.tracking].table` | `migrations` | table for pgpkg's default tracking rows | + +Example: + +```toml +[tool.pgpkg] +project_name = "myext" +prefix = "myext" +version_source = "myext.db:version_source" + +[tool.pgpkg.tracking] +schema = "ops" +table = "schema_versions" +``` + +`version_source` must resolve to an object or zero-argument class with +`read_live_version(conn, config)` and +`record_applied(conn, config, *, version, sha256, filename)` methods. The +module is imported relative to the project root. + +`[tool.pgpkg].pre_post_in_base` is intentionally rejected in `0.1.x`. +Keep pre/post SQL as runtime hooks or handle baked-in behavior in a custom +wrapper. ## Filename grammar @@ -49,7 +80,7 @@ literal `unreleased`, which always sorts last. 
## Tracking table -`pgpkg` owns the `pgpkg.migrations` table: +By default, `pgpkg` owns the `pgpkg.migrations` table: ```sql CREATE TABLE pgpkg.migrations ( @@ -61,4 +92,6 @@ CREATE TABLE pgpkg.migrations ( ); ``` -Never write to it by hand. The `pgpkg` schema is reserved. +Never write to it by hand. If you configure a custom `version_source`, that +source becomes the authoritative runtime view; pgpkg's own tracking table can +still be kept in sync unless the source declares `writes_default_tracking = True`. diff --git a/docs/quickstart.md b/docs/quickstart.md index 6e4cbd8..fc3fdb2 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -2,6 +2,12 @@ Start from any project that has a `pyproject.toml`. +## 0. Install pgpkg + +```bash +uv tool install --with 'pgpkg[diff]' pgpkg +``` + ## 1. Declare your project ```toml @@ -31,7 +37,7 @@ sql/ ## 3. Stage a version -``` +```bash pgpkg stageversion 0.1.0 ``` @@ -39,7 +45,7 @@ This writes `migrations/myext--0.1.0.sql`. Never edit that file by hand. ## 4. Keep editing `sql/`, then generate an incremental -``` +```bash pgpkg stageversion 0.2.0 pgpkg makemigration --from 0.1.0 --to 0.2.0 ``` @@ -52,7 +58,7 @@ Review the diff. Edit freely; `pgpkg verify` will round-trip-check it later. ## 5. Apply to a live DB -``` +```bash pgpkg migrate -h localhost -d mydb -U myuser --to 0.2.0 ``` @@ -61,7 +67,7 @@ Standard libpq environment variables (`PGHOST`, `PGPORT`, `PGDATABASE`, ## 6. Verify -``` +```bash pgpkg verify ``` @@ -71,7 +77,7 @@ loading `base(b)` directly. ## 7. 
Ship -``` +```bash pgpkg wheel --output-dir ../myext-migrator cd ../myext-migrator && uv build --out-dir dist ``` diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 1541080..d50d4c9 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -44,11 +44,44 @@ Symptom: Fix: ```bash -pip install 'pgpkg[diff]' +uv tool install --with 'pgpkg[diff]' pgpkg # or in this repo uv sync --extra diff ``` +## `version_source` import or validation fails + +Symptom: +- `pgpkg migrate` exits with a config error about `module:attribute` syntax, + import failure, or missing required callables. + +Checks: + +```bash +pgpkg info --json +python -c "import mymodule; print(mymodule)" +``` + +Fix: +- Set `[tool.pgpkg].version_source` to `module:attribute`. +- Ensure the module is importable from the project root or installed in the + runtime environment. +- Implement both `read_live_version(...)` and `record_applied(...)`. + +## `pgpkg wheel` rejects projects using `version_source` + +Symptom: +- Wrapper scaffolding exits with `error [E_WRAP]` mentioning `version_source`. + +Cause: +- Generic wrappers cannot construct your project-specific runtime object. + +Fix: +- Use `pgpkg bundle --output ...` or `pgpkg.api.bundle_project(...)` to build + the artifact. +- Ship a custom wrapper package that calls + `pgpkg.api.migrate_from_artifact(..., version_source=...)`. + ## Release workflow version mismatch Symptom: @@ -60,3 +93,18 @@ Cause: Fix: - Confirm `src/pgpkg/__init__.py::__version__`. - Re-tag release as `v` to match built artifact. + +## Publish workflow fails with `invalid-publisher` + +Symptom: +- The `Publish` workflow reaches the PyPI/TestPyPI publish step, then fails + during trusted-publisher exchange. + +Checks: +- Verify the trusted publisher is configured for repository `bitner/pgpkg`. +- Verify the workflow filename matches `publish-pypi.yml`. +- Verify the environment name matches `testpypi` or `pypi` exactly. 
+ +Fix: +- Update the trusted publisher entry in PyPI/TestPyPI so the repository, + workflow, and environment fields exactly match the GitHub workflow. diff --git a/docs/wrapper.md b/docs/wrapper.md index a2fcf27..ad6e542 100644 --- a/docs/wrapper.md +++ b/docs/wrapper.md @@ -3,8 +3,8 @@ Shipping a database extension or application as a Python wheel means consumers can install and migrate without cloning the source tree: -``` -pip install myext-migrator +```bash +uv tool install myext-migrator myext-migrator migrate -h db.example.com -d prod -U deploy ``` @@ -12,7 +12,7 @@ myext-migrator migrate -h db.example.com -d prod -U deploy ## Scaffold -``` +```bash pgpkg wheel --output-dir ../myext-migrator ``` @@ -38,10 +38,10 @@ myext-migrator/ ## Build and install -``` +```bash cd myext-migrator uv build --out-dir dist -pip install dist/myext_migrator-*.whl +uv tool install ./dist/myext_migrator-*.whl ``` ## Runtime dependency @@ -60,11 +60,32 @@ The wrapper is apply-only: If you need those commands, use the base `pgpkg` CLI against the source tree instead. +Projects that use `[tool.pgpkg].version_source` also need a custom wrapper. +The generic `pgpkg wheel` scaffold intentionally rejects that case so the +wrapper can pass `version_source=...` into `pgpkg.api.migrate_from_artifact(...)` +explicitly. + +Minimal shape for a custom wrapper runtime: + +```python +from pgpkg.api import migrate_from_artifact + +result = migrate_from_artifact( + artifact_path, + target=target, + conninfo=dsn, + version_source=MyVersionSource(), +) +``` + +Use `pgpkg bundle --output ...` if you only need the artifact and do not want +the generic scaffold. 
+ ## Bundle-only (no wrapper) If you just want the artifact file: -``` +```bash pgpkg bundle --output myext.tar.zst ``` From 937086f73ad5355bb67ab270dfff02173a4e089f Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Tue, 5 May 2026 16:09:14 -0500 Subject: [PATCH 6/7] remove testpypi --- .github/workflows/publish-pypi.yml | 30 +----------------- CHANGELOG.md | 3 +- CONTRIBUTING.md | 27 +++++++--------- GITHUB_PYPI_SETUP.md | 51 ++++++++++++------------------ PROJECT_PLAN.md | 28 +++++++--------- docs/troubleshooting.md | 7 ++-- tests/unit/test_tracking.py | 12 +++---- tests/unit/test_wrapper.py | 2 +- 8 files changed, 56 insertions(+), 104 deletions(-) diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 21b2eca..4206e67 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -5,14 +5,6 @@ on: types: [published] workflow_dispatch: inputs: - repository: - description: Publish destination - required: true - default: testpypi - type: choice - options: - - testpypi - - pypi expected_version: description: Required for manual PyPI publish (e.g. 
0.1.0) required: false @@ -76,28 +68,8 @@ jobs: path: dist/* if-no-files-found: error - publish-testpypi: - if: github.event_name == 'workflow_dispatch' && inputs.repository == 'testpypi' - needs: build - runs-on: ubuntu-latest - permissions: - id-token: write - environment: - name: testpypi - steps: - - name: Download distributions - uses: actions/download-artifact@v4 - with: - name: python-package-distributions - path: dist - - - name: Publish to TestPyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - repository-url: https://test.pypi.org/legacy/ - publish-pypi: - if: github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && inputs.repository == 'pypi') + if: github.event_name == 'release' || github.event_name == 'workflow_dispatch' needs: build runs-on: ubuntu-latest permissions: diff --git a/CHANGELOG.md b/CHANGELOG.md index 95f7d89..6f78583 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Publish workflow hardened with wheel install smoke test before publish. - Publish workflow now smoke-tests a generated wrapper package before uploading distributions. - Publish workflow checks release tag/version parity before PyPI publish. +- Release workflow and setup docs now target PyPI only; TestPyPI setup is deferred until access is available. - Docs deployment workflow aligned to main-branch release path and updated Pages action versions. - Integration tests now support configurable PostgreSQL image for CI matrix validation. @@ -30,6 +31,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Tracking writes now survive migration SQL that changes the active database role. ### Security -- Trusted publishing workflow configured for TestPyPI/PyPI environments. +- Trusted publishing workflow configured for the PyPI environment. 
[0.1.0]: https://github.com/bitner/pgpkg/releases/tag/v0.1.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e40cfef..3377dc2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,28 +39,25 @@ uv run python -m twine check dist/* uv run mkdocs build --strict ``` -3. For a smoke release, run the `Publish` workflow manually with `repository=testpypi`. +3. Create release tag `vX.Y.Z` matching `src/pgpkg/__init__.py::__version__`. -4. Verify TestPyPI install path in a clean venv: +4. Publish to PyPI by creating a GitHub Release from that tag. -```bash -uv venv .venv-testpypi -uv pip install --python .venv-testpypi/bin/python \ - -i https://test.pypi.org/simple/ \ - --extra-index-url https://pypi.org/simple \ - pgpkg -.venv-testpypi/bin/pgpkg --help -``` +Optional manual path: +- Run `Publish` with `expected_version=` only if version parity is already confirmed. -5. Create release tag `vX.Y.Z` matching `src/pgpkg/__init__.py::__version__`. +5. Verify the PyPI install path in a clean venv: -6. Publish to PyPI by creating a GitHub Release from that tag (or by running `Publish` with `repository=pypi` only if version parity is confirmed). +```bash +uv venv .venv-pypi +uv pip install --python .venv-pypi/bin/python pgpkg +.venv-pypi/bin/pgpkg --help +``` ## Trusted publishing setup -Before the publish workflow can work, configure trusted publishing for both environments: +Before the publish workflow can work, configure trusted publishing for the PyPI environment: -- GitHub environment `testpypi` bound to the TestPyPI project - GitHub environment `pypi` bound to the PyPI project No API tokens are required when trusted publishing is configured correctly. @@ -68,5 +65,5 @@ No API tokens are required when trusted publishing is configured correctly. ## Release policy guardrails - The release tag version must match the built package version exactly. -- TestPyPI install smoke test is required before first PyPI publish. 
+- The publish build job smoke-tests both the CLI wheel and a generated wrapper before upload. - If workflow_dispatch is used for PyPI publish, confirm version parity before running it. diff --git a/GITHUB_PYPI_SETUP.md b/GITHUB_PYPI_SETUP.md index 793f9a1..84b1f6c 100644 --- a/GITHUB_PYPI_SETUP.md +++ b/GITHUB_PYPI_SETUP.md @@ -1,6 +1,6 @@ # GitHub + PyPI Setup Guide -This checklist takes a local `pgpkg` repository to a production-ready GitHub repository with automated TestPyPI/PyPI publishing. +This checklist takes a local `pgpkg` repository to a production-ready GitHub repository with automated PyPI publishing. ## 1) Create and push the GitHub repository @@ -34,17 +34,14 @@ git push -u origin 4. Enable Dependabot alerts and security updates. 5. In Settings -> Pages, set Source to `GitHub Actions` so `.github/workflows/docs-pages.yml` can publish the MkDocs site. -## 3) PyPI/TestPyPI account and project prerequisites +## 3) PyPI account and project prerequisites -You need accounts on both sites: +You need an account on: - https://pypi.org -- https://test.pypi.org - -Create or claim the same project name on TestPyPI first, then PyPI. ### Required access -- You must be an Owner or Maintainer for the project on both services. +- You must be an Owner or Maintainer for the project on PyPI. - You must have admin rights on the GitHub repository to configure environments and workflows. 
## 4) Preferred publishing model: Trusted Publishing (OIDC) @@ -54,22 +51,13 @@ This repository already includes `.github/workflows/publish-pypi.yml`, which is ### 4a) Configure GitHub environments Create these environments in GitHub: -- `testpypi` - `pypi` Optional hardening: - Add required reviewers for `pypi` environment - Restrict deployment branches/tags -### 4b) Configure trusted publisher on TestPyPI - -In TestPyPI project settings, add a trusted publisher with: -- Owner: `` -- Repository: `pgpkg` -- Workflow filename: `publish-pypi.yml` -- Environment: `testpypi` - -### 4c) Configure trusted publisher on PyPI +### 4b) Configure trusted publisher on PyPI In PyPI project settings, add a trusted publisher with: - Owner: `` @@ -86,16 +74,15 @@ With trusted publishing configured correctly: You still need: - GitHub account with repo admin rights -- PyPI/TestPyPI account with project owner/maintainer rights +- PyPI account with project owner/maintainer rights ## 6) Optional fallback model: API token publishing Only use this if trusted publishing cannot be enabled. -1. Create project-scoped API tokens in PyPI and TestPyPI. +1. Create a project-scoped API token in PyPI. 2. Add these repository secrets: - `PYPI_API_TOKEN` - - `TEST_PYPI_API_TOKEN` 3. Update publish workflow to pass `password: ${{ secrets. }}` to `pypa/gh-action-pypi-publish`. ## 7) First release flow @@ -111,24 +98,26 @@ uv run python -m twine check dist/* uv run mkdocs build --strict ``` -2. Smoke publish to TestPyPI: - - GitHub Actions -> `Publish` -> `Run workflow` -> `repository=testpypi` -3. Verify install from TestPyPI: +2. Create a GitHub Release (tag) to trigger PyPI publish. + +Optional manual path: +- GitHub Actions -> `Publish` -> `Run workflow` -> `expected_version=` + +3. 
Verify install from PyPI: ```bash uv venv /tmp/pgpkg-smoke -uv pip install --python /tmp/pgpkg-smoke/bin/python \ - -i https://test.pypi.org/simple/ \ - --extra-index-url https://pypi.org/simple \ - pgpkg +uv pip install --python /tmp/pgpkg-smoke/bin/python pgpkg /tmp/pgpkg-smoke/bin/pgpkg --help ``` The `Publish` workflow's build job also smoke-tests a generated wrapper wheel, -so a passing TestPyPI run confirms both the base CLI wheel and the wrapper -packaging path. +so a successful publish already validates both the base CLI wheel and the +wrapper packaging path before upload. -4. Create a GitHub Release (tag) to trigger production PyPI publish. +TestPyPI is intentionally not wired up in this repository right now. If access +is restored later, add a separate environment and publish job rather than +reusing the PyPI lane implicitly. ## 8) Post-setup metadata cleanup @@ -144,6 +133,6 @@ Issues = "https://github.com//pgpkg/issues" ## 9) Quick troubleshooting -- "invalid-publisher" or "publisher not trusted": trusted publisher fields do not exactly match repo/workflow/environment. For this repo, confirm `bitner/pgpkg`, workflow `publish-pypi.yml`, and environment `testpypi` or `pypi` exactly. +- "invalid-publisher" or "publisher not trusted": trusted publisher fields do not exactly match repo/workflow/environment. For this repo, confirm `bitner/pgpkg`, workflow `publish-pypi.yml`, and environment `pypi` exactly. - Publish job not starting: verify environment name in workflow matches configured environment. - Artifact missing in publish job: ensure build job used `uv build --out-dir dist` and uploaded `dist/*`. 
diff --git a/PROJECT_PLAN.md b/PROJECT_PLAN.md index bb67f9f..022570f 100644 --- a/PROJECT_PLAN.md +++ b/PROJECT_PLAN.md @@ -2,7 +2,7 @@ Last updated: 2026-05-05 Owner: core maintainers -Status: Local release gate, docs refresh, and generated-wrapper smoke pass on docs/v0.1.0-release-prep; remaining external blocker is TestPyPI trusted-publisher configuration +Status: Local release gate, docs refresh, and generated-wrapper smoke pass on docs/v0.1.0-release-prep; release path now targets PyPI only while TestPyPI access is unavailable ## 1) Mission @@ -85,8 +85,8 @@ Docs exist: Current branch state: - Branch: `docs/v0.1.0-release-prep` -- Divergence from `main`: ahead 2, behind 0 (`git rev-list --left-right --count main...HEAD` -> `0 2`) -- Working tree: clean (`git status --short --branch`) +- Divergence from `main`: ahead 5, behind 0 (`git rev-list --left-right --count main...HEAD` -> `0 5`) +- Working tree: local workflow/docs edits pending commit for the PyPI-only release path - Remote tracking: none configured locally (`git branch -vv`) - Branch-only files vs `main`: `.gitignore`, `CHANGELOG.md`, `README.md`, `docs/troubleshooting.md` @@ -101,7 +101,7 @@ Core implementation: Quality and validation: - [x] Unit and integration test suites implemented. -- [x] Local validation passes: `63 passed` on latest run. +- [x] Local validation passes: `85 passed` on latest run. - [x] Packaging checks pass: wheel/sdist build + `twine check`. - [x] Docs strict build passes (`mkdocs build --strict`). - [x] Branch-local packaging/docs checks rerun after post-`main` docs changes. @@ -152,16 +152,11 @@ Session-recall setup: - [x] Re-ran the local release gate after runtime-config/docs refresh on this branch. - Proof: `uv run pre-commit run --all-files`, `uv run ty check src tests`, `uv run pytest -q`, `uv build --out-dir dist`, `uv run python -m twine check dist/*`, and `uv run mkdocs build --strict` all passed on 2026-05-05. 
- [x] Create/refresh `CHANGELOG.md` with 0.1.0 release notes. -- [-] Execute TestPyPI publish dry run via workflow_dispatch. - - Progress: workflow dispatched and build stage passes. - - Blocker: trusted publishing exchange fails with `invalid-publisher` for environment `testpypi`. - - Required user action: configure TestPyPI trusted publisher to match: - - repository: `bitner/pgpkg` - - workflow: `.github/workflows/publish-pypi.yml` - - ref: `refs/heads/main` - - environment: `testpypi` -- [ ] Verify install from TestPyPI in clean venv and run `pgpkg --help`. -- [ ] If dry run passes, create GitHub Release tag matching `src/pgpkg/__init__.py::__version__`. +- [x] Remove TestPyPI release lane until access is available. + - Proof: `.github/workflows/publish-pypi.yml`, `CONTRIBUTING.md`, and `GITHUB_PYPI_SETUP.md` now describe a PyPI-only release path. +- [ ] Create GitHub Release tag matching `src/pgpkg/__init__.py::__version__`. +- [ ] Publish to PyPI from the trusted publishing workflow. +- [ ] Verify install from PyPI in a clean venv and run `pgpkg --help`. ### D. Session-recall optimization - [-] For the next 5-10 real work sessions, prepend baseline recall commands and ensure actual file edits happen in those sessions. @@ -178,7 +173,7 @@ Session-recall setup: - [x] Add PostgreSQL version matrix (14-17) for integration tests. - Proof: `ci.yml` integration job now uses `postgres:14/15/16/17-alpine` matrix with `PGPKG_TEST_POSTGRES_IMAGE`. - [x] Add release process doc section for version bump + release/tag policy. - - Proof: `CONTRIBUTING.md` release section updated with explicit version/tag parity and TestPyPI verification path. + - Proof: `CONTRIBUTING.md` release section updated with explicit version/tag parity and a PyPI-only publish path. - [x] Add troubleshooting section for top migration/connectivity failure modes. - Proof: new `docs/troubleshooting.md` added and included in `mkdocs.yml` nav. 
@@ -189,7 +184,7 @@ All must be true: - Progress: latest full gate passed locally on 2026-05-05, including `pre-commit`, `ty`, `pytest`, `uv build`, `twine check`, strict MkDocs, and generated-wrapper smoke; branch-specific remote CI has not run because this branch has no upstream. - [x] Second-model review findings are triaged and merged into this plan. - Proof: accepted and rejected findings are recorded in section 7A and summarized in the 2026-04-27 session log. -- [ ] TestPyPI publish + install smoke test verified. +- [ ] PyPI install smoke test verified. - [ ] GitHub Release tag equals package version. - [ ] PyPI publish succeeds from trusted publishing workflow. - [-] Session-recall baseline commands are part of team runbook and this plan-update loop is being followed. @@ -205,6 +200,7 @@ All must be true: - [2026-04-27] Committed and pushed release hardening updates; remote CI+Docs runs are green on latest main; TestPyPI publish run failed at trusted publisher exchange (`invalid-publisher`) and requires account-level publisher config update. - [2026-04-27] Baseline recall rerun on `docs/v0.1.0-release-prep`: repo sessions increased to 22 but file recall remains empty; current branch is clean, 2 commits ahead of `main`, has no upstream, and branch-local `uv build`, `twine check`, and strict MkDocs validation all pass. - [2026-05-05] Baseline recall rerun for this session; docs were synced with runtime tracking/version-source behavior, the full local release gate passed (`85 passed`), and generated-wrapper smoke succeeded against the built `pgpkg` wheel. +- [2026-05-05] Removed the TestPyPI lane from workflow/docs because access is unavailable; release guidance now targets PyPI only. 
## 10) Update Template (Copy For Each Future Session) diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index d50d4c9..595e0b8 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -97,14 +97,15 @@ Fix: ## Publish workflow fails with `invalid-publisher` Symptom: -- The `Publish` workflow reaches the PyPI/TestPyPI publish step, then fails + +- The `Publish` workflow reaches the PyPI publish step, then fails during trusted-publisher exchange. Checks: - Verify the trusted publisher is configured for repository `bitner/pgpkg`. - Verify the workflow filename matches `publish-pypi.yml`. -- Verify the environment name matches `testpypi` or `pypi` exactly. +- Verify the environment name matches `pypi` exactly. Fix: -- Update the trusted publisher entry in PyPI/TestPyPI so the repository, +- Update the trusted publisher entry in PyPI so the repository, workflow, and environment fields exactly match the GitHub workflow. diff --git a/tests/unit/test_tracking.py b/tests/unit/test_tracking.py index 4abe52c..def68cc 100644 --- a/tests/unit/test_tracking.py +++ b/tests/unit/test_tracking.py @@ -4,8 +4,9 @@ from types import SimpleNamespace from typing import cast -import pytest import psycopg +import pytest + from pgpkg.config import ProjectConfig from pgpkg.errors import ConfigError from pgpkg.tracking import DefaultVersionSource, resolve_version_source @@ -51,9 +52,7 @@ def fake_import_module(name: str): return fake_module monkeypatch.setattr("pgpkg.tracking.import_module", fake_import_module) - resolved = resolve_version_source( - _config(version_source="demo.module:CustomSource") - ) + resolved = resolve_version_source(_config(version_source="demo.module:CustomSource")) assert isinstance(resolved, _DummyVersionSource) @@ -83,10 +82,7 @@ def test_resolve_version_source_imports_relative_to_project_root(tmp_path: Path) resolved = resolve_version_source(config) - assert ( - resolved.read_live_version(cast(psycopg.Connection, object()), config) - == 
"1.2.3" - ) + assert resolved.read_live_version(cast(psycopg.Connection, object()), config) == "1.2.3" def test_resolve_version_source_rejects_missing_methods( diff --git a/tests/unit/test_wrapper.py b/tests/unit/test_wrapper.py index c9aa693..ff16313 100644 --- a/tests/unit/test_wrapper.py +++ b/tests/unit/test_wrapper.py @@ -28,4 +28,4 @@ def test_scaffold_wrapper_rejects_custom_version_source( sample_project, output_dir=tmp_path / "wrapper", cli_name="sampleext-migrator", - ) \ No newline at end of file + ) From 347439b9ca169a3002e676d0873f013554b486a8 Mon Sep 17 00:00:00 2001 From: David W Bitner Date: Tue, 5 May 2026 16:18:04 -0500 Subject: [PATCH 7/7] docs: finalize release links and checklist --- CONTRIBUTING.md | 4 ++-- GITHUB_PYPI_SETUP.md | 4 ++-- PROJECT_PLAN.md | 2 +- README.md | 2 +- docs/troubleshooting.md | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3377dc2..f517139 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,12 +39,12 @@ uv run python -m twine check dist/* uv run mkdocs build --strict ``` -3. Create release tag `vX.Y.Z` matching `src/pgpkg/__init__.py::__version__`. +3. Create release tag `v0.1.0` matching `src/pgpkg/__init__.py::__version__`. 4. Publish to PyPI by creating a GitHub Release from that tag. Optional manual path: -- Run `Publish` with `expected_version=` only if version parity is already confirmed. +- Run `Publish` with `expected_version=0.1.0` only if version parity is already confirmed. 5. Verify the PyPI install path in a clean venv: diff --git a/GITHUB_PYPI_SETUP.md b/GITHUB_PYPI_SETUP.md index 84b1f6c..cc74b40 100644 --- a/GITHUB_PYPI_SETUP.md +++ b/GITHUB_PYPI_SETUP.md @@ -98,10 +98,10 @@ uv run python -m twine check dist/* uv run mkdocs build --strict ``` -2. Create a GitHub Release (tag) to trigger PyPI publish. +2. Create GitHub Release tag `v0.1.0` to trigger PyPI publish. 
Optional manual path: -- GitHub Actions -> `Publish` -> `Run workflow` -> `expected_version=` +- GitHub Actions -> `Publish` -> `Run workflow` -> `expected_version=0.1.0` 3. Verify install from PyPI: diff --git a/PROJECT_PLAN.md b/PROJECT_PLAN.md index 022570f..8be2272 100644 --- a/PROJECT_PLAN.md +++ b/PROJECT_PLAN.md @@ -154,7 +154,7 @@ Session-recall setup: - [x] Create/refresh `CHANGELOG.md` with 0.1.0 release notes. - [x] Remove TestPyPI release lane until access is available. - Proof: `.github/workflows/publish-pypi.yml`, `CONTRIBUTING.md`, and `GITHUB_PYPI_SETUP.md` now describe a PyPI-only release path. -- [ ] Create GitHub Release tag matching `src/pgpkg/__init__.py::__version__`. +- [ ] Create GitHub Release tag `v0.1.0` matching `src/pgpkg/__init__.py::__version__`. - [ ] Publish to PyPI from the trusted publishing workflow. - [ ] Verify install from PyPI in a clean venv and run `pgpkg --help`. diff --git a/README.md b/README.md index 96cf7ff..d97c50e 100644 --- a/README.md +++ b/README.md @@ -83,7 +83,7 @@ If your application already owns the authoritative version table, set custom version sources should use a project-specific wrapper that calls `pgpkg.api.migrate_from_artifact(..., version_source=...)`. -See [docs/](docs/) for the full manual and design details. +See the full manual at https://bitner.github.io/pgpkg/ and the source docs in [docs/](docs/). ## Development diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 595e0b8..3527778 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -92,7 +92,7 @@ Cause: Fix: - Confirm `src/pgpkg/__init__.py::__version__`. -- Re-tag release as `v` to match built artifact. +- Re-tag release as `v0.1.0` to match built artifact. ## Publish workflow fails with `invalid-publisher`