diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..050fa4c --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +.git +.github +*.md +LICENSE diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 0000000..3c681be --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,77 @@ +# Copilot Instructions — devops-ia/powerpipe + +## Project Overview + +This repo maintains a community Docker image for [Powerpipe](https://powerpipe.io). Turbot stopped publishing official Docker images, so we build from their pre-compiled binaries. + +The image is a thin wrapper: it downloads the Powerpipe binary, sets up the runtime environment (UID 9193, dirs, env vars), and provides a default CMD. + +## Architecture + +``` +Dockerfile → builds the image (ARG POWERPIPE_VERSION controls version) +README.md → documents flags, env vars, quickstart, Kubernetes notes +cli-snapshot.json → machine-readable snapshot of CLI behavior (auto-generated) +scripts/ → extraction and comparison tools (do not modify) +package.json → semantic-release config (do not modify) +``` + +## When Upstream Releases a New Version + +1. Review the behavioral diff in the PR comment +2. Update `README.md`: + - Add new CLI flags to the flag tables + - Remove deprecated/removed flags + - Add new environment variables to the env var table + - Remove dropped environment variables + - Update the version in example commands if relevant +3. Update `Dockerfile`: + - Add/remove ENV vars if defaults changed + - Update HEALTHCHECK if the service behavior changed +4. Do NOT update `ARG POWERPIPE_VERSION` (updatecli handles this) +5. 
Do NOT modify `cli-snapshot.json` (CI regenerates it) + +## Files You SHOULD Modify + +- `README.md` — flag tables, env var tables, examples +- `Dockerfile` — ENV defaults, HEALTHCHECK, EXPOSE + +## Files You MUST NOT Modify + +- `.github/workflows/` — CI/CD pipelines +- `package.json` — semantic-release config +- `cli-snapshot.json` — auto-generated by CI +- `scripts/` — extraction tools +- `LICENSE` + +## How to Build and Test + +```bash +# Build +docker build -t powerpipe:test . + +# Smoke test +docker run --rm powerpipe:test powerpipe --version + +# Server test +docker run --rm -d --name pp-test -p 9033:9033 powerpipe:test powerpipe server +sleep 5 +curl -sf http://localhost:9033/ && echo "OK" +docker stop pp-test +``` + +## Documentation Format + +Flag tables use this format: +```markdown +| Flag | Description | Default | +|------|-------------|---------| +| `--listen` | Listen address (local/network) | `network` | +``` + +Env var tables use this format: +```markdown +| Variable | Image default | Description | +|----------|--------------|-------------| +| `POWERPIPE_LISTEN` | `network` | Interface to listen on | +``` diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..554bbfe --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: weekly + groups: + actions: + patterns: + - "*" + + - package-ecosystem: pip + directory: /tests + schedule: + interval: weekly diff --git a/.github/updatecli/powerpipe.yaml b/.github/updatecli/powerpipe.yaml new file mode 100644 index 0000000..7c93034 --- /dev/null +++ b/.github/updatecli/powerpipe.yaml @@ -0,0 +1,46 @@ +--- +name: Bump Powerpipe version + +scms: + github: + kind: github + spec: + user: '{{ requiredEnv "GITHUB_ACTOR" }}' + email: '{{ requiredEnv "GITHUB_ACTOR" }}@users.noreply.github.com' + owner: devops-ia + repository: powerpipe + token: '{{ requiredEnv "GITHUB_TOKEN" 
}}' + branch: main + commitmessage: + title: 'chore: bump powerpipe to {{ source "powerpipe" }}' + +sources: + powerpipe: + kind: githubRelease + spec: + owner: turbot + repository: powerpipe + token: '{{ requiredEnv "GITHUB_TOKEN" }}' + versionFilter: + kind: semver + pattern: ">=1.0.0" + +targets: + dockerfile: + name: "Update Powerpipe version in Dockerfile" + kind: file + scmid: github + sourceid: powerpipe + spec: + file: Dockerfile + matchpattern: 'ARG POWERPIPE_VERSION=.*' + replacepattern: 'ARG POWERPIPE_VERSION={{ source "powerpipe" }}' + +actions: + pr: + kind: github/pullrequest + scmid: github + title: 'chore: bump powerpipe to {{ source "powerpipe" }}' + spec: + labels: + - dependencies diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml new file mode 100644 index 0000000..98fcb43 --- /dev/null +++ b/.github/workflows/copilot-setup-steps.yml @@ -0,0 +1,30 @@ +--- +name: Copilot Setup Steps + +on: + workflow_dispatch: + push: + paths: + - .github/workflows/copilot-setup-steps.yml + pull_request: + paths: + - .github/workflows/copilot-setup-steps.yml + +jobs: + copilot-setup-steps: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Install tools + run: | + sudo apt-get update -qq && sudo apt-get install -y -qq jq > /dev/null + echo "jq $(jq --version), docker $(docker --version)" + + - name: Build Docker image + run: docker build -t powerpipe:local . 
+ + - name: Validate + run: docker run --rm powerpipe:local powerpipe --version diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 0000000..b65eee0 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,307 @@ +name: Build and Push Docker Image + +env: + DOCKERHUB_USER: devopsiaci + DOCKERHUB_REPO: powerpipe + GHCR_REGISTRY: ghcr.io + GHCR_REPO: ${{ github.repository }} + +on: + workflow_dispatch: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + test: + name: Test + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Lint Dockerfile + uses: hadolint/hadolint-action@54c9adbab1582c2ef04b2016b760714a4bfde3cf # v3.1.0 + with: + dockerfile: Dockerfile + config: .hadolint.yaml + + - name: Unit tests + run: | + pip install -r tests/requirements.txt + python3 -m pytest tests/ \ + --cov=compare_snapshots \ + --cov-report=term-missing \ + --cov-fail-under=90 + + - name: Build test image + uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0 + with: + context: . 
+ load: true + tags: powerpipe:test + + - name: Smoke test + run: | + docker run --rm powerpipe:test powerpipe --version + + - name: Container structure tests + run: | + docker run --rm \ + -v "$PWD/structure-tests.yaml:/structure-tests.yaml:ro" \ + -v /var/run/docker.sock:/var/run/docker.sock \ + gcr.io/gcp-runtimes/container-structure-test:latest \ + test --image powerpipe:test --config /structure-tests.yaml + + - name: Security scan (Trivy) + uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1 # v0.35.0 + with: + image-ref: powerpipe:test + format: sarif + output: trivy-results.sarif + exit-code: "1" + severity: CRITICAL + ignore-unfixed: true + + - name: Upload Trivy SARIF results + if: always() + uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 + with: + sarif_file: trivy-results.sarif + + behavior-check: + name: Behavior Check + if: github.event_name == 'pull_request' + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Detect version change + id: version + run: | + git fetch origin ${{ github.base_ref }} --depth=1 + OLD_VERSION=$(git show origin/${{ github.base_ref }}:Dockerfile 2>/dev/null | grep -oP 'ARG POWERPIPE_VERSION=\K.*' || echo "") + NEW_VERSION=$(grep -oP 'ARG POWERPIPE_VERSION=\K.*' Dockerfile) + echo "old=$OLD_VERSION" >> "$GITHUB_OUTPUT" + echo "new=$NEW_VERSION" >> "$GITHUB_OUTPUT" + if [ "$OLD_VERSION" != "$NEW_VERSION" ]; then + echo "changed=true" >> "$GITHUB_OUTPUT" + echo "Version change detected: $OLD_VERSION → $NEW_VERSION" + else + echo "changed=false" >> "$GITHUB_OUTPUT" + echo "No version change" + fi + + - name: Build test image + if: steps.version.outputs.changed == 'true' + uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0 + with: + context: . 
+ load: true + tags: powerpipe:test + + - name: Extract CLI snapshot + if: steps.version.outputs.changed == 'true' + run: | + docker run --rm --network none \ + -v "$PWD/scripts:/scripts:ro" \ + powerpipe:test bash /scripts/extract-cli-snapshot.sh > /tmp/cli-snapshot-new.json + + - name: Extract env vars from upstream source + if: steps.version.outputs.changed == 'true' + run: | + ENV_VARS=$(bash scripts/extract-env-vars.sh "${{ steps.version.outputs.new }}") + jq --argjson env_vars "$ENV_VARS" '. + {env_vars: $env_vars}' /tmp/cli-snapshot-new.json > /tmp/cli-snapshot-full.json + + - name: Compare snapshots + id: diff + if: steps.version.outputs.changed == 'true' + run: | + python3 scripts/compare_snapshots.py \ + cli-snapshot.json /tmp/cli-snapshot-full.json \ + --output-md /tmp/behavior-diff.md \ + --output-json /tmp/behavior-diff.json \ + && echo "has_changes=false" >> "$GITHUB_OUTPUT" \ + || echo "has_changes=true" >> "$GITHUB_OUTPUT" + + - name: Comment on PR + if: steps.version.outputs.changed == 'true' + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const fs = require('fs'); + const md = fs.readFileSync('/tmp/behavior-diff.md', 'utf8'); + const marker = '<!-- powerpipe-behavior-diff -->'; + + // Find and update existing comment, or create new one + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + const existing = comments.find(c => c.body.includes(marker)); + + const body = `${marker}\n${md}`; + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body, + }); + } + + release: + name: Release + needs: [test] + if: github.event_name != 'pull_request' + runs-on: 
ubuntu-latest + permissions: + attestations: write + contents: write + id-token: write + packages: write + + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + + - name: Get Powerpipe version + id: version + run: | + VERSION=$(grep -oP 'ARG POWERPIPE_VERSION=\K.*' Dockerfile) + TAG="v${VERSION}" + echo "version=${VERSION}" >> "${GITHUB_OUTPUT}" + echo "tag=${TAG}" >> "${GITHUB_OUTPUT}" + if git rev-parse "${TAG}" >/dev/null 2>&1; then + echo "is_new=false" >> "${GITHUB_OUTPUT}" + echo "Tag ${TAG} already exists — skipping release" + else + echo "is_new=true" >> "${GITHUB_OUTPUT}" + echo "New version detected: ${TAG}" + fi + + - name: Create release tag + if: steps.version.outputs.is_new == 'true' + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git tag -a "${{ steps.version.outputs.tag }}" \ + -m "Release ${{ steps.version.outputs.tag }}" + git push origin "${{ steps.version.outputs.tag }}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Create GitHub Release + if: steps.version.outputs.is_new == 'true' + run: | + gh release create "${{ steps.version.outputs.tag }}" \ + --title "${{ steps.version.outputs.tag }}" \ + --generate-notes \ + --latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Set Docker metadata + id: meta + if: steps.version.outputs.is_new == 'true' + uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0 + with: + images: | + ${{ env.DOCKERHUB_USER }}/${{ env.DOCKERHUB_REPO }} + ${{ env.GHCR_REGISTRY }}/${{ env.GHCR_REPO }} + labels: | + org.opencontainers.image.maintainer='amartingarcia,ialejandro' + org.opencontainers.image.title='Powerpipe' + org.opencontainers.image.description='Powerpipe — Dashboards for DevOps' + org.opencontainers.image.vendor='devops-ia' + tags: | + type=raw,value=${{ 
steps.version.outputs.tag }} + + - name: Set up QEMU + if: steps.version.outputs.is_new == 'true' + uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0 + + - name: Set up Docker Buildx + if: steps.version.outputs.is_new == 'true' + uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0 + + - name: Cache Docker layers + if: steps.version.outputs.is_new == 'true' + uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + + - name: "[DOCKERHUB] Log in" + if: steps.version.outputs.is_new == 'true' + uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: "[GHCR] Log in" + if: steps.version.outputs.is_new == 'true' + continue-on-error: true + uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0 + with: + registry: ${{ env.GHCR_REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push Docker image + id: push + if: steps.version.outputs.is_new == 'true' + uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0 + with: + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max + context: . 
+ labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + push: true + sbom: true + tags: ${{ steps.meta.outputs.tags }} + + - name: "[DOCKERHUB] Update registry description" + if: steps.version.outputs.is_new == 'true' + uses: peter-evans/dockerhub-description@1b9a80c056b620d92cedb9d9b5a223409c68ddfa # v5.0.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + repository: ${{ env.DOCKERHUB_USER }}/${{ env.DOCKERHUB_REPO }} + + - name: "[GHCR] Generate artifact attestation" + if: steps.version.outputs.is_new == 'true' + continue-on-error: true + uses: actions/attest@59d89421af93a897026c735860bf21b6eb4f7b26 # v4.1.0 + with: + subject-name: ${{ env.GHCR_REGISTRY }}/${{ env.GHCR_REPO }} + subject-digest: ${{ steps.push.outputs.digest }} + push-to-registry: true + + - name: Move Docker cache + if: steps.version.outputs.is_new == 'true' + run: | + rm -rf /tmp/.buildx-cache + mv /tmp/.buildx-cache-new /tmp/.buildx-cache diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..74e8ffe --- /dev/null +++ b/.gitignore @@ -0,0 +1,16 @@ +# OS +.DS_Store +Thumbs.db + +# IDE +.vscode/ +.idea/ +.coverage + +__pycache__/ + +# OpenSpec — local workflow tooling, not part of the image +openspec/ +OPENSPEC-GUIDE.md +.github/prompts/ +.github/skills/ diff --git a/.hadolint.yaml b/.hadolint.yaml new file mode 100644 index 0000000..b6b5fc4 --- /dev/null +++ b/.hadolint.yaml @@ -0,0 +1,9 @@ +# Hadolint configuration +# https://github.com/hadolint/hadolint + +ignore: + # DL3008: Pin versions in apt-get install — acceptable for base system tools + # in CI-built images where reproducibility is handled by pinning the base image tag + - DL3008 + # DL3009: Delete apt-get lists — handled explicitly in our RUN layer + - DL3009 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..4496e21 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,97 @@ +# Contributing + +Thank you for your 
interest in contributing! This repo builds and publishes a community Docker image for [Powerpipe](https://powerpipe.io). + +## Prerequisites + +- Docker 20.10+ +- `bash`, `jq`, `python3`, `pip` +- A running Steampipe instance (see [devops-ia/steampipe](https://github.com/devops-ia/steampipe)) + +## Build the image locally + +```bash +# Build with the default Powerpipe version from the Dockerfile +docker build -t powerpipe:dev . + +# Build with a specific version +docker build --build-arg POWERPIPE_VERSION=1.5.1 -t powerpipe:dev . +``` + +## Run the test suite + +### Unit tests (no Docker needed) + +```bash +pip install -r tests/requirements.txt +python3 -m pytest tests/ --cov=compare_snapshots --cov-report=term-missing +``` + +### Lint the Dockerfile + +```bash +docker run --rm -i hadolint/hadolint < Dockerfile +``` + +### Container structure tests (requires built image) + +```bash +docker run --rm \ + -v "$PWD/structure-tests.yaml:/structure-tests.yaml:ro" \ + -v /var/run/docker.sock:/var/run/docker.sock \ + gcr.io/gcp-runtimes/container-structure-test:latest \ + test --image powerpipe:dev --config /structure-tests.yaml +``` + +### Security scan + +```bash +docker run --rm \ + -v /var/run/docker.sock:/var/run/docker.sock \ + aquasec/trivy image --severity CRITICAL --ignore-unfixed powerpipe:dev +``` + +## How releases work + +Releases are **fully automated** — do not bump versions manually. + +1. [updatecli](https://www.updatecli.io/) detects new Powerpipe releases and opens a PR updating `ARG POWERPIPE_VERSION` in the `Dockerfile`. +2. The PR CI runs all tests. +3. On merge to `main`, [semantic-release](https://semantic-release.gitbook.io/) reads conventional commits, bumps the version, and publishes to GHCR and Docker Hub automatically. 
+ +## Commit message format + +This repo uses [Conventional Commits](https://www.conventionalcommits.org/): + +``` +feat: add support for multi-arch builds +fix: correct workspace directory permissions +chore: bump powerpipe to 1.6.0 +docs: add mod installation example +``` + +| Type | When to use | +|------|------------| +| `feat` | New feature or capability | +| `fix` | Bug fix | +| `chore` | Maintenance (version bumps, CI tweaks) | +| `docs` | Documentation only | +| `refactor` | Code restructure without behaviour change | + +## Reporting bugs + +Please [open an issue](https://github.com/devops-ia/powerpipe/issues/new) with: + +- Powerpipe image version +- Docker version (`docker --version`) +- Steps to reproduce +- Expected vs actual behaviour +- Relevant logs (`docker logs powerpipe`) + +## Pull requests + +1. Fork the repo and create a branch: `git checkout -b feat/my-improvement` +2. Make your changes +3. Run the tests (see above) +4. Commit using Conventional Commits format +5. Open a PR against `main` diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..22afa0a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,50 @@ +FROM debian:bookworm-slim + +ARG POWERPIPE_VERSION=1.5.1 +ARG TARGETARCH + +LABEL maintainer="amartingarcia, ialejandro" +LABEL org.opencontainers.image.title="Powerpipe" +LABEL org.opencontainers.image.description="Powerpipe — Dashboards for DevOps" +LABEL org.opencontainers.image.source="https://github.com/devops-ia/powerpipe" +LABEL org.opencontainers.image.vendor="devops-ia" +LABEL org.opencontainers.image.url="https://powerpipe.io" + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +# git is required for 'powerpipe mod install' from GitHub repos +# hadolint ignore=DL3008,DL3005 +RUN apt-get update && \ + apt-get upgrade -y --no-install-recommends && \ + apt-get install -y --no-install-recommends ca-certificates curl git jq && \ + rm -rf /var/lib/apt/lists/* + +# Powerpipe uses DOTS as separator in release asset names +RUN curl 
-fsSL "https://github.com/turbot/powerpipe/releases/download/v${POWERPIPE_VERSION}/powerpipe.linux.${TARGETARCH}.tar.gz" \ + | tar -xz -C /usr/local/bin && \ + chmod +x /usr/local/bin/powerpipe + +# UID 9193, GID 0 (OpenShift compatible, consistent with Steampipe) +RUN useradd -u 9193 -g 0 -d /home/powerpipe -m -s /bin/bash powerpipe + +RUN mkdir -p /home/powerpipe/.powerpipe \ + /workspace && \ + chown -R 9193:0 /home/powerpipe /workspace && \ + chmod -R g=u /home/powerpipe /workspace + +ENV POWERPIPE_UPDATE_CHECK=false \ + POWERPIPE_TELEMETRY=none \ + POWERPIPE_INSTALL_DIR=/home/powerpipe/.powerpipe \ + POWERPIPE_LISTEN=network \ + POWERPIPE_LOG_LEVEL=warn \ + POWERPIPE_MOD_LOCATION=/workspace + +USER 9193 +WORKDIR /workspace + +EXPOSE 9033 + +HEALTHCHECK --interval=30s --timeout=5s --start-period=30s --retries=3 \ + CMD curl -sf http://localhost:9033/ || exit 1 + +CMD ["powerpipe", "server"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..b6eddf8 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 DevOps Solutions + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 17c1eb2..67e9597 100644 --- a/README.md +++ b/README.md @@ -1 +1,116 @@ -# powerpipe \ No newline at end of file +# powerpipe + +[![CI](https://github.com/devops-ia/powerpipe/actions/workflows/docker-build.yml/badge.svg)](https://github.com/devops-ia/powerpipe/actions/workflows/docker-build.yml) +[![GitHub release](https://img.shields.io/github/v/release/devops-ia/powerpipe)](https://github.com/devops-ia/powerpipe/releases) +[![Docker Hub](https://img.shields.io/docker/v/devopsiaci/powerpipe?label=Docker%20Hub&logo=docker)](https://hub.docker.com/r/devopsiaci/powerpipe) +[![Docker Pulls](https://img.shields.io/docker/pulls/devopsiaci/powerpipe?logo=docker)](https://hub.docker.com/r/devopsiaci/powerpipe) +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) + +Community Docker image for [Powerpipe](https://powerpipe.io) — dashboards for DevOps. Visualize cloud configurations and assess security posture against compliance benchmarks. + +> **Why this image?** Turbot stopped publishing official Docker images after Powerpipe v0.1.1. This project provides multi-arch container images built from official pre-compiled binaries. 
+ +## Quick start + +```bash +# Run Powerpipe server (HTTP dashboard on port 9033) +docker run -d --name powerpipe \ + -p 9033:9033 \ + -e POWERPIPE_DATABASE="postgres://steampipe:password@steampipe-host:9193/steampipe" \ + ghcr.io/devops-ia/powerpipe:1.5.1 + +# Install a mod +docker exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance + +# Access dashboards +open http://localhost:9033 +``` + +## Image details + +| Property | Value | +|----------|-------| +| Base image | `debian:bookworm-slim` | +| Architectures | `linux/amd64`, `linux/arm64` | +| User | `powerpipe` (UID 9193, GID 0) | +| Port | 9033 (HTTP) | +| Default CMD | `powerpipe server` | + +## Registries + +```bash +# GitHub Container Registry +docker pull ghcr.io/devops-ia/powerpipe:1.5.1 + +# Docker Hub +docker pull devopsiaci/powerpipe:1.5.1 +``` + +## `server` flags + +| Flag | Description | Default | +|------|-------------|---------| +| `--listen` | Accept connections from `local` or `network` | `network` | +| `--port` | HTTP server port | `9033` | +| `--dashboard-timeout` | Dashboard execution timeout (seconds) | `0` (no timeout) | +| `--watch` | Watch mod files for changes | `true` | +| `--var` | Set a variable value (`key=value`) | — | +| `--var-file` | Path to `.ppvar` file | — | + +## Environment variables + +Container-optimized defaults are pre-configured. 
Override as needed: + +| Variable | Image default | Description | +|----------|--------------|-------------| +| `POWERPIPE_UPDATE_CHECK` | `false` | Disable update checking | +| `POWERPIPE_TELEMETRY` | `none` | Disable telemetry | +| `POWERPIPE_LISTEN` | `network` | Listen on all interfaces | +| `POWERPIPE_PORT` | `9033` | HTTP server port | +| `POWERPIPE_LOG_LEVEL` | `warn` | Logging level | +| `POWERPIPE_MOD_LOCATION` | `/workspace` | Mod working directory | +| `POWERPIPE_DATABASE` | — | Database connection string (deprecated — use config) | +| `POWERPIPE_DASHBOARD_TIMEOUT` | `0` | Dashboard timeout (seconds) | +| `POWERPIPE_BENCHMARK_TIMEOUT` | `0` | Benchmark timeout (seconds) | +| `POWERPIPE_MEMORY_MAX_MB` | `1024` | Process memory soft limit (MB) | +| `POWERPIPE_MAX_PARALLEL` | `10` | Maximum parallel executions | +| `POWERPIPE_INSTALL_DIR` | `/home/powerpipe/.powerpipe` | Installation directory | + +Full reference: [Powerpipe Environment Variables](https://powerpipe.io/docs/reference/env-vars) + +## Kubernetes / Helm + +This image is designed to work with the [helm-steampipe](https://github.com/devops-ia/helm-steampipe) Helm chart (Powerpipe component): + +- **UID 9193 / GID 0** — compatible with OpenShift restricted SCC +- **`/workspace`** — mod working directory, mountable as volume +- **`git` included** — required for `powerpipe mod install` from GitHub +- **Shell available** (`/bin/bash`, `/bin/sh`) for init container scripts + +## Documentation + +| Topic | Description | +|-------|-------------| +| [Getting Started](docs/getting-started.md) | Docker, Docker Compose, mod install, first dashboard | +| [Configuration](docs/configuration.md) | Environment variables, database connection, mod workspace | +| [Kubernetes](docs/kubernetes.md) | Helm chart, workspace PVC, Secrets, health checks | +| [Examples](docs/examples.md) | AWS benchmarks, multi-account, CI/CD integration | +| [Troubleshooting](docs/troubleshooting.md) | Connection errors, mod not found, 
OOM, debug mode | + +## Versioning + +Image versions track upstream Powerpipe releases 1:1: + +| Image tag | Powerpipe version | +|-----------|-------------------| +| `1.5.1` | [v1.5.1](https://github.com/turbot/powerpipe/releases/tag/v1.5.1) | + +New versions are detected automatically via [updatecli](https://www.updatecli.io/) and published after merge. + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md). + +## License + +MIT — see [LICENSE](LICENSE). \ No newline at end of file diff --git a/cli-snapshot.json b/cli-snapshot.json new file mode 100644 index 0000000..0794650 --- /dev/null +++ b/cli-snapshot.json @@ -0,0 +1,432 @@ +{ + "version": "1.5.1", + "snapshot_date": "2026-04-12", + "help_text_hash": "2e295bc0a121c9d2e776369776bf5d8155b14e973de0ac2babf21c776190947b", + "subcommands": [ + "benchmark", + "control", + "dashboard", + "detection", + "login", + "mod", + "query", + "server", + "variable" + ], + "benchmark_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "benchmark_help_hash": "7d1bffd17e00444989e05678ccb4d4b485cb09c52461f73ff965d14e960a7e40", + "benchmark_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "benchmark_list_help_hash": "edc5f872263bae1b7b87b2569b11e79e3f09aa1477e6810baea1caf4f6000490", + "benchmark_run_flags": [ + "--benchmark-timeout", + "--config-path", + "--database", + "--dry-run", + "--export", + "--header", + "--help", + "--input", + "--install-dir", + "--max-parallel", + "--mod-install", + "--mod-location", + "--output", + "--pipes-host", + "--pipes-token", + "--progress", + "--pull", + "--query-timeout", + "--search-path", + "--search-path-prefix", + "--separator", + "--share", + "--snapshot", + "--snapshot-location", + "--snapshot-tag", + "--snapshot-title", + "--tag", + "--timing", + "--var", + "--var-file", + "--where", + "--workspace" + ], + "benchmark_run_help_hash": 
"7823e1436895172ab13110b79d3c7c415ca844a36d595a46b37cbb25da211b88", + "benchmark_show_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "benchmark_show_help_hash": "e65c3d042ad00df03b1f4484a495caa737b5d26882151e9ac4f3f5cf40bc434a", + "control_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "control_help_hash": "cf5a0b44764f9261326f61414df0124bbed8f651a467ed8ef912df05fc257afb", + "control_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "control_list_help_hash": "d4a129ba470749a2628818ab53f09940deeb2a1236fd9dea7e81b50f0f4ee3b1", + "control_run_flags": [ + "--arg", + "--benchmark-timeout", + "--config-path", + "--database", + "--export", + "--header", + "--help", + "--input", + "--install-dir", + "--mod-install", + "--mod-location", + "--output", + "--pipes-host", + "--pipes-token", + "--progress", + "--pull", + "--query-timeout", + "--search-path", + "--search-path-prefix", + "--separator", + "--share", + "--snapshot", + "--snapshot-location", + "--snapshot-tag", + "--snapshot-title", + "--timing", + "--var", + "--var-file", + "--workspace" + ], + "control_run_help_hash": "133cf7c1984a761b3c2e140466634357133dfb9c2939367d1df0e946f8dd3ad5", + "control_show_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "control_show_help_hash": "a22fc5f238a793087981d8f8d8dd4bdacc74bca135a6e7e47e29440ef358f8de", + "dashboard_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "dashboard_help_hash": "520056444ae7693bf3bdaade32b4c304eee7d1ef5b711662f168db200d223e9d", + "dashboard_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "dashboard_list_help_hash": 
"e87e6d72ff0043314126f80465e16bb41061245b67b5cc52940b0ba9b5dda344", + "dashboard_run_flags": [ + "--arg", + "--config-path", + "--dashboard-timeout", + "--database", + "--export", + "--help", + "--input", + "--install-dir", + "--max-parallel", + "--mod-install", + "--mod-location", + "--output", + "--pipes-host", + "--pipes-token", + "--progress", + "--pull", + "--query-timeout", + "--search-path", + "--search-path-prefix", + "--share", + "--snapshot", + "--snapshot-location", + "--snapshot-tag", + "--snapshot-title", + "--var", + "--var-file", + "--workspace" + ], + "dashboard_run_help_hash": "ef2538fe0c145f9e616f5fbd6ceac8f785f8c0e4d3046220e03405ee5f79216f", + "dashboard_show_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "dashboard_show_help_hash": "504aa398325af276e5cb8433210b43b909cf4ba55f5439ba97476b8029aa992b", + "detection_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "detection_help_hash": "96b6609afb40ab333920ade93d2b5197abeb84cf958d759dd92e962173fce3cb", + "detection_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "detection_list_help_hash": "87d5e18958b1d59c90826cef1c7842d89cc3fd1ec0a104cff6966008051dcc77", + "detection_run_flags": [ + "--arg", + "--config-path", + "--database", + "--detection-timeout", + "--export", + "--header", + "--help", + "--input", + "--install-dir", + "--max-parallel", + "--mod-install", + "--mod-location", + "--output", + "--pipes-host", + "--pipes-token", + "--progress", + "--pull", + "--query-timeout", + "--separator", + "--share", + "--snapshot", + "--snapshot-location", + "--snapshot-tag", + "--snapshot-title", + "--var", + "--var-file", + "--workspace" + ], + "detection_run_help_hash": "2863a121c24c661e4057216a8c456ef634f6470b0c4cc23558c14ad40a12d43a", + "detection_show_flags": [ + "--config-path", + "--help", + 
"--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "detection_show_help_hash": "e0bcee903727d493247a5c3bd4b8adbd733274da2b2cb7c9bc6b188121740507", + "login_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--pipes-host", + "--pipes-token", + "--workspace" + ], + "login_help_hash": "d69728be5bb065632eb6d7a0a1dfbc5f39c5cad21bac7eca1a0c741ff4a71eb7", + "mod_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "mod_help_hash": "e712f02f345d4060e8fcaf0b1efbd49fa495c0fb33b1bd56eab6a1bd5e9ba2cb", + "mod_init_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "mod_init_help_hash": "7a0f09cdc6987c73152bf98e4995e9007db82dfdb2964cc6c9b1a72d23e6052d", + "mod_install_flags": [ + "--config-path", + "--database", + "--dry-run", + "--force", + "--help", + "--install-dir", + "--mod-location", + "--prune", + "--pull", + "--workspace" + ], + "mod_install_help_hash": "41a75b930c069a6f769badc336b42a31d066bf0834b841111523aa5e38184c11", + "mod_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "mod_list_help_hash": "10569928b5b3c5bd5a508587332467319011aee5d7ef64ea565f30ce20c448be", + "mod_show_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "mod_show_help_hash": "c60274576c3faa3deabb2a05cc19f2175515858af92b3eb3d212fc5a081d81ba", + "mod_uninstall_flags": [ + "--config-path", + "--dry-run", + "--help", + "--install-dir", + "--mod-location", + "--prune", + "--workspace" + ], + "mod_uninstall_help_hash": "40f3d562fc49fbf5f04677944128b243fb783d15a2a7ad4feff90bc900231846", + "mod_update_flags": [ + "--config-path", + "--dry-run", + "--force", + "--help", + "--install-dir", + "--mod-location", + "--prune", + "--pull", + "--workspace" + ], + "mod_update_help_hash": 
"436669e53dac4315838c30de11983b7e9d4cae3b6fd9f455c2558554807a9098", + "query_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "query_help_hash": "e653c652937b7007ce96386d7941990c570542ff17e08c7156b7aa2e81468ad9", + "query_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "query_list_help_hash": "84526f030bff6459ffe96d905f23d9383617adc223688588fdb832bf37819987", + "query_run_flags": [ + "--arg", + "--config-path", + "--database", + "--export", + "--header", + "--help", + "--input", + "--install-dir", + "--mod-location", + "--output", + "--pipes-host", + "--pipes-token", + "--progress", + "--query-timeout", + "--search-path", + "--search-path-prefix", + "--separator", + "--share", + "--snapshot", + "--snapshot-location", + "--snapshot-tag", + "--snapshot-title", + "--timing", + "--var", + "--var-file", + "--workspace" + ], + "query_run_help_hash": "bbd9c24c0c424e9e761799ad56b0d4b068cd8f7526466826ae26a6cd36ad2ef9", + "query_show_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "query_show_help_hash": "2b1ee40626caab45a7934f17f767b9cbab0c76f66df1be984ceed49e7f5f5768", + "server_flags": [ + "--config-path", + "--dashboard-timeout", + "--database", + "--help", + "--install-dir", + "--listen", + "--mod-location", + "--pipes-host", + "--pipes-token", + "--port", + "--var", + "--var-file", + "--watch", + "--workspace" + ], + "server_help_hash": "ec0c5317809e8e82400018e5c617ec8514aa69a0b733ca464e7ab8c638cabc9a", + "variable_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--workspace" + ], + "variable_help_hash": "49817cbe3301801eaa3a961406a7f6c54f2cab94f711f85a554c535a8c35f40f", + "variable_list_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "variable_list_help_hash": 
"efa0feec252e4439d11f968817a797a27d1b0c1266d7e5c93c055e106b1f6f68", + "variable_show_flags": [ + "--config-path", + "--help", + "--install-dir", + "--mod-location", + "--output", + "--workspace" + ], + "variable_show_help_hash": "9322b4fe4b4fcf46489da890c29ec8132ae3df9aebf24d4de2ca9c0b505150f9", + "env_vars": [ + "POWERPIPE_BENCHMARK_TIMEOUT", + "POWERPIPE_CONFIG_DUMP", + "POWERPIPE_DASHBOARD_TIMEOUT", + "POWERPIPE_DISPLAY_WIDTH", + "POWERPIPE_LISTEN", + "POWERPIPE_PORT" + ] +} diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 0000000..9af853a --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,120 @@ +# Configuration + +## Database connection + +Powerpipe requires a PostgreSQL-compatible database (Steampipe). Set this with `POWERPIPE_DATABASE`: + +```bash +docker run -d \ + -e POWERPIPE_DATABASE="postgresql://steampipe:mypassword@steampipe:9193/steampipe" \ + ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +The connection string format: `postgresql://:@:/` + +## Environment variables + +All variables can be set via `-e` flags or a `.env` file in Docker Compose. + +### Server settings + +| Variable | Default (image) | Description | +|----------|----------------|-------------| +| `POWERPIPE_DATABASE` | — | **Required.** Steampipe connection string | +| `POWERPIPE_LISTEN` | `network` | `local` or `network` (bind to all interfaces) | +| `POWERPIPE_PORT` | `9033` | HTTP port for the dashboard server | +| `POWERPIPE_BASE_URL` | — | Public URL used in share links (e.g. 
`https://my.domain`) | + +### Execution settings + +| Variable | Default (image) | Description | +|----------|----------------|-------------| +| `POWERPIPE_MAX_PARALLEL` | `10` | Max concurrent queries | +| `POWERPIPE_MEMORY_MAX_MB` | `1024` | Memory limit in MB | +| `POWERPIPE_BENCHMARK_TIMEOUT` | `0` | Benchmark timeout (0 = unlimited) | +| `POWERPIPE_DASHBOARD_TIMEOUT` | `0` | Dashboard timeout (0 = unlimited) | + +### Mod and workspace + +| Variable | Default (image) | Description | +|----------|----------------|-------------| +| `POWERPIPE_MOD_LOCATION` | `/workspace` | Directory containing `mod.pp` files | +| `POWERPIPE_WORKSPACE_PROFILES_LOCATION` | — | Custom workspace profiles directory | + +### Operational + +| Variable | Default (image) | Description | +|----------|----------------|-------------| +| `POWERPIPE_UPDATE_CHECK` | `false` | Disable update check | +| `POWERPIPE_TELEMETRY` | `none` | Disable telemetry | +| `POWERPIPE_LOG_LEVEL` | `warn` | Log verbosity: `error`, `warn`, `info`, `debug`, `trace` | +| `POWERPIPE_INSTALL_DIR` | `/home/powerpipe/.powerpipe` | Powerpipe install directory | + +## Mounting a mod workspace + +Mods are installed into the workspace volume. 
Mount your mod directory: + +```bash +docker run -d \ + -v "$PWD/workspace:/workspace" \ + -e POWERPIPE_MOD_LOCATION=/workspace \ + -e POWERPIPE_DATABASE="postgresql://steampipe:pass@steampipe:9193/steampipe" \ + ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +Inside the container, install mods: + +```bash +docker exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance +``` + +## Variable files (`.ppvars`) + +For mods that accept input variables, create a `.ppvars` file: + +```hcl +# workspace/steampipe.ppvars +benchmark_tags = { + environment = "production" + team = "platform" +} +``` + +Mount it alongside your workspace: + +```bash +docker run -d \ + -v "$PWD/workspace:/workspace" \ + -v "$PWD/steampipe.ppvars:/workspace/steampipe.ppvars:ro" \ + ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +## Using secrets for the database password + +In production, avoid passing plaintext passwords via environment variables. Use Docker secrets or Kubernetes Secrets: + +```yaml +# docker-compose.yml (Docker Swarm) +services: + powerpipe: + image: ghcr.io/devops-ia/powerpipe:1.5.1 + environment: + POWERPIPE_DATABASE: "postgresql://steampipe:{{ secret('db_password') }}@steampipe:9193/steampipe" + secrets: + - db_password +secrets: + db_password: + external: true +``` + +## Debug mode + +Enable verbose logging: + +```bash +docker run --rm \ + -e POWERPIPE_LOG_LEVEL=debug \ + -e POWERPIPE_DATABASE="..." 
\ + ghcr.io/devops-ia/powerpipe:1.5.1 \ + powerpipe server +``` diff --git a/docs/examples.md b/docs/examples.md new file mode 100644 index 0000000..0e85749 --- /dev/null +++ b/docs/examples.md @@ -0,0 +1,176 @@ +# Examples + +## Running a benchmark + +With a mod installed, run a compliance benchmark: + +```bash +# Install the AWS Compliance mod +docker exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance + +# List available benchmarks +docker exec powerpipe powerpipe benchmark list + +# Run the CIS AWS Foundations benchmark +docker exec powerpipe powerpipe benchmark run aws_compliance.benchmark.cis_aws_foundations_benchmark_v300 + +# Export results as JSON +docker exec powerpipe \ + powerpipe benchmark run aws_compliance.benchmark.cis_aws_foundations_benchmark_v300 \ + --export /workspace/results.json +``` + +## Docker Compose — Full stack + +The complete stack for Powerpipe + Steampipe + AWS: + +```yaml +# docker-compose.yml +services: + steampipe: + image: ghcr.io/devops-ia/steampipe:2.4.1 + command: ["steampipe", "service", "start", "--foreground", "--database-listen", "network"] + environment: + STEAMPIPE_DATABASE_PASSWORD: steampipe + volumes: + - steampipe-data:/home/steampipe/.steampipe + - ./aws.spc:/home/steampipe/.steampipe/config/aws.spc:ro + healthcheck: + test: ["CMD", "pg_isready", "-h", "localhost", "-p", "9193"] + interval: 10s + timeout: 5s + retries: 5 + + powerpipe: + image: ghcr.io/devops-ia/powerpipe:1.5.1 + ports: + - "9033:9033" + environment: + POWERPIPE_DATABASE: "postgresql://steampipe:steampipe@steampipe:9193/steampipe" + volumes: + - workspace:/workspace + depends_on: + steampipe: + condition: service_healthy + +volumes: + steampipe-data: + workspace: +``` + +```bash +# Install AWS plugin in Steampipe +docker compose exec steampipe steampipe plugin install aws + +# Install AWS compliance mod in Powerpipe +docker compose exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance + 
+# Open dashboards +open http://localhost:9033 +``` + +## AWS credentials + +Mount AWS credentials read-only: + +```yaml +# In your docker-compose.yml steampipe service: +volumes: + - "$HOME/.aws:/home/steampipe/.aws:ro" +``` + +Or set environment variables: + +```yaml +environment: + AWS_ACCESS_KEY_ID: "${AWS_ACCESS_KEY_ID}" + AWS_SECRET_ACCESS_KEY: "${AWS_SECRET_ACCESS_KEY}" + AWS_DEFAULT_REGION: us-east-1 +``` + +## Running benchmarks via CLI (non-interactive) + +Run benchmarks from the command line without starting the server: + +```bash +# One-shot benchmark (no server needed) +docker run --rm \ + -v "$HOME/.aws:/home/powerpipe/.aws:ro" \ + -v "$PWD/workspace:/workspace" \ + -e POWERPIPE_DATABASE="postgresql://steampipe:pass@host.docker.internal:9193/steampipe" \ + ghcr.io/devops-ia/powerpipe:1.5.1 \ + powerpipe benchmark run aws_compliance.benchmark.cis_aws_foundations_benchmark_v300 \ + --output brief +``` + +## Multiple AWS accounts + +Configure Steampipe with an aggregator connection, then query across all accounts from Powerpipe: + +**`aws.spc`:** +```hcl +connection "aws_prod" { + plugin = "aws" + profile = "production" + regions = ["us-east-1", "eu-west-1"] +} + +connection "aws_dev" { + plugin = "aws" + profile = "development" + regions = ["us-east-1"] +} + +connection "aws_all" { + plugin = "aws" + type = "aggregator" + connections = ["aws_prod", "aws_dev"] +} +``` + +Run compliance benchmarks against all accounts: + +```bash +docker exec powerpipe \ + powerpipe benchmark run aws_compliance.benchmark.cis_aws_foundations_benchmark_v300 \ + --search-path-prefix aws_all +``` + +## Kubernetes compliance + +Install and run the Kubernetes compliance mod: + +```bash +# Requires the kubernetes plugin in Steampipe +docker exec steampipe steampipe plugin install kubernetes + +# Install the mod +docker exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-kubernetes-compliance + +# Run NSA/CISA Kubernetes hardening benchmark +docker exec powerpipe 
\ + powerpipe benchmark run kubernetes_compliance.benchmark.nsa_cisa_v10 +``` + +## CI/CD integration + +Export benchmark results for CI pipelines: + +```bash +#!/bin/bash +# Run benchmark and fail if any controls are in alarm state +docker run --rm \ + -v "$HOME/.aws:/home/powerpipe/.aws:ro" \ + -v "$PWD/workspace:/workspace" \ + -e POWERPIPE_DATABASE="${STEAMPIPE_CONNECTION_STRING}" \ + ghcr.io/devops-ia/powerpipe:1.5.1 \ + powerpipe benchmark run aws_compliance.benchmark.cis_aws_foundations_benchmark_v300 \ + --export /workspace/results.json \ + --output brief + +# Check exit code — non-zero means controls failed +if [ $? -ne 0 ]; then + echo "Compliance benchmark failed — review results.json" + exit 1 +fi +``` diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 0000000..7771096 --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,109 @@ +# Getting Started + +## Prerequisites + +You'll need Docker installed. Powerpipe connects to [Steampipe](https://steampipe.io) as its data source — the simplest way to run both is Docker Compose. + +## Quick start with Docker Compose + +The fastest way to get dashboards running: + +```bash +# Clone the examples +curl -O https://raw.githubusercontent.com/devops-ia/powerpipe/main/examples/docker-compose.yml + +# Start Steampipe + Powerpipe +docker compose up -d + +# Open dashboards +open http://localhost:9033 +``` + +## Manual Docker run + +### 1. Start Steampipe first + +Powerpipe needs a running Steampipe PostgreSQL endpoint: + +```bash +docker run -d --name steampipe \ + -p 9193:9193 \ + -e STEAMPIPE_DATABASE_PASSWORD=mypassword \ + ghcr.io/devops-ia/steampipe:2.4.1 \ + steampipe service start --foreground --database-listen network +``` + +### 2. Install a plugin + +```bash +docker exec steampipe steampipe plugin install aws +``` + +### 3. 
Start Powerpipe + +```bash +docker run -d --name powerpipe \ + -p 9033:9033 \ + -e POWERPIPE_DATABASE="postgresql://steampipe:mypassword@steampipe:9193/steampipe" \ + --link steampipe \ + ghcr.io/devops-ia/powerpipe:1.5.1 \ + powerpipe server --listen network +``` + +### 4. Install a mod + +```bash +# Install the AWS Compliance mod +docker exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance +``` + +### 5. Open the dashboard + +Navigate to [http://localhost:9033](http://localhost:9033) in your browser. + +## Docker Compose (recommended) + +Using the example compose file from this repo: + +```yaml +services: + steampipe: + image: ghcr.io/devops-ia/steampipe:2.4.1 + command: ["steampipe", "service", "start", "--foreground", "--database-listen", "network"] + environment: + STEAMPIPE_DATABASE_PASSWORD: steampipe + volumes: + - steampipe-data:/home/steampipe/.steampipe + - ./aws.spc:/home/steampipe/.steampipe/config/aws.spc:ro + healthcheck: + test: ["CMD", "pg_isready", "-h", "localhost", "-p", "9193"] + interval: 10s + timeout: 5s + retries: 5 + + powerpipe: + image: ghcr.io/devops-ia/powerpipe:1.5.1 + command: ["powerpipe", "server", "--listen", "network"] + ports: + - "9033:9033" + environment: + POWERPIPE_DATABASE: "postgresql://steampipe:steampipe@steampipe:9193/steampipe" + volumes: + - ./workspace:/workspace + depends_on: + steampipe: + condition: service_healthy + +volumes: + steampipe-data: +``` + +```bash +docker compose up -d +``` + +## Next steps + +- [Configuration](configuration.md) — database connection, env vars, mod workspace +- [Examples](examples.md) — running benchmarks, AWS compliance, mod patterns +- [Kubernetes](kubernetes.md) — deploy with Helm diff --git a/docs/kubernetes.md b/docs/kubernetes.md new file mode 100644 index 0000000..2c6ac57 --- /dev/null +++ b/docs/kubernetes.md @@ -0,0 +1,173 @@ +# Kubernetes + +The recommended way to run Powerpipe on Kubernetes is via the 
[helm-steampipe](https://github.com/devops-ia/helm-steampipe) chart, which manages both Steampipe and Powerpipe as a single release. + +## Quick install with Helm + +```bash +helm repo add devops-ia https://devops-ia.github.io/helm-charts +helm repo update + +helm install steampipe devops-ia/steampipe \ + --set powerpipe.enabled=true \ + --set bbdd.enabled=true \ + --set bbdd.listen=network +``` + +Powerpipe will be available on port 9033 of the Powerpipe Service. + +## Custom values + +Create a `values.yaml` to configure both components: + +```yaml +# Steampipe (required by Powerpipe) +bbdd: + enabled: true + listen: network + +# Powerpipe +powerpipe: + enabled: true + image: + repository: ghcr.io/devops-ia/powerpipe + tag: "1.5.1" + env: + - name: POWERPIPE_MAX_PARALLEL + value: "10" + - name: POWERPIPE_LOG_LEVEL + value: "warn" + resources: + requests: + memory: "512Mi" + cpu: "250m" + limits: + memory: "2Gi" + cpu: "1" + ingress: + enabled: true + className: nginx + hosts: + - host: powerpipe.example.com + paths: + - path: / + pathType: Prefix +``` + +```bash +helm install steampipe devops-ia/steampipe -f values.yaml +``` + +## Mounting a mod workspace + +Use a PersistentVolumeClaim to persist installed mods: + +```yaml +powerpipe: + enabled: true + extraVolumes: + - name: workspace + persistentVolumeClaim: + claimName: powerpipe-workspace + extraVolumeMounts: + - name: workspace + mountPath: /workspace + env: + - name: POWERPIPE_MOD_LOCATION + value: /workspace +``` + +Install a mod in the PVC on first run using an init container: + +```yaml +powerpipe: + initContainers: + - name: install-mods + image: ghcr.io/devops-ia/powerpipe:1.5.1 + command: + - sh + - -c + - | + powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance + env: + - name: POWERPIPE_MOD_LOCATION + value: /workspace + volumeMounts: + - name: workspace + mountPath: /workspace +``` + +## Database connection with Kubernetes Secrets + +Store the Steampipe database password in a Secret: 
+ +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: steampipe-db-password +type: Opaque +stringData: + password: "mysecretpassword" +``` + +Reference it in the Powerpipe deployment: + +```yaml +powerpipe: + enabled: true + extraEnvVarsSecret: steampipe-db-password + env: + - name: STEAMPIPE_DATABASE_PASSWORD + valueFrom: + secretKeyRef: + name: steampipe-db-password + key: password +``` + +## OpenShift + +The image is OpenShift compatible (UID 9193, GID 0): + +```yaml +# No securityContext overrides needed — runs with the default SCC +powerpipe: + enabled: true + securityContext: + runAsUser: 9193 + runAsGroup: 0 + fsGroup: 0 +``` + +## Health checks + +Powerpipe exposes an HTTP endpoint. The chart configures these automatically: + +```yaml +livenessProbe: + httpGet: + path: / + port: 9033 + initialDelaySeconds: 30 + periodSeconds: 30 + +readinessProbe: + httpGet: + path: / + port: 9033 + initialDelaySeconds: 10 + periodSeconds: 10 +``` + +## Verifying the deployment + +```bash +# Check pods are running +kubectl get pods -l app.kubernetes.io/name=steampipe + +# Port-forward to access Powerpipe locally +kubectl port-forward svc/steampipe-powerpipe 9033:9033 + +# Open browser +open http://localhost:9033 +``` diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md new file mode 100644 index 0000000..c454e55 --- /dev/null +++ b/docs/troubleshooting.md @@ -0,0 +1,165 @@ +# Troubleshooting + +## Powerpipe exits immediately + +**Symptom:** Container exits right after starting with no error message. + +**Cause:** Missing `POWERPIPE_DATABASE` or unreachable Steampipe endpoint. 
+ +**Fix:** + +```bash +# Check logs +docker logs powerpipe + +# Verify Steampipe is reachable from Powerpipe container +docker exec powerpipe bash -c "pg_isready -h steampipe -p 9193" + +# Ensure Steampipe is listening on network (not just localhost) +# Steampipe must be started with --database-listen network +docker logs steampipe | grep "Listen" +``` + +--- + +## Cannot connect to database + +**Symptom:** `Error: connection refused` or `FATAL: password authentication failed` + +**Fix:** + +```bash +# Verify the connection string +docker run --rm \ + -e POWERPIPE_DATABASE="postgresql://steampipe:yourpassword@steampipe:9193/steampipe" \ + ghcr.io/devops-ia/powerpipe:1.5.1 \ + powerpipe query "select 1" + +# Check Steampipe password +docker exec steampipe steampipe service status --show-password +``` + +--- + +## Port 9033 already in use + +**Symptom:** `address already in use: 9033` + +**Fix:** + +```bash +# Find the conflicting process +lsof -i :9033 + +# Use a different host port (container still uses 9033 internally) +docker run -d -p 19033:9033 ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +--- + +## Mod not found + +**Symptom:** `Error: no mods found in /workspace` or benchmark commands fail with `not found`. + +**Cause:** Mod not installed, or `POWERPIPE_MOD_LOCATION` not set correctly. + +**Fix:** + +```bash +# Verify mod location +docker exec powerpipe ls /workspace + +# Install the mod +docker exec powerpipe powerpipe mod install github.com/turbot/steampipe-mod-aws-compliance + +# Verify mod was installed +docker exec powerpipe powerpipe mod list +``` + +If using a mounted volume, ensure the mount is writable: + +```bash +docker run -d \ + -v "$PWD/workspace:/workspace" \ # writable, no :ro + -e POWERPIPE_MOD_LOCATION=/workspace \ + ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +--- + +## Permission denied on /workspace + +**Symptom:** `mkdir: cannot create directory '/workspace': Permission denied` or mod install fails. 
+ +**Cause:** The workspace volume is owned by root and Powerpipe runs as UID 9193. + +**Fix:** + +```bash +# Set correct ownership before mounting +mkdir -p workspace +chown -R 9193:0 workspace +chmod -R g=u workspace +``` + +Or in Docker Compose: + +```yaml +services: + powerpipe: + user: "9193:0" + volumes: + - workspace:/workspace +``` + +--- + +## Out of memory / OOM killed + +**Symptom:** Container killed by OOM. `dmesg | grep -i oom` shows the process. + +**Fix:** + +```bash +docker run -d \ + -e POWERPIPE_MEMORY_MAX_MB=2048 \ + -e POWERPIPE_MAX_PARALLEL=5 \ + --memory=3g \ + ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +Rule of thumb: set `--memory` to `POWERPIPE_MEMORY_MAX_MB` × 1.5 to allow headroom. + +--- + +## Dashboard timeout + +**Symptom:** Dashboard loads but queries never complete, showing spinners indefinitely. + +**Fix:** + +```bash +# Increase timeouts (0 = unlimited) +docker run -d \ + -e POWERPIPE_DASHBOARD_TIMEOUT=300 \ + -e POWERPIPE_BENCHMARK_TIMEOUT=600 \ + ghcr.io/devops-ia/powerpipe:1.5.1 +``` + +Also check Steampipe query timeout: + +```bash +docker exec steampipe bash -c "echo 'SHOW steampipe_query_timeout' | psql -h localhost -p 9193 -U steampipe -d steampipe" +``` + +--- + +## Enabling debug logs + +```bash +docker run --rm \ + -e POWERPIPE_LOG_LEVEL=debug \ + -e POWERPIPE_DATABASE="..." 
\ + ghcr.io/devops-ia/powerpipe:1.5.1 \ + powerpipe server 2>&1 | tee powerpipe-debug.log +``` diff --git a/examples/docker-compose.yml b/examples/docker-compose.yml new file mode 100644 index 0000000..7545402 --- /dev/null +++ b/examples/docker-compose.yml @@ -0,0 +1,40 @@ +services: + steampipe: + image: ghcr.io/devops-ia/steampipe:2.4.1 + command: ["steampipe", "service", "start", "--foreground", "--database-listen", "network"] + environment: + STEAMPIPE_DATABASE_PASSWORD: steampipe + STEAMPIPE_LOG_LEVEL: warn + volumes: + - steampipe-data:/home/steampipe/.steampipe + # Mount plugin config (see examples/aws.spc) + # - ./aws.spc:/home/steampipe/.steampipe/config/aws.spc:ro + ports: + - "9193:9193" + healthcheck: + test: ["CMD", "pg_isready", "-h", "localhost", "-p", "9193"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + restart: unless-stopped + + powerpipe: + image: ghcr.io/devops-ia/powerpipe:1.5.1 + command: ["powerpipe", "server", "--listen", "network"] + ports: + - "9033:9033" + environment: + POWERPIPE_DATABASE: "postgresql://steampipe:steampipe@steampipe:9193/steampipe" + POWERPIPE_LOG_LEVEL: warn + POWERPIPE_MOD_LOCATION: /workspace + volumes: + - workspace:/workspace + depends_on: + steampipe: + condition: service_healthy + restart: unless-stopped + +volumes: + steampipe-data: + workspace: diff --git a/examples/powerpipe.ppvars b/examples/powerpipe.ppvars new file mode 100644 index 0000000..de024a2 --- /dev/null +++ b/examples/powerpipe.ppvars @@ -0,0 +1,15 @@ +# Example Powerpipe variable file (.ppvars) +# Place this file in your mod workspace directory to set input variables. 
+# Reference: https://powerpipe.io/docs/reference/config-files/variables + +# Tags to filter benchmark results (optional) +# benchmark_tags = { +# environment = "production" +# team = "platform" +# } + +# Limit benchmarks to specific AWS regions (optional, defaults to all) +# regions = ["us-east-1", "eu-west-1"] + +# Control the number of parallel control evaluations +# max_parallel = 5 diff --git a/scripts/compare_snapshots.py b/scripts/compare_snapshots.py new file mode 100644 index 0000000..cfdccbd --- /dev/null +++ b/scripts/compare_snapshots.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +"""Compare two CLI snapshots and generate a semantic diff. + +Usage: + python3 scripts/compare_snapshots.py old.json new.json [--output-md diff.md] [--output-json diff.json] + +Outputs: + - JSON diff to stdout (or --output-json) + - Markdown summary to --output-md (optional) + +Exit codes: + 0 — no behavioral changes + 1 — behavioral changes detected + 2 — error +""" + +import argparse +import json +import sys +from pathlib import Path + + +def diff_lists(old: list, new: list) -> dict: + """Compare two sorted lists, return added/removed.""" + old_set = set(old) + new_set = set(new) + added = sorted(new_set - old_set) + removed = sorted(old_set - new_set) + return {"added": added, "removed": removed} + + +_SKIP_KEYS = {"version", "snapshot_date"} +_HASH_SUFFIX = "_hash" + + +def _array_keys(snapshot: dict) -> list[str]: + """Return all keys whose values are lists (skip metadata/hash keys).""" + return sorted( + k for k, v in snapshot.items() + if isinstance(v, list) and k not in _SKIP_KEYS + ) + + +def _hash_keys(snapshot: dict) -> list[str]: + """Return all keys that are hash fields.""" + return sorted( + k for k, v in snapshot.items() + if isinstance(v, str) and k.endswith(_HASH_SUFFIX) + ) + + +def compare(old: dict, new: dict) -> dict: + """Generate semantic diff between two snapshots (generic, works for any CLI).""" + # Collect all array keys from both snapshots + 
all_array_keys = sorted(set(_array_keys(old)) | set(_array_keys(new))) + all_hash_keys = sorted(set(_hash_keys(old)) | set(_hash_keys(new))) + + result: dict = { + "has_changes": False, + "version_change": f"{old.get('version', '?')} → {new.get('version', '?')}", + "categories": {}, + "hash_changes": [], + } + + for key in all_array_keys: + d = diff_lists(old.get(key, []), new.get(key, [])) + result["categories"][key] = d + if d["added"] or d["removed"]: + result["has_changes"] = True + + for key in all_hash_keys: + if old.get(key) != new.get(key): + result["hash_changes"].append(key.removesuffix(_HASH_SUFFIX).replace("_", " ")) + + return result + + +def render_diff_table(label: str, diff: dict) -> str: + """Render a single section's added/removed as markdown.""" + lines = [] + if diff["added"] or diff["removed"]: + lines.append(f"\n### {label}") + lines.append("| Change | Item |") + lines.append("|--------|------|") + for item in diff["removed"]: + lines.append(f"| ❌ Removed | `{item}` |") + for item in diff["added"]: + lines.append(f"| ➕ Added | `{item}` |") + return "\n".join(lines) + + +def render_markdown(diff: dict) -> str: + """Render full markdown summary.""" + lines = [f"## 🔍 CLI Behavioral Changes: {diff['version_change']}\n"] + + if not diff["has_changes"] and not diff["hash_changes"]: + lines.append("✅ **No behavioral changes detected.**") + lines.append( + "\nThe new version has the same CLI flags, env vars, and subcommands." + ) + return "\n".join(lines) + + for key, section_diff in diff["categories"].items(): + label = key.replace("_", " ").title() + table = render_diff_table(label, section_diff) + if table: + lines.append(table) + + if diff["hash_changes"]: + lines.append(f"\n### Help text changes") + lines.append( + f"Help text changed for: {', '.join(diff['hash_changes'])}. " + "Review `--help` output for wording/description updates." 
+ ) + + if diff["has_changes"]: + lines.append("\n---") + lines.append("### ⚠️ Action needed") + lines.append( + "These changes may require updates to `README.md` and `Dockerfile` ENV.\n" + ) + lines.append("@copilot Please review the behavioral changes above and:") + lines.append("1. Update `README.md` flag and env var tables") + lines.append("2. Update `Dockerfile` ENV section if defaults changed") + lines.append("3. Ensure documented behavior matches the current CLI") + + return "\n".join(lines) + + +def main(): + parser = argparse.ArgumentParser(description="Compare CLI snapshots") + parser.add_argument("old", help="Path to old cli-snapshot.json") + parser.add_argument("new", help="Path to new cli-snapshot.json") + parser.add_argument("--output-md", help="Write markdown summary to file") + parser.add_argument("--output-json", help="Write JSON diff to file") + args = parser.parse_args() + + try: + old = json.loads(Path(args.old).read_text()) + new = json.loads(Path(args.new).read_text()) + except (json.JSONDecodeError, FileNotFoundError) as e: + print(f"ERROR: {e}", file=sys.stderr) + sys.exit(2) + + diff = compare(old, new) + + # Output JSON (flatten categories for backward compat) + output = {k: v for k, v in diff.items() if k != "categories"} + output.update(diff["categories"]) + diff_json = json.dumps(output, indent=2) + if args.output_json: + Path(args.output_json).write_text(diff_json + "\n") + else: + print(diff_json) + + # Output markdown + if args.output_md: + md = render_markdown(diff) + Path(args.output_md).write_text(md + "\n") + + # Exit code: 1 if behavioral changes, 0 if none + sys.exit(1 if diff["has_changes"] else 0) + + +if __name__ == "__main__": + main() diff --git a/scripts/extract-cli-snapshot.sh b/scripts/extract-cli-snapshot.sh new file mode 100755 index 0000000..2b042b4 --- /dev/null +++ b/scripts/extract-cli-snapshot.sh @@ -0,0 +1,124 @@ +#!/usr/bin/env bash +# Extract a behavioral snapshot of a Cobra CLI. 
+# Runs INSIDE the Docker container (mount this script as a volume). +# +# Usage: +# docker run --rm --network none \ +# -v "$PWD/scripts:/scripts:ro" \ +# powerpipe:test bash /scripts/extract-cli-snapshot.sh +# +# Override binary: CLI_BIN=powerpipe bash /scripts/extract-cli-snapshot.sh +# Override depth: MAX_DEPTH=3 bash /scripts/extract-cli-snapshot.sh +# +# Outputs JSON to stdout. Env vars are NOT included (extracted separately +# from upstream source code by extract-env-vars.sh). + +set -euo pipefail + +# Suppress color, update checks, and telemetry for deterministic output. +export NO_COLOR=1 +export STEAMPIPE_UPDATE_CHECK=false STEAMPIPE_TELEMETRY=none +export POWERPIPE_UPDATE_CHECK=false POWERPIPE_TELEMETRY=none +export TERM=dumb + +readonly CLI="${CLI_BIN:-powerpipe}" +readonly MAX_DEPTH="${MAX_DEPTH:-4}" + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +# Strip ANSI escape sequences from stdin. +_strip_ansi() { + sed 's/\x1b\[[0-9;]*[mGKHF]//g' +} + +# Emit clean --help text for a command. +# Exits non-zero if the command itself fails (not just "no subcommands"). +_help() { + "$@" --help 2>&1 | _strip_ansi +} + +# Parse subcommand names from help text on stdin. +# Prints one name per line; empty output (exit 0) = no Available Commands section. +_parse_subcommands() { + awk ' + /^[[:space:]]*Available Commands:/ { in_section=1; next } + in_section && /^[^[:space:]]/ { exit } + in_section && /^[[:space:]]+[a-z]/ { print $1 } + ' | grep -xv 'help' || true +} + +# --------------------------------------------------------------------------- +# Recursive emitter +# --------------------------------------------------------------------------- + +# Emit one JSON fragment per command visited (flags + help hash), then recurse. +# Args: depth key_prefix [cmd_words...] 
+_emit() { + local depth="$1" + local key_prefix="$2" + shift 2 + # $@ = full command words, e.g.: powerpipe benchmark run + + # Capture help text ONCE; derive flags, subcommands, and hash from it. + local help_text + if ! help_text=$(_help "$@"); then + return 0 # command failed — skip without poisoning the snapshot + fi + + local flags + flags=$(printf '%s\n' "$help_text" \ + | grep -oE -- '--[a-z][a-z0-9-]+' \ + | sort -u \ + | jq -R . | jq -s . \ + || printf '[]') + + local hash + hash=$(printf '%s' "$help_text" | sha256sum | awk '{print $1}') + + jq -n \ + --arg kf "${key_prefix}_flags" \ + --argjson vf "$flags" \ + --arg kh "${key_prefix}_help_hash" \ + --arg vh "$hash" \ + '{($kf): $vf, ($kh): $vh}' + + [[ "$depth" -ge "$MAX_DEPTH" ]] && return 0 + + local subcmds + subcmds=$(printf '%s\n' "$help_text" | _parse_subcommands) || return 0 + + while IFS= read -r sub; do + [[ -z "$sub" ]] && continue + _emit $(( depth + 1 )) "${key_prefix}_${sub}" "$@" "$sub" + done <<< "$subcmds" +} + +# --------------------------------------------------------------------------- +# Main +# --------------------------------------------------------------------------- + +VERSION=$("$CLI" --version 2>&1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' || printf 'unknown') + +# Capture top-level help once. +TOP_HELP=$(_help "$CLI") +TOP_HASH=$(printf '%s' "$TOP_HELP" | sha256sum | awk '{print $1}') +TOP_CMDS=$(printf '%s\n' "$TOP_HELP" | _parse_subcommands) +SUBCMDS_JSON=$(printf '%s\n' "$TOP_CMDS" | jq -R . | jq -s .) + +# Stream all JSON fragments then merge into one object. +{ + jq -n \ + --arg version "$VERSION" \ + --arg date "$(date -u +%Y-%m-%d)" \ + --arg help_hash "$TOP_HASH" \ + --argjson subs "$SUBCMDS_JSON" \ + '{version: $version, snapshot_date: $date, help_text_hash: $help_hash, subcommands: $subs}' + + while IFS= read -r cmd; do + [[ -z "$cmd" ]] && continue + _emit 1 "$cmd" "$CLI" "$cmd" + done <<< "$TOP_CMDS" + +} | jq -s 'reduce .[] as $item ({}; . 
+ $item)'
diff --git a/scripts/extract-env-vars.sh b/scripts/extract-env-vars.sh
new file mode 100755
index 0000000..930de9b
--- /dev/null
+++ b/scripts/extract-env-vars.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+# Extract environment variable names from upstream Powerpipe source code.
+# Runs on the CI runner (not inside the container).
+#
+# Usage:
+#   bash scripts/extract-env-vars.sh 1.5.1
+#
+# Outputs a JSON array of env var names to stdout.
+
+set -euo pipefail
+
+VERSION="${1:?Usage: $0 <version>}"
+
+# Fetch env.go from the tagged release (powerpipe uses internal/ not pkg/)
+ENV_GO_URL="https://raw.githubusercontent.com/turbot/powerpipe/v${VERSION}/internal/constants/env.go"
+
+ENV_VARS=$(curl -sfL "$ENV_GO_URL" \
+  | grep -oE '"(POWERPIPE_[A-Z_]+|PIPES_[A-Z_]+)"' \
+  | tr -d '"' \
+  | sort -u \
+  | jq -R . | jq -s .)
+
+if [ "$ENV_VARS" = "[]" ] || [ -z "$ENV_VARS" ]; then
+  echo "ERROR: No env vars extracted from $ENV_GO_URL" >&2
+  exit 1
+fi
+
+echo "$ENV_VARS"
diff --git a/structure-tests.yaml b/structure-tests.yaml
new file mode 100644
index 0000000..89624a2
--- /dev/null
+++ b/structure-tests.yaml
@@ -0,0 +1,87 @@
+schemaVersion: "2.0.0"
+
+# Validate binary
+fileExistenceTests:
+  - name: "powerpipe binary exists"
+    path: "/usr/local/bin/powerpipe"
+    shouldExist: true
+    permissions: "-rwxr-xr-x"
+
+  - name: "powerpipe home dir exists"
+    path: "/home/powerpipe"
+    shouldExist: true
+
+  - name: "powerpipe install dir exists"
+    path: "/home/powerpipe/.powerpipe"
+    shouldExist: true
+
+  - name: "workspace dir exists"
+    path: "/workspace"
+    shouldExist: true
+
+# Validate image metadata
+metadataTest:
+  labels:
+    - key: "org.opencontainers.image.title"
+      value: "Powerpipe"
+    - key: "org.opencontainers.image.vendor"
+      value: "devops-ia"
+    - key: "org.opencontainers.image.source"
+      value: "https://github.com/devops-ia/powerpipe"
+  exposedPorts:
+    - "9033"
+  cmd:
+    - "powerpipe"
+    - "server"
+  workdir: "/workspace"
+  user: "9193"
+
+# Validate ENV vars and binary
behavior +commandTests: + - name: "POWERPIPE_UPDATE_CHECK is false" + command: "sh" + args: ["-c", "printenv POWERPIPE_UPDATE_CHECK"] + expectedOutput: ["false"] + exitCode: 0 + + - name: "POWERPIPE_TELEMETRY is none" + command: "sh" + args: ["-c", "printenv POWERPIPE_TELEMETRY"] + expectedOutput: ["none"] + exitCode: 0 + + - name: "POWERPIPE_INSTALL_DIR is set" + command: "sh" + args: ["-c", "printenv POWERPIPE_INSTALL_DIR"] + expectedOutput: ["/home/powerpipe/.powerpipe"] + exitCode: 0 + + - name: "POWERPIPE_LISTEN is network" + command: "sh" + args: ["-c", "printenv POWERPIPE_LISTEN"] + expectedOutput: ["network"] + exitCode: 0 + + - name: "POWERPIPE_MOD_LOCATION is workspace" + command: "sh" + args: ["-c", "printenv POWERPIPE_MOD_LOCATION"] + expectedOutput: ["/workspace"] + exitCode: 0 + + - name: "POWERPIPE_LOG_LEVEL is warn" + command: "sh" + args: ["-c", "printenv POWERPIPE_LOG_LEVEL"] + expectedOutput: ["warn"] + exitCode: 0 + + - name: "powerpipe --version returns version string" + command: "powerpipe" + args: ["--version"] + expectedOutput: + - "Powerpipe v[0-9]+\\.[0-9]+\\.[0-9]+" + exitCode: 0 + + - name: "powerpipe --help exits 0" + command: "powerpipe" + args: ["--help"] + exitCode: 0 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..e78bfcf --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,5 @@ +"""pytest configuration — adds scripts/ to sys.path for imports.""" +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).parent.parent / "scripts")) diff --git a/tests/requirements.txt b/tests/requirements.txt new file mode 100644 index 0000000..661d893 --- /dev/null +++ b/tests/requirements.txt @@ -0,0 +1,2 @@ +pytest>=8.0 +pytest-cov>=5.0 diff --git a/tests/test_compare_snapshots.py b/tests/test_compare_snapshots.py new file mode 100644 index 0000000..5575b6d --- /dev/null +++ b/tests/test_compare_snapshots.py @@ -0,0 +1,317 @@ +"""Unit tests for scripts/compare_snapshots.py — powerpipe 
fixtures.""" + +import json +import sys +from pathlib import Path + +import pytest + +from compare_snapshots import compare, diff_lists, render_markdown + + +# --------------------------------------------------------------------------- +# Fixtures — powerpipe-specific +# --------------------------------------------------------------------------- + +@pytest.fixture +def powerpipe_snapshot_v1(): + return { + "version": "1.5.1", + "snapshot_date": "2026-01-01", + "subcommands": ["benchmark", "completion", "mod", "server"], + "server_flags": ["--help", "--listen", "--port"], + "benchmark_run_flags": ["--dry-run", "--export", "--help", "--output"], + "mod_flags": ["--help", "--pull"], + "env_vars": [ + "POWERPIPE_LISTEN", + "POWERPIPE_MOD_LOCATION", + "POWERPIPE_PORT", + "POWERPIPE_TELEMETRY", + "POWERPIPE_UPDATE_CHECK", + ], + "help_text_hash": "pp_abc123", + "server_help_hash": "pp_def456", + "benchmark_help_hash": "pp_ghi789", + } + + +@pytest.fixture +def powerpipe_snapshot_v2_no_changes(powerpipe_snapshot_v1): + return {**powerpipe_snapshot_v1, "version": "1.6.0"} + + +@pytest.fixture +def powerpipe_snapshot_v2_with_changes(): + return { + "version": "1.6.0", + "snapshot_date": "2026-02-01", + "subcommands": ["benchmark", "completion", "mod", "server", "variable"], + "server_flags": ["--help", "--listen", "--port", "--workspace"], + "benchmark_run_flags": ["--dry-run", "--export", "--help", "--output"], + "mod_flags": ["--help"], + "env_vars": [ + "POWERPIPE_LISTEN", + "POWERPIPE_MOD_LOCATION", + "POWERPIPE_NEW_VAR", + "POWERPIPE_PORT", + "POWERPIPE_UPDATE_CHECK", + ], + "help_text_hash": "pp_changed", + "server_help_hash": "pp_def456", + "benchmark_help_hash": "pp_ghi789", + } + + +# --------------------------------------------------------------------------- +# diff_lists +# --------------------------------------------------------------------------- + +class TestDiffLists: + def test_added_items(self): + d = diff_lists(["a", "b"], ["a", "b", "c"]) + assert 
d["added"] == ["c"] + assert d["removed"] == [] + + def test_removed_items(self): + d = diff_lists(["a", "b", "c"], ["a", "b"]) + assert d["added"] == [] + assert d["removed"] == ["c"] + + def test_both_added_and_removed(self): + d = diff_lists(["a", "b"], ["b", "c"]) + assert d["added"] == ["c"] + assert d["removed"] == ["a"] + + def test_no_changes(self): + d = diff_lists(["a", "b"], ["a", "b"]) + assert d["added"] == [] + assert d["removed"] == [] + + def test_empty_lists(self): + d = diff_lists([], []) + assert d["added"] == [] + assert d["removed"] == [] + + def test_result_is_sorted(self): + d = diff_lists(["z", "a"], ["z", "b", "c"]) + assert d["added"] == ["b", "c"] + assert d["removed"] == ["a"] + + +# --------------------------------------------------------------------------- +# compare — no changes +# --------------------------------------------------------------------------- + +class TestCompareNoChanges: + def test_identical_snapshots_has_no_changes(self, powerpipe_snapshot_v1): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v1) + assert result["has_changes"] is False + + def test_version_label_only_has_no_changes(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_no_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_no_changes) + assert result["has_changes"] is False + + def test_version_change_is_reported(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_no_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_no_changes) + assert "1.5.1" in result["version_change"] + assert "1.6.0" in result["version_change"] + + def test_no_hash_changes_when_identical(self, powerpipe_snapshot_v1): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v1) + assert result["hash_changes"] == [] + + +# --------------------------------------------------------------------------- +# compare — with behavioral changes +# --------------------------------------------------------------------------- + +class 
TestCompareWithChanges: + def test_has_changes_true(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert result["has_changes"] is True + + def test_added_subcommand_detected(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert "variable" in result["categories"]["subcommands"]["added"] + + def test_added_server_flag_detected(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert "--workspace" in result["categories"]["server_flags"]["added"] + + def test_removed_mod_flag_detected(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert "--pull" in result["categories"]["mod_flags"]["removed"] + + def test_added_env_var_detected(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert "POWERPIPE_NEW_VAR" in result["categories"]["env_vars"]["added"] + + def test_removed_env_var_detected(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert "POWERPIPE_TELEMETRY" in result["categories"]["env_vars"]["removed"] + + def test_hash_change_detected(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + assert len(result["hash_changes"]) > 0 + + +# --------------------------------------------------------------------------- +# compare — dynamic key detection +# --------------------------------------------------------------------------- + +class TestCompareDynamicKeys: + def 
test_powerpipe_specific_keys_detected(self, powerpipe_snapshot_v1): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v1) + assert "server_flags" in result["categories"] + assert "benchmark_run_flags" in result["categories"] + assert "mod_flags" in result["categories"] + + def test_no_steampipe_keys_appear(self, powerpipe_snapshot_v1): + result = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v1) + assert "service_start_flags" not in result["categories"] + assert "query_flags" not in result["categories"] + + def test_new_category_in_new_snapshot_detected(self, powerpipe_snapshot_v1): + old = {**powerpipe_snapshot_v1} + new = {**powerpipe_snapshot_v1, "pipeline_flags": ["--async", "--output"]} + result = compare(old, new) + assert "pipeline_flags" in result["categories"] + assert result["categories"]["pipeline_flags"]["added"] == ["--async", "--output"] + assert result["has_changes"] is True + + +# --------------------------------------------------------------------------- +# render_markdown +# --------------------------------------------------------------------------- + +class TestRenderMarkdown: + def test_no_changes_message(self, powerpipe_snapshot_v1): + diff = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v1) + md = render_markdown(diff) + assert "No behavioral changes detected" in md + assert "Action needed" not in md + + def test_changes_include_tables(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + diff = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + md = render_markdown(diff) + assert "❌ Removed" in md + assert "➕ Added" in md + + def test_changes_include_copilot_mention(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + diff = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + md = render_markdown(diff) + assert "@copilot" in md + + def test_hash_change_section_present(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + diff = 
compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + md = render_markdown(diff) + assert "Help text changes" in md + + def test_version_in_header(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes): + diff = compare(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes) + md = render_markdown(diff) + assert "1.5.1" in md + assert "1.6.0" in md + + +# --------------------------------------------------------------------------- +# TestMain — in-process calls for coverage +# --------------------------------------------------------------------------- + +class TestMain: + def test_main_no_changes_exits_0(self, powerpipe_snapshot_v1, tmp_path, monkeypatch): + import compare_snapshots as cs + old_file = tmp_path / "old.json" + new_file = tmp_path / "new.json" + old_file.write_text(json.dumps(powerpipe_snapshot_v1)) + new_file.write_text(json.dumps(powerpipe_snapshot_v1)) + + monkeypatch.setattr(sys, "argv", ["compare_snapshots.py", str(old_file), str(new_file)]) + with pytest.raises(SystemExit) as exc: + cs.main() + assert exc.value.code == 0 + + def test_main_with_changes_exits_1(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes, tmp_path, monkeypatch): + import compare_snapshots as cs + old_file = tmp_path / "old.json" + new_file = tmp_path / "new.json" + old_file.write_text(json.dumps(powerpipe_snapshot_v1)) + new_file.write_text(json.dumps(powerpipe_snapshot_v2_with_changes)) + + monkeypatch.setattr(sys, "argv", ["compare_snapshots.py", str(old_file), str(new_file)]) + with pytest.raises(SystemExit) as exc: + cs.main() + assert exc.value.code == 1 + + def test_main_bad_json_exits_2(self, tmp_path, monkeypatch): + import compare_snapshots as cs + bad_file = tmp_path / "bad.json" + bad_file.write_text("not valid {{{") + good_file = tmp_path / "good.json" + good_file.write_text("{}") + + monkeypatch.setattr(sys, "argv", ["compare_snapshots.py", str(bad_file), str(good_file)]) + with pytest.raises(SystemExit) as exc: + 
cs.main() + assert exc.value.code == 2 + + def test_main_writes_output_files(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes, tmp_path, monkeypatch): + import compare_snapshots as cs + old_file = tmp_path / "old.json" + new_file = tmp_path / "new.json" + out_json = tmp_path / "diff.json" + out_md = tmp_path / "diff.md" + old_file.write_text(json.dumps(powerpipe_snapshot_v1)) + new_file.write_text(json.dumps(powerpipe_snapshot_v2_with_changes)) + + monkeypatch.setattr(sys, "argv", [ + "compare_snapshots.py", str(old_file), str(new_file), + "--output-json", str(out_json), + "--output-md", str(out_md), + ]) + with pytest.raises(SystemExit): + cs.main() + + assert out_json.exists() + assert out_md.exists() + data = json.loads(out_json.read_text()) + assert data["has_changes"] is True + assert "CLI Behavioral Changes" in out_md.read_text() + + +# --------------------------------------------------------------------------- +# CLI invocation via subprocess (exit codes) +# --------------------------------------------------------------------------- + +class TestExitCodes: + def _run(self, old_data, new_data, tmp_path): + import subprocess + old_file = tmp_path / "old.json" + new_file = tmp_path / "new.json" + old_file.write_text(json.dumps(old_data)) + new_file.write_text(json.dumps(new_data)) + script = Path(__file__).parent.parent / "scripts" / "compare_snapshots.py" + return subprocess.run( + [sys.executable, str(script), str(old_file), str(new_file)], + capture_output=True, + ) + + def test_exit_0_when_no_changes(self, powerpipe_snapshot_v1, tmp_path): + result = self._run(powerpipe_snapshot_v1, powerpipe_snapshot_v1, tmp_path) + assert result.returncode == 0 + + def test_exit_1_when_changes(self, powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes, tmp_path): + result = self._run(powerpipe_snapshot_v1, powerpipe_snapshot_v2_with_changes, tmp_path) + assert result.returncode == 1 + + def test_exit_2_on_missing_file(self, tmp_path): + import 
subprocess + missing = tmp_path / "missing.json" + good_file = tmp_path / "good.json" + good_file.write_text("{}") + script = Path(__file__).parent.parent / "scripts" / "compare_snapshots.py" + result = subprocess.run( + [sys.executable, str(script), str(missing), str(good_file)], + capture_output=True, + ) + assert result.returncode == 2 diff --git a/trivy.yaml b/trivy.yaml new file mode 100644 index 0000000..28bcece --- /dev/null +++ b/trivy.yaml @@ -0,0 +1,9 @@ +# Trivy configuration +# https://trivy.dev/docs/references/configuration/config-file/ + +scan: + # Skip the powerpipe binary from vulnerability scanning. + # CVEs in the Go binary are upstream Turbot's responsibility; + # we cannot patch a pre-compiled third-party binary. + skip-files: + - usr/local/bin/powerpipe