diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml new file mode 100644 index 0000000..e393a2e --- /dev/null +++ b/.github/workflows/go.yml @@ -0,0 +1,34 @@ +name: Go CI +on: + push: + branches: [main, dev] + paths: ['go/**', '.github/workflows/go.yml'] + pull_request: + paths: ['go/**', '.github/workflows/go.yml'] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + go-version: ['1.21', '1.22'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: ${{ matrix.go-version }} + - name: gofmt + working-directory: go + run: | + out=$(gofmt -l .) + if [ -n "$out" ]; then + echo "::error::gofmt would reformat the following files:" + echo "$out" + exit 1 + fi + - name: go vet + working-directory: go + run: go vet ./... + - name: go test + working-directory: go + run: go test ./... diff --git a/.github/workflows/javascript.yml b/.github/workflows/javascript.yml new file mode 100644 index 0000000..c71e8e0 --- /dev/null +++ b/.github/workflows/javascript.yml @@ -0,0 +1,27 @@ +name: JavaScript CI +on: + push: + branches: [main] + paths: ['javascript/**'] + pull_request: + paths: ['javascript/**'] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: ['18', '20'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + - working-directory: javascript + # The JS SDK has no runtime deps and intentionally does not commit a + # lockfile (it is in .gitignore), so use `npm install` instead of + # `npm ci`. Lock-free installs are fine here since devDependencies are + # only used for tests/lint, not anything that ships. 
+ run: | + npm install --no-audit --no-fund + npm test diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml new file mode 100644 index 0000000..83812b4 --- /dev/null +++ b/.github/workflows/python.yml @@ -0,0 +1,23 @@ +name: Python CI +on: + push: + branches: [main] + paths: ['python/**'] + pull_request: + paths: ['python/**'] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.9', '3.12'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - working-directory: python + run: | + pip install -e ".[dev]" + python -m pytest tests/ -v diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..d1c8b4c --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,26 @@ +name: Release Check +on: + push: + branches: [main] + pull_request: + +jobs: + version-consistency: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Check version consistency + run: | + RUST_VER=$(grep '^version' crates/agentpin/Cargo.toml | head -1 | sed 's/.*"\(.*\)"/\1/') + PY_VER=$(grep 'version' python/pyproject.toml | head -1 | sed 's/.*"\(.*\)"/\1/') + JS_VER=$(node -e "console.log(require('./javascript/package.json').version)") + GO_VER=$(grep -E '^const Version' go/internal/version/version.go | sed 's/.*"\(.*\)".*/\1/') + echo "Rust: $RUST_VER" + echo "Python: $PY_VER" + echo "JavaScript: $JS_VER" + echo "Go: $GO_VER" + if [ "$RUST_VER" != "$PY_VER" ] || [ "$RUST_VER" != "$JS_VER" ] || [ "$RUST_VER" != "$GO_VER" ]; then + echo "::error::Version mismatch! 
Rust=$RUST_VER Python=$PY_VER JavaScript=$JS_VER Go=$GO_VER" + exit 1 + fi + echo "All versions match: $RUST_VER" diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml new file mode 100644 index 0000000..74fc87e --- /dev/null +++ b/.github/workflows/rust.yml @@ -0,0 +1,26 @@ +name: Rust CI +on: + push: + branches: [main] + paths: ['crates/**', 'Cargo.toml', 'Cargo.lock'] + pull_request: + paths: ['crates/**', 'Cargo.toml', 'Cargo.lock'] + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + rust: [stable, '1.86'] + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ matrix.rust }} + components: clippy, rustfmt + - run: cargo fmt --check + if: matrix.rust == 'stable' + - run: cargo clippy --workspace -j2 -- -D warnings + if: matrix.rust == 'stable' + - run: cargo test --workspace -j2 + - run: cargo test --workspace -j2 --features fetch diff --git a/.gitignore b/.gitignore index 55b3e98..dca424f 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,10 @@ Cargo.lock # Private keys — NEVER commit these *.private.pem *.private.jwk.json +# Exception: cross-language interop test fixtures under testdata/ ARE +# deliberately committed throwaway keypairs used only for SDK interop tests. +# They are NOT used to sign anything in production. +!go/pkg/verification/testdata/*.private.pem # IDE .idea/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 8210757..409bd15 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,127 @@ All notable changes to the AgentPin project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.3.0] - 2026-05-14 + +### Added + +Four-language parity (Rust, JavaScript, Python, Go) for the A2A AgentCard +extension surface and DNS TXT cross-verification. 
Cards signed in any of the +four SDKs verify cleanly in the other three; signature canonicalisation is +byte-identical across implementations. + +#### A2A AgentCard extension types, signed builder, and verifier + +- **`A2aAgentCard` + supporting types** — minimal A2A AgentCard subset + (`A2aAgentCard`, `A2aAgentCapabilities`, `A2aAgentSkill`) plus the + AgentPin-specific `AgentpinExtension` payload (`agentpin_endpoint`, + `public_key_jwk`, `signature`). Inline definition rather than depending on + the upstream `a2a-types` crate while the A2A spec is still draft. +- **`A2aAgentCardBuilder`** (Rust) / `buildAndSignAgentCard` (JS) / + `build_and_sign_agent_card` (Python) / `a2a.BuildAndSignAgentCard` (Go) — + turns an AgentDeclaration into a signed `A2aAgentCard`. Maps capabilities + to skills via `capability_to_skill`; propagates `allowed_domains` from the + source constraints into `A2aAgentCapabilities.allowed_domains`. Detached + ECDSA P-256 signature covers the canonical bytes of the AgentCard with the + extension cleared. +- **`verify_agentpin_extension(card)`** — verifies the AgentPin extension + signature against the JWK embedded in the extension. Sorted-key canonical + JSON shared across all four SDKs; mirrors the canonicalisation pattern + used by SchemaPin. +- **`AllowedDomains` typed wrapper** — extracted from + `Constraints::allowed_domains` and exposed for cross-protocol use + (SchemaPin v1.4's `A2aVerificationContext` scopes tool verification to the + intersection of caller and provider domains). Empty list = no restriction + (all domains trusted) by convention; `intersect(unrestricted, X) = X`. +- **`a2a_endpoint` field** on `DiscoveryDocument` — optional URL of the + entity's A2A AgentCard endpoint, enabling cross-protocol discovery. + +#### Discovery resolvers for A2A AgentCards + +- **`LocalAgentCardStore`** (all SDKs) — in-memory store of pre-registered + AgentCards keyed by their AgentPin discovery domain. 
Verifies the + AgentPin extension signature at registration time and pre-derives a + `DiscoveryDocument` so the rest of the AgentPin verification stack runs + unchanged. Supports Symbiont's push-based external-agent registration + flow where the coordinator receives AgentCard JSON inline rather than + fetching it from a `.well-known` endpoint. +- **`A2aAgentCardResolver`** (all SDKs; gated on `fetch` in Rust) — fetches + `https://{domain}/.well-known/agent-card.json`, verifies the AgentPin + extension, cross-checks that the embedded `agentpin_endpoint` host + matches the fetched domain, and derives a `DiscoveryDocument`. Exposes + the original A2A representation alongside the derived doc for callers + that want both. + +#### DNS TXT cross-verification at `_agentpin.{domain}` + +- **`dns` module** (all SDKs) — `parse_txt_record`, `verify_dns_match`, + `txt_record_name`. Wire format mirrors SchemaPin's `_schemapin.{domain}` + record with the version tag changed: + `"v=agentpin1; kid=...; fp=sha256:"`. Whitespace-tolerant parser, + case-insensitive on `fp`, ignores unknown fields for forward + compatibility. +- **`fetch_dns_txt(domain)`** — Rust: async lookup behind the new `dns` + Cargo feature (`hickory-resolver`, `tokio`, `async-trait`). JavaScript: + uses Node's built-in `dns/promises`. Python: uses the optional + `dnspython` package. Go: `dns.LookupTxt(ctx, resolver, domain)` over the + standard-library `net.Resolver`. +- **Multi-key match semantics** — AgentPin discovery docs may carry several + keys for rotation; a published TXT record need only match one of them. + When the TXT carries an explicit `kid`, the matching key MUST also carry + the same `kid`. +- **Fail-closed on mismatch** — a publisher who *intentionally* publishes a + TXT record has signaled DNS is part of their trust chain. Divergence + between DNS and `.well-known` indicates compromise of one channel and is + treated as a hard failure. 
+ +#### Go SDK (fourth-language port) + +- **Initial `go/` SDK** — wire-compatible with Rust, JavaScript, and Python. + Mirrors the package layout of the SchemaPin Go SDK. Module path: + `github.com/ThirdKeyAi/agentpin/go`. +- **Packages**: `crypto`, `jwk`, `jwt`, `types`, `discovery`, `credential`, + `verification`, `revocation`, `pinning`, `delegation`, `mutual`, `nonce`, + `bundle`, `resolver`, plus the new `a2a` and `dns` packages added in this + release. +- **CLI**: `cmd/agentpin` with `keygen`, `issue`, `verify`, `bundle` + subcommands matching the Rust binary. +- **ES256-only** enforcement implemented inline using `crypto/ecdsa`. The + JWT verifier rejects `none`, `HS256`, `RS256`, `ES384`, and any other + algorithm before any signature work. No third-party JWT dependency. +- **CI**: `.github/workflows/go.yml` runs `go test`, `go vet`, and + `gofmt -l` on every PR touching `go/**`. The version-consistency check + in `.github/workflows/release.yml` validates the Go SDK's declared + version against the Rust/JavaScript/Python versions. + +### Changed + +- Cross-SDK version coordination — Rust, JavaScript, Python, and Go SDKs + all release as **0.3.0** together. The earlier `0.3.0-alpha.1` Rust + preview is superseded by this entry. +- **Rust MSRV bumped from 1.70 to 1.86.** Downstream ecosystem crates + (`getrandom`, `clap_builder`, the `icu_*` family) have moved to edition + 2024 and/or to declared rust-version 1.86, making the previously- + declared 1.70 floor unbuildable from scratch in practice. The CI + matrix's MSRV row now tests against 1.86. + +### Notes + +- This release is the unblock for **Symbiont v1.8.0 Phase 3** (AgentPin- + verified AgentCards, A2A auth middleware) and **SchemaPin v1.4.0**'s + `A2aVerificationContext` (which consumes `AllowedDomains` for tool- + verification scoping). Both downstream releases were waiting on this + surface. 
+- DNS TXT defends against HTTPS-origin compromise (compromised hosting + account, expired domain not removed from CDN, ACME ownership-validation + bypass) and TLS cert mis-issuance — the DNS credential chain (registrar, + DNS provider, optionally DNSSEC) is independent of the HTTPS hosting + chain. Spec § 4.8.3 reserved this slot in v0.1; this release ships the + implementation across all SDKs. +- All additions are purely additive — v0.2.0 callers are unaffected. + Discovery documents without `a2a_endpoint`, AgentCards without an + `agentpin` extension, and absent `_agentpin` TXT records all behave + exactly as before. + ## [0.2.0] - 2026-02-12 ### Added diff --git a/README.md b/README.md index bcf0fbd..a27185f 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ AgentPin lets organizations publish verifiable identity for their AI agents. Iss - **Credential revocation** at credential, agent, and key level - **Mutual authentication** with challenge-response - **Trust bundles** for air-gapped and enterprise environments -- **Cross-language** — Rust, JavaScript, and Python SDKs produce interoperable credentials +- **Cross-language** — Rust, JavaScript, Python, and Go SDKs produce interoperable credentials ## Quick Start @@ -43,7 +43,7 @@ agentpin verify --credential ```toml [dependencies] -agentpin = { version = "0.2", features = ["fetch"] } +agentpin = { version = "0.3", features = ["fetch"] } ``` ### JavaScript @@ -58,6 +58,13 @@ npm install agentpin pip install agentpin ``` +### Go + +```bash +go install github.com/ThirdKeyAi/agentpin/go/cmd/agentpin@latest +go get github.com/ThirdKeyAi/agentpin/go +``` + ## Documentation | Topic | Link | @@ -80,6 +87,7 @@ crates/ └── agentpin-server/ # HTTP server for .well-known endpoints javascript/ # JavaScript/Node.js SDK python/ # Python SDK +go/ # Go SDK ``` ## License diff --git a/ROADMAP.md b/ROADMAP.md index ba1b1bf..7a21269 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,7 +1,7 @@ # AgentPin Roadmap 
-![Version](https://img.shields.io/badge/current-v0.2.0-brightgreen) -![Next](https://img.shields.io/badge/next-v0.3.0-blue) +![Version](https://img.shields.io/badge/current-v0.3.0-brightgreen) +![Next](https://img.shields.io/badge/next-v0.4.0-blue) ![License](https://img.shields.io/badge/license-MIT-green) **Domain-anchored cryptographic identity for AI agents — the identity layer of the ThirdKey trust stack.** @@ -14,8 +14,8 @@ |---------|--------|----------|--------| | **v0.1.0** | 2026-01 | Core identity, verification, delegation | Shipped | | **v0.2.0** | 2026-02 | Trust bundles, alternative discovery, directory listing | Shipped | -| **v0.3.0** | Q2 2026 | A2A AgentCard extension types + resolver | Planning | -| **v0.4.0** | Q3 2026 | Mutual auth as A2A handshake, cross-language parity | Planning | +| **v0.3.0** | 2026-05-14 | A2A AgentCard extension types + resolvers + AllowedDomains + DNS TXT (Rust, JavaScript, Python, Go) | **Shipped** | +| **v0.4.0** | Q3 2026 | Mutual auth as A2A handshake, hardware-backed keys | Planning | | **v1.0.0** | Q4 2026 | Stable API, full specification compliance | Planning | --- @@ -28,43 +28,35 @@ See [CHANGELOG.md](CHANGELOG.md) for full release notes. --- -## v0.3.0 — A2A AgentCard Types + Resolver (Q2 2026) - -AgentPin becomes the cryptographic identity layer for A2A (Agent-to-Agent) networks. This release defines extension types for A2A AgentCards and a resolver that discovers AgentPin identity from A2A endpoints. 
- -### A2A AgentCard Extension Types - -| Item | Details | -|------|---------| -| `A2aAgentCardExtension` | New type: `agentpin_endpoint`, `public_key_jwk`, `signature` fields | -| `A2aAgentCardBuilder` | Constructs signed A2A AgentCard from `AgentDeclaration` + signing key | -| Capability mapping | `AgentDeclaration.capabilities` → `AgentSkill`, `AgentDeclaration.constraints` → `AgentCapabilities` | -| Verification | Validate A2A extensions during 12-step verification | - -### A2A AgentCard Resolver - -| Item | Details | -|------|---------| -| `A2aAgentCardResolver` | Implements `DiscoveryResolver` — fetches `/.well-known/agent-card.json`, extracts AgentPin extensions | -| `LocalAgentCardStore` | In-memory store of pre-registered AgentCards for agents that don't serve HTTP (e.g., CLI tools, daemon processes). Implements `DiscoveryResolver` — looks up cards by domain/agent-id from local store instead of making HTTP requests. Cards are added via `store.register(card)`. This supports Symbiont v1.7.0's push-based external agent registration where the coordinator receives AgentCard JSON inline rather than fetching it from a `.well-known` endpoint. | -| Fallback chain | Try local store first → A2A card fetch → `agent-identity.json` via `WellKnownResolver` | -| Feature flag | Optional dependency on `a2a-types` behind `a2a` feature flag | - -### Allowed Domains Interface - -| Item | Details | -|------|---------| -| `AllowedDomains` type | New type in `src/types/discovery.rs`: `Vec` of trusted domains extracted from `AgentDeclaration.constraints`. Exported for use by SchemaPin v1.4.0's `A2aVerificationContext` when scoping tool verification to the intersection of caller and provider domains. Convention: empty list means "all domains trusted" (no restriction). 
| - -### Touchpoints - -| Area | Change | -|------|--------| -| New | `src/types/a2a.rs` — `A2aAgentCardExtension`, `A2aAgentCardBuilder` | -| New | `src/a2a.rs` — A2A extension signing and validation logic | -| New | `src/resolver_a2a.rs` — `A2aAgentCardResolver` implementing `DiscoveryResolver` | -| New | `src/resolver_local.rs` — `LocalAgentCardStore` implementing `DiscoveryResolver` | -| Extend | `src/types/discovery.rs` — `a2a_endpoint` field, `AllowedDomains` type | +## v0.3.0 — Shipped (2026-05-14) + +AgentPin became the cryptographic identity layer for A2A (Agent-to-Agent) +networks. Four-language parity (Rust, JavaScript, Python, Go) for the A2A +AgentCard extension surface and DNS TXT cross-verification: cards signed in +any of the four SDKs verify cleanly in the other three. + +Highlights: + +- **A2A AgentCard extension** — signed AgentCards published at + `/.well-known/agent-card.json` with an AgentPin payload + (`agentpin_endpoint`, `public_key_jwk`, `signature`). Detached ECDSA P-256 + signature over the canonical bytes of the card with the extension + cleared, byte-identical across all four SDKs. +- **Resolvers** — `A2aAgentCardResolver` for HTTPS fetch + extension + verification + endpoint-host cross-check; `LocalAgentCardStore` for + in-memory pre-registered cards (backs Symbiont's push-based external- + agent registration). +- **`AllowedDomains` typed wrapper** — empty list = unrestricted convention + shared with SchemaPin v1.4 `A2aVerificationContext` for intersection- + based tool-verification scoping. +- **DNS TXT cross-verification** — `_agentpin.{domain}` IN TXT + `"v=agentpin1; kid=...; fp=sha256:"`. Fail-closed on mismatch + because an intentional publish signals DNS is part of the trust chain. +- **Go SDK** — initial fourth-language port at the v0.3.0 surface, + including A2A and DNS modules. Module path + `github.com/ThirdKeyAi/agentpin/go`, mirrors the SchemaPin Go SDK layout. 
+ +See [CHANGELOG.md](CHANGELOG.md#030---2026-05-14) for full release notes. --- @@ -81,13 +73,13 @@ Adapts AgentPin's challenge-response mutual authentication as an A2A handshake p | `MutualAuthPolicy` | `Required` \| `Optional` \| `Disabled` — configurable per agent | | Nonce expiry | Configurable nonce TTL for A2A use cases (shorter default than general use) | -### Cross-Language Parity - -JavaScript and Python SDKs gain matching implementations: +### Hardware-Backed Keys -- `A2aAgentCardExtension`, `A2aAgentCardBuilder`, `A2aAgentCardResolver` -- Mutual auth JSON-RPC helpers (`createChallenge`, `verifyResponse`) -- Feature-flag equivalents for A2A dependencies +| Item | Details | +|------|---------| +| HSM support | Sign credentials and AgentCards via PKCS#11-compatible HSMs | +| TPM support | TPM 2.0 backend for OS-bound signing keys | +| Key migration | Helpers to migrate existing software keys onto hardware-backed slots | --- @@ -96,10 +88,10 @@ JavaScript and Python SDKs gain matching implementations: | Item | Details | |------|---------| | API audit | Review and stabilize all public types — remove experimental markers | -| A2A types | Finalize `A2aAgentCardExtension` and related types | +| A2A types | Finalize `A2aAgentCard`, `AgentpinExtension`, and related types; re-export upstream `a2a-types` once that crate stabilises | | Integration tests | Comprehensive test suite covering AgentPin + A2A interop scenarios | | Specification | Published spec for AgentPin identity model and A2A extension format | -| Cross-language | Full parity across Rust, JavaScript, and Python — identical verification guarantees | +| Cross-language | Full parity across Rust, JavaScript, Python, and Go — identical verification guarantees | --- @@ -110,7 +102,6 @@ JavaScript and Python SDKs gain matching implementations: | Delegated A2A Auth | Delegate identity verification to trusted intermediaries for hub-and-spoke topologies | | Agent Directory Protocol | Standardized directory for 
discovering agents by capability, domain, or trust level | | Key Rotation for A2A | Seamless key rotation with grace periods — A2A peers notified via protocol extension | -| Hardware-Backed Keys | HSM and TPM support for agent signing keys | --- @@ -124,4 +115,4 @@ We welcome input on roadmap priorities: --- -*Last updated: 2026-03-01 (cross-repo alignment with Symbiont v1.7.0/v1.8.0 and SchemaPin v1.4.0)* +*Last updated: 2026-05-14 (v0.3.0 shipped — A2A AgentCard types, AllowedDomains, LocalAgentCardStore, A2aAgentCardResolver, DNS TXT cross-verification across Rust, JavaScript, Python, and Go)* diff --git a/SKILL.md b/SKILL.md index 17cffa8..83df8c5 100644 --- a/SKILL.md +++ b/SKILL.md @@ -1,8 +1,9 @@ --- name: agentpin title: AgentPin -description: Domain-anchored cryptographic identity protocol for AI agents — ES256 JWT credentials, 12-step verification, TOFU key pinning, revocation, delegation chains, and mutual authentication -version: 0.2.0 +description: Domain-anchored cryptographic identity protocol for AI agents — ES256 JWT credentials, 12-step verification, TOFU key pinning, revocation, delegation chains, mutual authentication, and (v0.3.0, four-language parity) A2A AgentCard extension types + signed AgentCard builder + LocalAgentCardStore + A2aAgentCardResolver + AllowedDomains type for cross-protocol use with SchemaPin v1.4, plus DNS TXT cross-verification at _agentpin.{domain} for second-channel trust independent of HTTPS hosting +version: 0.3.0 +stable_version: 0.3.0 --- # AgentPin Development Skills Guide @@ -157,6 +158,40 @@ pin_store = KeyPinStore() result = verify_credential(credential, discovery_doc, pin_store) ``` +### Go + +```bash +go get github.com/ThirdKeyAi/agentpin/go +go install github.com/ThirdKeyAi/agentpin/go/cmd/agentpin@latest +``` + +```go +import ( + "github.com/ThirdKeyAi/agentpin/go/pkg/credential" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/pinning" + 
"github.com/ThirdKeyAi/agentpin/go/pkg/types" + "github.com/ThirdKeyAi/agentpin/go/pkg/verification" +) + +kp, _ := crypto.GenerateKeyPair() +priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + +cred, _ := credential.IssueCredential( + priv, "my-key-2026", "example.com", + "urn:agentpin:example.com:my-agent", + "verifier.com", + []types.Capability{"read:data", "write:report"}, + nil, nil, 3600, +) + +pinStore := pinning.NewKeyPinStore() +result := verification.VerifyCredentialOffline( + cred, discoveryDoc, nil, pinStore, + "verifier.com", verification.DefaultVerifierConfig(), +) +``` + ### Serve .well-known Endpoints ```bash @@ -193,14 +228,14 @@ Serves: ### Language API Reference -| Operation | Rust | JavaScript | Python | -|-----------|------|------------|--------| -| Generate keys | `crypto::generate_keypair()` | `generateKeypair()` | `generate_keypair()` | -| Issue credential | `CredentialBuilder::new().sign()` | `issueCredential()` | `issue_credential()` | -| Verify credential | `verify_credential()` | `verifyCredential()` | `verify_credential()` | -| Key pinning | `KeyPinStore` | `KeyPinStore` | `KeyPinStore` | -| Trust bundle | `TrustBundle::from_json()` | `TrustBundle.fromJson()` | `TrustBundle.from_json()` | -| Mutual auth | `MutualAuth::challenge()` | `createChallenge()` | `create_challenge()` | +| Operation | Rust | JavaScript | Python | Go | +|-----------|------|------------|--------|-----| +| Generate keys | `crypto::generate_key_pair()` | `generateKeypair()` | `generate_keypair()` | `crypto.GenerateKeyPair()` | +| Issue credential | `credential::issue_credential()` | `issueCredential()` | `issue_credential()` | `credential.IssueCredential()` | +| Verify credential | `verification::verify_credential_offline()` | `verifyCredentialOffline()` | `verify_credential_offline()` | `verification.VerifyCredentialOffline()` | +| Key pinning | `KeyPinStore` | `KeyPinStore` | `KeyPinStore` | `pinning.KeyPinStore` | +| Trust bundle | `TrustBundle::new()` | `new 
TrustBundle()` | `TrustBundle()` | `bundle.NewTrustBundle()` | +| Mutual auth | `mutual::create_challenge()` | `createChallenge()` | `create_challenge()` | `mutual.CreateChallenge()` | ### Feature Flags @@ -348,4 +383,4 @@ cargo fmt --check 7. **Feature-gate HTTP** — use the `fetch` feature only when online discovery is needed; default is offline-capable 8. **Cross-compatible with SchemaPin** — both use ECDSA P-256, same crypto primitives 9. **Trust bundles** are ideal for CI/CD and air-gapped deployments — pre-package discovery + revocation data -10. **JavaScript and Python SDKs** provide identical verification guarantees to the Rust crate +10. **JavaScript, Python, and Go SDKs** provide identical verification guarantees to the Rust crate diff --git a/context7.json b/context7.json index 0364b30..b6b6291 100644 --- a/context7.json +++ b/context7.json @@ -3,7 +3,7 @@ "url": "https://context7.com/thirdkeyai/agentpin", "public_key": "pk_Ehy7QXQTu2Keb0e5BNeyx", "projectTitle": "AgentPin", - "description": "Domain-anchored cryptographic identity protocol for AI agents — ES256 JWT credentials, 12-step verification, TOFU key pinning, revocation checking, delegation chains, and mutual authentication. Implementations in Rust, JavaScript, and Python. Part of the ThirdKey trust stack.", + "description": "Domain-anchored cryptographic identity protocol for AI agents — ES256 JWT credentials, 12-step verification, TOFU key pinning, revocation checking, delegation chains, mutual authentication, and (v0.3.0, four-language parity) A2A AgentCard extension types + signed AgentCard builder + LocalAgentCardStore + A2aAgentCardResolver + AllowedDomains type for cross-protocol use, plus DNS TXT cross-verification at _agentpin.{domain} (`v=agentpin1; kid=...; fp=sha256:...`) for second-channel trust independent of HTTPS hosting. Implementations in Rust, JavaScript, Python, and Go — cards signed in any SDK verify in the other three. 
Part of the ThirdKey trust stack: SchemaPin (tool integrity) → AgentPin (agent identity) → Symbiont (runtime).", "folders": [ "SKILL.md", "README.md", @@ -11,6 +11,7 @@ "ROADMAP.md", "javascript/README.md", "python/README.md", + "go/README.md", "docs/index.md", "docs/getting-started.md", "docs/verification-flow.md", @@ -37,6 +38,7 @@ "**/*.py", "**/*.ts", "**/*.js", + "**/*.go", "**/*.lock", "**/*.toml", "**/*.cfg", diff --git a/crates/agentpin-cli/Cargo.toml b/crates/agentpin-cli/Cargo.toml index bd45e7e..518d6c8 100644 --- a/crates/agentpin-cli/Cargo.toml +++ b/crates/agentpin-cli/Cargo.toml @@ -2,7 +2,7 @@ name = "agentpin-cli" version = "0.1.0" edition = "2021" -rust-version = "1.70" +rust-version = "1.86" description = "CLI tool for AgentPin credential management" license = "MIT" authors = ["Jascha Wanger "] @@ -14,7 +14,7 @@ path = "src/main.rs" [dependencies] agentpin = { path = "../agentpin", features = ["fetch"] } -clap = { version = "4.0", features = ["derive"] } +clap = { version = "4", features = ["derive"] } tokio = { version = "1.0", features = ["full"] } serde_json = "1.0" chrono = "0.4" diff --git a/crates/agentpin-server/Cargo.toml b/crates/agentpin-server/Cargo.toml index e751b07..a9fd967 100644 --- a/crates/agentpin-server/Cargo.toml +++ b/crates/agentpin-server/Cargo.toml @@ -2,7 +2,7 @@ name = "agentpin-server" version = "0.1.0" edition = "2021" -rust-version = "1.70" +rust-version = "1.86" description = "HTTP server for AgentPin discovery and revocation endpoints" license = "MIT" authors = ["Jascha Wanger "] diff --git a/crates/agentpin-server/src/routes.rs b/crates/agentpin-server/src/routes.rs index 921b3fc..1851c4f 100644 --- a/crates/agentpin-server/src/routes.rs +++ b/crates/agentpin-server/src/routes.rs @@ -80,6 +80,7 @@ mod tests { revocation_endpoint: None, policy_url: None, schemapin_endpoint: None, + a2a_endpoint: None, max_delegation_depth: 2, updated_at: "2026-01-01T00:00:00Z".to_string(), }; diff --git a/crates/agentpin/Cargo.toml 
b/crates/agentpin/Cargo.toml index 6412595..e913f1e 100644 --- a/crates/agentpin/Cargo.toml +++ b/crates/agentpin/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "agentpin" -version = "0.2.0" +version = "0.3.0" edition = "2021" -rust-version = "1.70" +rust-version = "1.86" description = "Domain-anchored cryptographic identity protocol for AI agents" license = "MIT" authors = ["Jascha Wanger "] @@ -13,6 +13,7 @@ categories = ["authentication", "cryptography"] [features] default = [] fetch = ["reqwest", "tokio", "async-trait"] +dns = ["hickory-resolver", "tokio", "async-trait"] [dependencies] p256 = { version = "0.13", features = ["ecdsa", "pem", "jwk"] } @@ -31,5 +32,8 @@ reqwest = { version = "0.12", features = ["json"], optional = true } tokio = { version = "1.0", optional = true } tempfile = { version = "3.0", optional = true } +# Optional dependency for DNS TXT cross-verification (v0.3.0) +hickory-resolver = { version = "0.24", optional = true } + [dev-dependencies] tempfile = "3.0" diff --git a/crates/agentpin/src/a2a.rs b/crates/agentpin/src/a2a.rs new file mode 100644 index 0000000..556e943 --- /dev/null +++ b/crates/agentpin/src/a2a.rs @@ -0,0 +1,385 @@ +//! A2A AgentCard signing and verification (v0.3.0). +//! +//! This module turns an AgentPin [`AgentDeclaration`] into a signed A2A +//! [`A2aAgentCard`] via [`A2aAgentCardBuilder`], and verifies a received +//! AgentCard's [`AgentpinExtension`] payload via [`verify_agentpin_extension`]. +//! +//! ## Signature canonicalisation +//! +//! The detached ECDSA P-256 signature inside an [`AgentpinExtension`] covers +//! the **canonical bytes of the AgentCard with the extension cleared**: +//! +//! 1. Take the AgentCard you intend to publish. +//! 2. Replace its `agentpin` field with `None`. +//! 3. Serialise it via `serde_json::to_vec` with sorted-key canonical form. +//! 4. Sign those bytes with ECDSA P-256. +//! +//! The verifier reproduces step 1–3 from the received card and checks the +//! 
signature against the JWK in the extension. This means the signature +//! covers everything except the extension itself — including `name`, `url`, +//! `capabilities`, `skills`, and `agentpin_endpoint` → if any field is +//! tampered with, the signature breaks. + +use p256::pkcs8::{DecodePrivateKey, EncodePublicKey}; +use serde::Serialize; + +use crate::crypto; +use crate::error::Error; +use crate::jwk::{jwk_thumbprint, pem_to_jwk}; +use crate::types::a2a::{ + capability_to_skill, A2aAgentCapabilities, A2aAgentCard, A2aAgentSkill, AgentpinExtension, +}; +use crate::types::discovery::{AgentDeclaration, AllowedDomains}; + +/// Derive the public-key PEM from a private-key PEM (P-256). +fn derive_public_pem(private_key_pem: &str) -> Result { + let secret = p256::SecretKey::from_pkcs8_pem(private_key_pem) + .map_err(|e| Error::Ecdsa(e.to_string()))?; + let public = secret.public_key(); + public + .to_public_key_pem(p256::pkcs8::LineEnding::LF) + .map_err(|e| Error::Ecdsa(e.to_string())) +} + +/// Builder that turns an AgentPin [`AgentDeclaration`] into a signed A2A +/// [`A2aAgentCard`]. +/// +/// Minimal usage: +/// +/// ```ignore +/// let card = A2aAgentCardBuilder::from_declaration( +/// "https://example.com/agent", +/// &declaration, +/// ) +/// .agentpin_endpoint("https://example.com/.well-known/agent-identity.json") +/// .sign(&private_key_pem, "kid-1")?; +/// ``` +pub struct A2aAgentCardBuilder<'a> { + url: String, + declaration: &'a AgentDeclaration, + agentpin_endpoint: Option, + skill_overrides: Vec, + streaming: bool, + push_notifications: bool, +} + +impl<'a> A2aAgentCardBuilder<'a> { + /// Start a builder seeded from an [`AgentDeclaration`]. + /// + /// The capability list is mapped 1:1 to A2A skills via + /// [`capability_to_skill`]; the `allowed_domains` constraint is mapped to + /// [`A2aAgentCapabilities::allowed_domains`]. 
+ pub fn from_declaration(url: impl Into, declaration: &'a AgentDeclaration) -> Self { + Self { + url: url.into(), + declaration, + agentpin_endpoint: None, + skill_overrides: Vec::new(), + streaming: false, + push_notifications: false, + } + } + + /// Set the AgentPin discovery endpoint URL written into the extension. + pub fn agentpin_endpoint(mut self, endpoint: impl Into) -> Self { + self.agentpin_endpoint = Some(endpoint.into()); + self + } + + /// Replace the auto-generated skill list with caller-supplied entries. + /// Useful when callers want richer names or descriptions than the raw + /// capability strings provide. + pub fn with_skill_overrides(mut self, skills: Vec) -> Self { + self.skill_overrides = skills; + self + } + + /// Mark the agent as supporting streaming responses. + pub fn streaming(mut self, value: bool) -> Self { + self.streaming = value; + self + } + + /// Mark the agent as emitting push notifications. + pub fn push_notifications(mut self, value: bool) -> Self { + self.push_notifications = value; + self + } + + /// Construct an unsigned [`A2aAgentCard`] (extension absent). + /// + /// Useful for testing the structural mapping without performing crypto. 
+ pub fn build_unsigned(&self) -> A2aAgentCard { + let skills = if self.skill_overrides.is_empty() { + self.declaration + .capabilities + .iter() + .map(capability_to_skill) + .collect() + } else { + self.skill_overrides.clone() + }; + + let allowed_domains = self + .declaration + .constraints + .as_ref() + .map(|c| c.allowed_domains_typed()) + .unwrap_or_else(AllowedDomains::unrestricted); + + A2aAgentCard { + name: self.declaration.name.clone(), + description: self.declaration.description.clone(), + version: self.declaration.version.clone(), + url: self.url.clone(), + capabilities: A2aAgentCapabilities { + streaming: self.streaming, + push_notifications: self.push_notifications, + allowed_domains, + }, + skills, + agentpin: None, + } + } + + /// Sign the AgentCard with the given ECDSA P-256 private key (PEM). + /// + /// Requires [`agentpin_endpoint`](Self::agentpin_endpoint) to have been + /// set, otherwise returns [`Error::Validation`]. + pub fn sign(&self, private_key_pem: &str, kid: &str) -> Result { + let endpoint = self.agentpin_endpoint.clone().ok_or_else(|| { + Error::Discovery( + "A2aAgentCardBuilder::sign requires agentpin_endpoint to be set".to_string(), + ) + })?; + + // Build the unsigned card; sign over its canonical bytes. + let mut card = self.build_unsigned(); + let canonical = canonicalize_for_signing(&card)?; + let signature_b64 = crypto::sign_data(private_key_pem, &canonical)?; + + let public_key_pem = derive_public_pem(private_key_pem)?; + let public_key_jwk = pem_to_jwk(&public_key_pem, kid)?; + + card.agentpin = Some(AgentpinExtension { + agentpin_endpoint: endpoint, + public_key_jwk, + signature: signature_b64, + }); + Ok(card) + } +} + +/// Verify the [`AgentpinExtension`] of an A2A AgentCard. +/// +/// Returns `Ok(())` when: +/// 1. The extension is present. +/// 2. The detached signature verifies against `public_key_jwk` over the +/// canonicalised bytes of the AgentCard with the extension cleared. 
+///
+/// The caller still has to verify the JWK chains back to a trusted AgentPin
+/// discovery document — this function only proves the AgentCard hasn't been
+/// tampered with relative to the key inside its own extension. Pair it with
+/// [`crate::resolver_a2a::A2aAgentCardResolver`] for the full chain.
+pub fn verify_agentpin_extension(card: &A2aAgentCard) -> Result<(), Error> {
+    let extension = card
+        .agentpin
+        .as_ref()
+        .ok_or_else(|| Error::Discovery("AgentCard has no agentpin extension".to_string()))?;
+
+    // Reconstruct the canonical signing input: card with extension cleared.
+    let mut without_ext = card.clone();
+    without_ext.agentpin = None;
+    let canonical = canonicalize_for_signing(&without_ext)?;
+
+    // Convert JWK -> PEM for the existing crypto helper, then verify.
+    let public_key_pem = crate::jwk::jwk_to_pem(&extension.public_key_jwk)?;
+    let valid = crypto::verify_signature(&public_key_pem, &canonical, &extension.signature)?;
+    if !valid {
+        return Err(Error::Discovery(
+            "A2A AgentCard signature did not verify against extension JWK".to_string(),
+        ));
+    }
+    Ok(())
+}
+
+/// Compute the JWK thumbprint (key id) of the public key in an
+/// [`AgentpinExtension`]. Convenience wrapper used by resolvers when they
+/// need to match the card's key against a discovery document.
+pub fn extension_key_thumbprint(ext: &AgentpinExtension) -> String {
+    jwk_thumbprint(&ext.public_key_jwk)
+}
+
+// ---------------------------------------------------------------------------
+// Canonicalisation
+// ---------------------------------------------------------------------------
+
+/// Produce the canonical signing input for an [`A2aAgentCard`].
+///
+/// The card is serialised to a `serde_json::Value`, the `Value` tree is
+/// rebuilt with object keys recursively sorted (see `sorted_canonical`), and
+/// the result is JSON-encoded — giving a stable byte sequence regardless of
+/// the order fields are emitted in. The same trick is used by SchemaPin for
+/// its schema canonicalisation.
+fn canonicalize_for_signing(card: &A2aAgentCard) -> Result, Error> { + // Serialise to a Value, then re-serialise with sorted keys. Cheap and + // avoids implementing a hand-rolled canonical form. + let value: serde_json::Value = serde_json::to_value(card)?; + let canonical = sorted_canonical(&value); + serde_json::to_vec(&canonical).map_err(Error::from) +} + +/// Recursively rebuild a [`serde_json::Value`] with object keys sorted. +fn sorted_canonical(value: &serde_json::Value) -> serde_json::Value { + match value { + serde_json::Value::Object(map) => { + let mut entries: Vec<(&String, &serde_json::Value)> = map.iter().collect(); + entries.sort_by(|a, b| a.0.cmp(b.0)); + let mut sorted = serde_json::Map::new(); + for (k, v) in entries { + sorted.insert(k.clone(), sorted_canonical(v)); + } + serde_json::Value::Object(sorted) + } + serde_json::Value::Array(items) => { + serde_json::Value::Array(items.iter().map(sorted_canonical).collect()) + } + other => other.clone(), + } +} + +// Trick to silence the `unused` lint on the trait import when only used in +// tests below — the public API does not need it. 
+#[allow(dead_code)] +fn _serialize_marker(_: &T) {} + +#[cfg(test)] +mod tests { + use super::*; + use crate::crypto::{generate_key_pair, KeyPair}; + use crate::types::capability::Capability; + use crate::types::constraint::Constraints; + use crate::types::discovery::{AgentDeclaration, AgentStatus}; + + fn declaration_with_caps(caps: Vec<&str>, allowed: Option>) -> AgentDeclaration { + AgentDeclaration { + agent_id: "urn:agentpin:example.com:test".to_string(), + agent_type: None, + name: "Test Agent".to_string(), + description: Some("test".to_string()), + version: Some("1.0.0".to_string()), + capabilities: caps.into_iter().map(Capability::from).collect(), + constraints: allowed.map(|d| Constraints { + allowed_domains: Some(d.into_iter().map(String::from).collect()), + ..Default::default() + }), + maker_attestation: None, + credential_ttl_max: Some(3600), + status: AgentStatus::Active, + directory_listing: None, + } + } + + fn keypair() -> KeyPair { + generate_key_pair().unwrap() + } + + #[test] + fn build_unsigned_maps_capabilities_to_skills() { + let decl = declaration_with_caps(vec!["read:customers", "write:invoices"], None); + let card = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .build_unsigned(); + assert_eq!(card.skills.len(), 2); + assert_eq!(card.skills[0].id, "read:customers"); + assert_eq!(card.skills[1].id, "write:invoices"); + assert!(card.agentpin.is_none(), "unsigned card has no extension"); + } + + #[test] + fn build_unsigned_maps_allowed_domains() { + let decl = declaration_with_caps(vec!["read:*"], Some(vec!["a.com", "b.com"])); + let card = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .build_unsigned(); + assert_eq!( + card.capabilities.allowed_domains.as_slice(), + &["a.com".to_string(), "b.com".to_string()] + ); + } + + #[test] + fn build_unsigned_treats_missing_constraints_as_unrestricted() { + let decl = declaration_with_caps(vec!["read:*"], None); + let card = 
A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .build_unsigned(); + assert!(card.capabilities.allowed_domains.is_unrestricted()); + } + + #[test] + fn sign_requires_agentpin_endpoint() { + let decl = declaration_with_caps(vec!["read:*"], None); + let kp = keypair(); + let result = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .sign(&kp.private_key_pem, "kid-1"); + assert!(matches!(result, Err(Error::Discovery(_)))); + } + + #[test] + fn signed_card_round_trips_and_verifies() { + let decl = declaration_with_caps( + vec!["read:customers", "write:invoices"], + Some(vec!["partner.com"]), + ); + let kp = keypair(); + let card = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .agentpin_endpoint("https://example.com/.well-known/agent-identity.json") + .streaming(true) + .sign(&kp.private_key_pem, "kid-1") + .unwrap(); + + // Card carries the extension and verifies cleanly. + assert!(card.agentpin.is_some()); + verify_agentpin_extension(&card).unwrap(); + + // Round-trip through JSON and re-verify. 
+ let json = serde_json::to_string(&card).unwrap(); + let parsed: A2aAgentCard = serde_json::from_str(&json).unwrap(); + verify_agentpin_extension(&parsed).unwrap(); + } + + #[test] + fn verify_fails_when_extension_missing() { + let decl = declaration_with_caps(vec!["read:*"], None); + let card = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .build_unsigned(); + let err = verify_agentpin_extension(&card).unwrap_err(); + assert!(matches!(err, Error::Discovery(_))); + } + + #[test] + fn verify_fails_when_card_tampered() { + let decl = declaration_with_caps(vec!["read:customers"], None); + let kp = keypair(); + let mut card = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .agentpin_endpoint("https://example.com/.well-known/agent-identity.json") + .sign(&kp.private_key_pem, "kid-1") + .unwrap(); + + // Tamper with the URL — signature should now fail. + card.url = "https://attacker.example/agent".to_string(); + let err = verify_agentpin_extension(&card).unwrap_err(); + assert!(matches!(err, Error::Discovery(_))); + } + + #[test] + fn extension_key_thumbprint_matches_jwk_thumbprint() { + let decl = declaration_with_caps(vec!["read:*"], None); + let kp = keypair(); + let card = A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl) + .agentpin_endpoint("https://example.com/.well-known/agent-identity.json") + .sign(&kp.private_key_pem, "kid-1") + .unwrap(); + let ext = card.agentpin.as_ref().unwrap(); + let from_helper = extension_key_thumbprint(ext); + let direct = jwk_thumbprint(&ext.public_key_jwk); + assert_eq!(from_helper, direct); + } +} diff --git a/crates/agentpin/src/discovery.rs b/crates/agentpin/src/discovery.rs index 8d8e7f9..9aa6262 100644 --- a/crates/agentpin/src/discovery.rs +++ b/crates/agentpin/src/discovery.rs @@ -23,6 +23,7 @@ pub fn build_discovery_document( )), policy_url: None, schemapin_endpoint: None, + a2a_endpoint: None, max_delegation_depth, updated_at: 
updated_at.to_string(), } diff --git a/crates/agentpin/src/dns.rs b/crates/agentpin/src/dns.rs new file mode 100644 index 0000000..94ab2e5 --- /dev/null +++ b/crates/agentpin/src/dns.rs @@ -0,0 +1,360 @@ +//! DNS TXT cross-verification at `_agentpin.{domain}` (v0.3.0). +//! +//! AgentPin's [`crate::types::discovery::DiscoveryDocument`] is published over +//! HTTPS at `.well-known/agent-identity.json`. v0.3.0 adds an OPTIONAL +//! second-channel verification: a DNS `TXT` record at `_agentpin.{domain}` +//! whose `fp=` field carries the JWK thumbprint of one of the discovery +//! document's public keys. DNS is administered through a separate credential +//! chain (registrar account, DNS provider, optionally DNSSEC) — compromising +//! one channel doesn't automatically give an attacker the other. +//! +//! The wire format mirrors SchemaPin's `_schemapin.{domain}` record exactly, +//! with the version tag changed: +//! +//! ```text +//! _agentpin.example.com. 3600 IN TXT "v=agentpin1; kid=acme-2026-04; fp=sha256:a1b2c3..." +//! ``` +//! +//! ## Verifier semantics +//! +//! - **Absent record** — no effect (DNS TXT is purely additive) +//! - **Present and matching** — verification succeeds; absence of mismatch is +//! the trust signal +//! - **Present and mismatching** — hard failure ([`Error::Discovery`]) +//! - **Present and malformed** — hard failure ([`Error::Discovery`]) +//! +//! The mismatch case is fail-closed because a publisher who *intentionally* +//! published a TXT record has signaled that DNS is part of their trust chain +//! — a divergence between DNS and `.well-known` indicates compromise of one +//! of the two channels, and there's no way for the verifier to tell which is +//! authentic. Better to refuse than to guess. + +use crate::error::Error; +use crate::jwk::jwk_thumbprint; +use crate::types::discovery::DiscoveryDocument; + +/// Parsed `_agentpin.{domain}` TXT record. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DnsTxtRecord { + pub version: String, + pub kid: Option, + /// Lowercase fingerprint string, including the `sha256:` prefix. + pub fingerprint: String, +} + +/// Parse a raw TXT record value (e.g. `"v=agentpin1; kid=acme-2026-04; fp=sha256:..."`). +/// +/// Whitespace around `;` and `=` is tolerated. Field order is not significant. +/// Returns an error if the record is missing the required `v` or `fp` fields, +/// or if the version isn't `agentpin1`. Unknown fields are ignored for forward +/// compatibility. +pub fn parse_txt_record(value: &str) -> Result { + let mut version: Option = None; + let mut kid: Option = None; + let mut fp: Option = None; + + for raw_part in value.split(';') { + let part = raw_part.trim(); + if part.is_empty() { + continue; + } + let (k, v) = part + .split_once('=') + .ok_or_else(|| Error::Discovery(format!("DNS TXT field missing '=': {}", part)))?; + let k = k.trim().to_ascii_lowercase(); + let v = v.trim(); + match k.as_str() { + "v" => version = Some(v.to_string()), + "kid" => kid = Some(v.to_string()), + "fp" => fp = Some(v.to_ascii_lowercase()), + // Forward-compat: ignore unknown fields rather than reject. + _ => {} + } + } + + let version = version + .ok_or_else(|| Error::Discovery("DNS TXT record missing required 'v' field".to_string()))?; + if version != "agentpin1" { + return Err(Error::Discovery(format!( + "DNS TXT unsupported version: {}", + version + ))); + } + let fingerprint = fp.ok_or_else(|| { + Error::Discovery("DNS TXT record missing required 'fp' field".to_string()) + })?; + if !fingerprint.starts_with("sha256:") { + return Err(Error::Discovery(format!( + "DNS TXT 'fp' must be sha256:: {}", + fingerprint + ))); + } + + Ok(DnsTxtRecord { + version, + kid, + fingerprint, + }) +} + +/// Cross-check the DNS TXT record's fingerprint against the discovery document. 
+/// +/// Returns `Ok(())` when the TXT `fp` matches the JWK thumbprint of *any* key +/// in `discovery.public_keys`. AgentPin discovery docs may carry multiple +/// keys (for rotation); a published TXT record need only match one of them. +/// +/// When the TXT carries a `kid`, the matching key MUST also carry the same +/// `kid` — defends against the case where two of the publisher's keys share +/// a fingerprint (vanishingly unlikely with SHA-256 but cheap to enforce). +pub fn verify_dns_match(discovery: &DiscoveryDocument, txt: &DnsTxtRecord) -> Result<(), Error> { + let target_fp = txt.fingerprint.to_ascii_lowercase(); + for jwk in &discovery.public_keys { + let computed = jwk_thumbprint(jwk).to_ascii_lowercase(); + let normalized = if computed.starts_with("sha256:") { + computed + } else { + format!("sha256:{}", computed) + }; + if normalized != target_fp { + continue; + } + // If the TXT specifies a kid, require it to match. + if let Some(ref txt_kid) = txt.kid { + if &jwk.kid != txt_kid { + continue; + } + } + return Ok(()); + } + Err(Error::Discovery(format!( + "DNS TXT fingerprint {} does not match any key in the discovery document", + target_fp + ))) +} + +/// Construct the DNS lookup name for a given AgentPin domain. +/// +/// Strips a trailing dot if present so callers can pass either `example.com` +/// or `example.com.`. +pub fn txt_record_name(domain: &str) -> String { + format!("_agentpin.{}", domain.trim_end_matches('.')) +} + +/// Fetch and parse the `_agentpin.{domain}` TXT record. Behind the `dns` feature. +/// +/// Returns: +/// - `Ok(Some(record))` — record present and parseable +/// - `Ok(None)` — no `_agentpin` TXT record exists for the domain +/// - `Err(_)` — DNS resolution error or the record exists but is malformed +/// +/// Multiple matching TXT chunks are joined per RFC 1464 (concatenation in +/// emit order). When several separate TXT records exist at the same name, +/// the first one whose value contains `v=agentpin1` is used. 
+#[cfg(feature = "dns")] +pub async fn fetch_dns_txt(domain: &str) -> Result, Error> { + use hickory_resolver::error::ResolveErrorKind; + use hickory_resolver::TokioAsyncResolver; + + let name = txt_record_name(domain); + let resolver = TokioAsyncResolver::tokio(Default::default(), Default::default()); + let lookup = match resolver.txt_lookup(&name).await { + Ok(l) => l, + Err(e) => { + if matches!(e.kind(), ResolveErrorKind::NoRecordsFound { .. }) { + return Ok(None); + } + return Err(Error::Discovery(format!( + "DNS TXT lookup failed for {}: {}", + name, e + ))); + } + }; + + for record in lookup.iter() { + // hickory yields TxtData as Vec>; concatenate chunks per RFC 1464. + let joined: String = record + .iter() + .map(|chunk| String::from_utf8_lossy(chunk).into_owned()) + .collect::>() + .join(""); + if joined.contains("v=agentpin1") { + return parse_txt_record(&joined).map(Some); + } + } + Ok(None) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::crypto::generate_key_pair; + use crate::jwk::pem_to_jwk; + use crate::types::discovery::EntityType; + + fn make_discovery(jwks: Vec) -> DiscoveryDocument { + DiscoveryDocument { + agentpin_version: "0.3".to_string(), + entity: "example.com".to_string(), + entity_type: EntityType::Maker, + public_keys: jwks, + agents: vec![], + revocation_endpoint: None, + policy_url: None, + schemapin_endpoint: None, + a2a_endpoint: None, + max_delegation_depth: 0, + updated_at: "2026-05-01T00:00:00Z".to_string(), + } + } + + #[test] + fn parse_full_record() { + let r = parse_txt_record("v=agentpin1; kid=acme-2026-04; fp=sha256:abcd1234").unwrap(); + assert_eq!(r.version, "agentpin1"); + assert_eq!(r.kid.as_deref(), Some("acme-2026-04")); + assert_eq!(r.fingerprint, "sha256:abcd1234"); + } + + #[test] + fn parse_minimal_record() { + let r = parse_txt_record("v=agentpin1;fp=sha256:abc").unwrap(); + assert_eq!(r.version, "agentpin1"); + assert_eq!(r.kid, None); + assert_eq!(r.fingerprint, "sha256:abc"); + } + + #[test] + fn 
parse_lowercases_fingerprint() { + let r = parse_txt_record("v=agentpin1; fp=SHA256:ABCDEF").unwrap(); + assert_eq!(r.fingerprint, "sha256:abcdef"); + } + + #[test] + fn parse_tolerates_whitespace_and_order() { + let r = parse_txt_record(" fp = sha256:beef ; v = agentpin1 ").unwrap(); + assert_eq!(r.version, "agentpin1"); + assert_eq!(r.fingerprint, "sha256:beef"); + } + + #[test] + fn parse_ignores_unknown_fields() { + let r = parse_txt_record("v=agentpin1; fp=sha256:abc; future=ignoreme").unwrap(); + assert_eq!(r.fingerprint, "sha256:abc"); + } + + #[test] + fn parse_missing_v_fails() { + assert!(parse_txt_record("fp=sha256:abc").is_err()); + } + + #[test] + fn parse_missing_fp_fails() { + assert!(parse_txt_record("v=agentpin1").is_err()); + } + + #[test] + fn parse_unsupported_version_fails() { + assert!(parse_txt_record("v=agentpin99; fp=sha256:abc").is_err()); + } + + #[test] + fn parse_fp_without_sha256_prefix_fails() { + assert!(parse_txt_record("v=agentpin1; fp=abc").is_err()); + } + + #[test] + fn parse_field_without_equals_fails() { + assert!(parse_txt_record("v=agentpin1; broken").is_err()); + } + + #[test] + fn schemapin_record_rejected_under_agentpin_parser() { + // Sanity: must reject SchemaPin's TXT format so a misconfigured DNS + // entry doesn't accidentally validate. 
+ assert!(parse_txt_record("v=schemapin1; fp=sha256:abc").is_err()); + } + + #[test] + fn verify_match_against_single_key() { + let kp = generate_key_pair().unwrap(); + let jwk = pem_to_jwk(&kp.public_key_pem, "kid-1").unwrap(); + let raw_fp = jwk_thumbprint(&jwk); + let normalized_fp = if raw_fp.starts_with("sha256:") { + raw_fp.clone() + } else { + format!("sha256:{}", raw_fp) + }; + + let discovery = make_discovery(vec![jwk]); + let txt = DnsTxtRecord { + version: "agentpin1".to_string(), + kid: None, + fingerprint: normalized_fp, + }; + verify_dns_match(&discovery, &txt).unwrap(); + } + + #[test] + fn verify_match_against_one_of_multiple_keys() { + let kp_a = generate_key_pair().unwrap(); + let kp_b = generate_key_pair().unwrap(); + let jwk_a = pem_to_jwk(&kp_a.public_key_pem, "kid-a").unwrap(); + let jwk_b = pem_to_jwk(&kp_b.public_key_pem, "kid-b").unwrap(); + let raw_fp_b = jwk_thumbprint(&jwk_b); + let normalized_fp_b = if raw_fp_b.starts_with("sha256:") { + raw_fp_b + } else { + format!("sha256:{}", raw_fp_b) + }; + + let discovery = make_discovery(vec![jwk_a, jwk_b]); + let txt = DnsTxtRecord { + version: "agentpin1".to_string(), + kid: Some("kid-b".to_string()), + fingerprint: normalized_fp_b, + }; + verify_dns_match(&discovery, &txt).unwrap(); + } + + #[test] + fn verify_kid_mismatch_fails_even_when_fp_matches() { + let kp = generate_key_pair().unwrap(); + let jwk = pem_to_jwk(&kp.public_key_pem, "kid-real").unwrap(); + let raw_fp = jwk_thumbprint(&jwk); + let normalized_fp = if raw_fp.starts_with("sha256:") { + raw_fp + } else { + format!("sha256:{}", raw_fp) + }; + + let discovery = make_discovery(vec![jwk]); + let txt = DnsTxtRecord { + version: "agentpin1".to_string(), + kid: Some("kid-different".to_string()), + fingerprint: normalized_fp, + }; + let err = verify_dns_match(&discovery, &txt).unwrap_err(); + assert!(matches!(err, Error::Discovery(_))); + } + + #[test] + fn verify_mismatch_returns_discovery_error() { + let kp = 
generate_key_pair().unwrap(); + let jwk = pem_to_jwk(&kp.public_key_pem, "kid-1").unwrap(); + let discovery = make_discovery(vec![jwk]); + let txt = DnsTxtRecord { + version: "agentpin1".to_string(), + kid: None, + fingerprint: "sha256:0000000000000000000000000000000000000000000000000000000000000000" + .to_string(), + }; + let err = verify_dns_match(&discovery, &txt).unwrap_err(); + assert!(matches!(err, Error::Discovery(_))); + } + + #[test] + fn txt_record_name_strips_trailing_dot() { + assert_eq!(txt_record_name("example.com"), "_agentpin.example.com"); + assert_eq!(txt_record_name("example.com."), "_agentpin.example.com"); + } +} diff --git a/crates/agentpin/src/error.rs b/crates/agentpin/src/error.rs index 9734445..c02f9ce 100644 --- a/crates/agentpin/src/error.rs +++ b/crates/agentpin/src/error.rs @@ -38,6 +38,9 @@ pub enum Error { #[error("Delegation error: {0}")] Delegation(String), + #[error("Transport error: {0}")] + Transport(String), + #[error("IO error: {0}")] Io(#[from] std::io::Error), diff --git a/crates/agentpin/src/lib.rs b/crates/agentpin/src/lib.rs index cc2b9a8..154db75 100644 --- a/crates/agentpin/src/lib.rs +++ b/crates/agentpin/src/lib.rs @@ -9,6 +9,20 @@ pub mod credential; pub mod delegation; pub mod discovery; pub mod mutual; +pub mod nonce; pub mod pinning; pub mod revocation; +pub mod rotation; +pub mod transport; pub mod verification; + +// v0.3.0: A2A AgentCard signing + verification, plus two new resolvers +// (LocalAgentCardStore always available; A2aAgentCardResolver behind `fetch`). +pub mod a2a; +#[cfg(feature = "fetch")] +pub mod resolver_a2a; +pub mod resolver_local; + +// v0.3.0: DNS TXT cross-verification at `_agentpin.{domain}`. Parser/matcher +// always available; the async fetcher lives behind the `dns` Cargo feature. 
+pub mod dns; diff --git a/crates/agentpin/src/mutual.rs b/crates/agentpin/src/mutual.rs index 7be4945..f951803 100644 --- a/crates/agentpin/src/mutual.rs +++ b/crates/agentpin/src/mutual.rs @@ -1,3 +1,5 @@ +use std::time::Duration; + use base64::{engine::general_purpose::URL_SAFE_NO_PAD as BASE64URL, Engine}; use chrono::Utc; use p256::ecdsa::{SigningKey, VerifyingKey}; @@ -5,6 +7,7 @@ use rand::RngCore; use crate::crypto; use crate::error::Error; +use crate::nonce::NonceStore; use crate::types::mutual::{Challenge, Response}; const NONCE_EXPIRY_SECS: i64 = 60; @@ -39,6 +42,19 @@ pub fn verify_response( response: &Response, challenge: &Challenge, verifying_key: &VerifyingKey, +) -> Result { + verify_response_with_nonce_store(response, challenge, verifying_key, None) +} + +/// Verify a challenge response with optional nonce deduplication. +/// +/// When a `nonce_store` is provided, the response nonce is checked against +/// previously seen nonces to prevent replay attacks within the validity window. 
+pub fn verify_response_with_nonce_store( + response: &Response, + challenge: &Challenge, + verifying_key: &VerifyingKey, + nonce_store: Option<&dyn NonceStore>, ) -> Result { // Check nonce matches if response.nonce != challenge.nonce { @@ -56,6 +72,14 @@ pub fn verify_response( } } + // Check nonce deduplication if a store is provided + if let Some(store) = nonce_store { + let fresh = store.check_and_record(&response.nonce, Duration::from_secs(60))?; + if !fresh { + return Err(Error::Jwt("Nonce has already been used".to_string())); + } + } + // Verify signature over the nonce crypto::verify_bytes( verifying_key, @@ -146,4 +170,41 @@ mod tests { Some("eyJ...test-jwt".to_string()) ); } + + #[test] + fn test_verify_with_nonce_store() { + let kp = crypto::generate_key_pair().unwrap(); + let sk = crypto::load_signing_key(&kp.private_key_pem).unwrap(); + let vk = crypto::load_verifying_key(&kp.public_key_pem).unwrap(); + + let store = crate::nonce::InMemoryNonceStore::new(); + let challenge = create_challenge(None); + let response = create_response(&challenge, &sk, "test-key"); + + // First verification should succeed. + let valid = + verify_response_with_nonce_store(&response, &challenge, &vk, Some(&store)).unwrap(); + assert!(valid); + + // Second verification with the same nonce should fail (replay). + let result = verify_response_with_nonce_store(&response, &challenge, &vk, Some(&store)); + assert!(result.is_err(), "Replayed nonce should be rejected"); + } + + #[test] + fn test_verify_without_nonce_store() { + let kp = crypto::generate_key_pair().unwrap(); + let sk = crypto::load_signing_key(&kp.private_key_pem).unwrap(); + let vk = crypto::load_verifying_key(&kp.public_key_pem).unwrap(); + + let challenge = create_challenge(None); + let response = create_response(&challenge, &sk, "test-key"); + + // Without a nonce store, same nonce can be verified multiple times. 
+ let valid1 = verify_response(&response, &challenge, &vk).unwrap(); + assert!(valid1); + + let valid2 = verify_response(&response, &challenge, &vk).unwrap(); + assert!(valid2); + } } diff --git a/crates/agentpin/src/nonce.rs b/crates/agentpin/src/nonce.rs new file mode 100644 index 0000000..d3f1ce4 --- /dev/null +++ b/crates/agentpin/src/nonce.rs @@ -0,0 +1,103 @@ +use std::collections::HashMap; +use std::sync::Mutex; +use std::time::{Duration, Instant}; + +use crate::error::Error; + +/// Trait for nonce deduplication stores. +pub trait NonceStore: Send + Sync { + /// Check if a nonce has been seen before. If not, record it with the given TTL. + /// Returns `Ok(true)` if the nonce is fresh (not seen before). + /// Returns `Ok(false)` if the nonce has already been used (replay). + fn check_and_record(&self, nonce: &str, ttl: Duration) -> Result; +} + +/// In-memory nonce store with lazy expiry cleanup. +pub struct InMemoryNonceStore { + entries: Mutex>, +} + +impl InMemoryNonceStore { + /// Create a new empty nonce store. + pub fn new() -> Self { + Self { + entries: Mutex::new(HashMap::new()), + } + } +} + +impl Default for InMemoryNonceStore { + fn default() -> Self { + Self::new() + } +} + +impl NonceStore for InMemoryNonceStore { + fn check_and_record(&self, nonce: &str, ttl: Duration) -> Result { + let mut map = self + .entries + .lock() + .map_err(|e| Error::Jwt(format!("Nonce store lock poisoned: {}", e)))?; + + let now = Instant::now(); + + // Lazy cleanup: remove all expired entries. + map.retain(|_, expiry| *expiry > now); + + // Check if the nonce is already present (and not expired, since we just cleaned). + if map.contains_key(nonce) { + return Ok(false); + } + + // Record the nonce with its expiry. 
+ map.insert(nonce.to_string(), now + ttl); + Ok(true) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fresh_nonce_accepted() { + let store = InMemoryNonceStore::new(); + let result = store + .check_and_record("nonce-1", Duration::from_secs(60)) + .unwrap(); + assert!(result, "First use of a nonce should return true"); + } + + #[test] + fn test_duplicate_nonce_rejected() { + let store = InMemoryNonceStore::new(); + let ttl = Duration::from_secs(60); + store.check_and_record("nonce-dup", ttl).unwrap(); + let result = store.check_and_record("nonce-dup", ttl).unwrap(); + assert!(!result, "Second use of the same nonce should return false"); + } + + #[test] + fn test_expired_nonce_reusable() { + let store = InMemoryNonceStore::new(); + let ttl = Duration::from_millis(1); + store.check_and_record("nonce-exp", ttl).unwrap(); + + std::thread::sleep(Duration::from_millis(10)); + + let result = store.check_and_record("nonce-exp", ttl).unwrap(); + assert!(result, "Expired nonce should be accepted again"); + } + + #[test] + fn test_concurrent_safety() { + let store = InMemoryNonceStore::new(); + let ttl = Duration::from_secs(60); + + let first = store.check_and_record("nonce-cc", ttl).unwrap(); + assert!(first); + + let second = store.check_and_record("nonce-cc", ttl).unwrap(); + assert!(!second); + } +} diff --git a/crates/agentpin/src/resolver_a2a.rs b/crates/agentpin/src/resolver_a2a.rs new file mode 100644 index 0000000..cd6a36a --- /dev/null +++ b/crates/agentpin/src/resolver_a2a.rs @@ -0,0 +1,132 @@ +//! [`A2aAgentCardResolver`] — fetches A2A AgentCards over HTTPS (v0.3.0). +//! +//! Implements [`crate::resolver::AsyncDiscoveryResolver`] (gated on the +//! `fetch` feature). Performs: +//! +//! 1. `GET https://{domain}/.well-known/agent-card.json` +//! 2. JSON-decode into [`A2aAgentCard`] +//! 3. Verify the AgentPin extension signature against its embedded JWK +//! 4. Derive a [`DiscoveryDocument`] from the card so the rest of the +//! 
AgentPin verification stack can run unchanged +//! +//! The resulting AgentCard is also exposed via [`A2aAgentCardResolver::last_card`] +//! for callers who want to inspect the original A2A representation alongside +//! the derived discovery doc. + +#![cfg(feature = "fetch")] + +use std::sync::Mutex; +use std::time::Duration; + +use crate::a2a::verify_agentpin_extension; +use crate::error::Error; +use crate::resolver::AsyncDiscoveryResolver; +use crate::resolver_local::{card_endpoint_host, derive_discovery_from_card}; +use crate::types::a2a::A2aAgentCard; +use crate::types::discovery::DiscoveryDocument; +use crate::types::revocation::RevocationDocument; + +const AGENT_CARD_PATH: &str = "/.well-known/agent-card.json"; +const DEFAULT_TIMEOUT_SECS: u64 = 10; + +/// HTTPS resolver for A2A AgentCards published at `.well-known/agent-card.json`. +pub struct A2aAgentCardResolver { + timeout: Duration, + last_card: Mutex>, +} + +impl A2aAgentCardResolver { + /// Construct with the default 10s timeout. + pub fn new() -> Self { + Self { + timeout: Duration::from_secs(DEFAULT_TIMEOUT_SECS), + last_card: Mutex::new(None), + } + } + + /// Construct with a custom HTTP request timeout. + pub fn with_timeout(timeout: Duration) -> Self { + Self { + timeout, + last_card: Mutex::new(None), + } + } + + /// Return the last successfully resolved AgentCard, if any. + /// + /// Useful for callers that want to inspect the A2A card's URL, + /// capabilities, or skill list after `resolve_discovery` has converted + /// it into a [`DiscoveryDocument`]. 
+ pub fn last_card(&self, domain: &str) -> Option { + self.last_card.lock().ok().and_then(|guard| { + guard + .as_ref() + .and_then(|(d, c)| if d == domain { Some(c.clone()) } else { None }) + }) + } +} + +impl Default for A2aAgentCardResolver { + fn default() -> Self { + Self::new() + } +} + +#[async_trait::async_trait] +impl AsyncDiscoveryResolver for A2aAgentCardResolver { + async fn resolve_discovery(&self, domain: &str) -> Result { + let url = format!("https://{}{}", domain, AGENT_CARD_PATH); + let client = reqwest::Client::builder() + .timeout(self.timeout) + .build() + .map_err(|e| Error::Discovery(format!("HTTP client init failed: {e}")))?; + let response = client + .get(&url) + .send() + .await + .map_err(|e| Error::Discovery(format!("Failed to fetch {}: {}", url, e)))?; + if !response.status().is_success() { + return Err(Error::Discovery(format!( + "Failed to fetch {}: HTTP {}", + url, + response.status() + ))); + } + let card: A2aAgentCard = response.json().await.map_err(|e| { + Error::Discovery(format!("Failed to parse AgentCard at {}: {}", url, e)) + })?; + + // Verify the extension signature before trusting any field. + verify_agentpin_extension(&card)?; + + // Cross-check that the agentpin endpoint inside the card matches the + // domain we just fetched from — defends against a card that points at + // some other domain's AgentPin discovery. + let endpoint_host = card_endpoint_host(&card)?; + if endpoint_host != domain { + return Err(Error::Discovery(format!( + "AgentCard at {} declares agentpin endpoint host {} (mismatch)", + domain, endpoint_host + ))); + } + + let discovery = derive_discovery_from_card(&card)?; + + // Cache the original card for callers that want to inspect it. 
+        if let Ok(mut guard) = self.last_card.lock() {
+            *guard = Some((domain.to_string(), card));
+        }
+
+        Ok(discovery)
+    }
+
+    async fn resolve_revocation(
+        &self,
+        _domain: &str,
+        _discovery: &DiscoveryDocument,
+    ) -> Result<Option<RevocationDocument>, Error> {
+        // A2A AgentCards do not carry revocation data. Fall back to a separate
+        // revocation resolver via ChainResolver if revocation is required.
+        Ok(None)
+    }
+}
diff --git a/crates/agentpin/src/resolver_local.rs b/crates/agentpin/src/resolver_local.rs
new file mode 100644
index 0000000..7d2263c
--- /dev/null
+++ b/crates/agentpin/src/resolver_local.rs
@@ -0,0 +1,336 @@
+//! [`LocalAgentCardStore`] — in-memory A2A AgentCard store (v0.3.0).
+//!
+//! For agents that do not serve HTTP themselves (CLI tools, daemon processes,
+//! external agents pushed into a coordinator at registration time), the
+//! coordinator can keep their AgentCards in memory and resolve them via
+//! [`DiscoveryResolver`] without making any network calls.
+//!
+//! This supports Symbiont v1.7.0's push-based external-agent registration
+//! flow, where a coordinator receives the AgentCard JSON inline rather than
+//! fetching it from a `.well-known` endpoint.
+
+use std::collections::HashMap;
+use std::sync::Mutex;
+
+use crate::a2a::verify_agentpin_extension;
+use crate::error::Error;
+use crate::resolver::DiscoveryResolver;
+use crate::types::a2a::A2aAgentCard;
+use crate::types::discovery::DiscoveryDocument;
+use crate::types::revocation::RevocationDocument;
+
+/// In-memory store of pre-registered A2A AgentCards keyed by their AgentPin
+/// discovery domain.
+///
+/// Cards are added via [`register`](Self::register) (after their AgentPin
+/// extension signature is verified) and looked up via the
+/// [`DiscoveryResolver`] trait — see [`LocalAgentCardStore::resolve_card`] for
+/// raw card access.
+///
+/// Pair with [`crate::resolver::ChainResolver`] to fall back to HTTP fetches
+/// for domains that aren't pre-registered.
+pub struct LocalAgentCardStore {
+    /// `agentpin_endpoint -> (card, derived_discovery_doc)`. We pre-derive the
+    /// discovery doc at registration time so [`DiscoveryResolver`] is cheap.
+    inner: Mutex<HashMap<String, StoredCard>>,
+}
+
+struct StoredCard {
+    card: A2aAgentCard,
+    discovery: DiscoveryDocument,
+}
+
+impl LocalAgentCardStore {
+    /// Construct an empty store.
+    pub fn new() -> Self {
+        Self {
+            inner: Mutex::new(HashMap::new()),
+        }
+    }
+
+    /// Register an AgentCard for the domain implied by its agentpin endpoint.
+    ///
+    /// Verifies the AgentPin extension signature before storing. The card is
+    /// keyed by its `agentpin_endpoint` host so [`resolve_discovery`] /
+    /// [`resolve_revocation`] can find it later.
+    ///
+    /// Re-registering an existing domain replaces the prior entry — useful
+    /// for handling key rotations on long-lived coordinators.
+    pub fn register(&self, card: A2aAgentCard) -> Result<(), Error> {
+        verify_agentpin_extension(&card)?;
+        let domain = card_endpoint_host(&card)?;
+        let discovery = derive_discovery_from_card(&card)?;
+        let stored = StoredCard {
+            card: card.clone(),
+            discovery,
+        };
+        self.inner
+            .lock()
+            .map_err(|e| Error::Discovery(format!("LocalAgentCardStore mutex poisoned: {e}")))?
+            .insert(domain, stored);
+        Ok(())
+    }
+
+    /// Number of registered AgentCards.
+    pub fn len(&self) -> usize {
+        self.inner.lock().map(|m| m.len()).unwrap_or(0)
+    }
+
+    /// `true` when no AgentCards are registered.
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Return the raw [`A2aAgentCard`] for a given domain, if registered.
+    pub fn resolve_card(&self, domain: &str) -> Option<A2aAgentCard> {
+        self.inner
+            .lock()
+            .ok()
+            .and_then(|m| m.get(domain).map(|s| s.card.clone()))
+    }
+
+    /// Drop a registered AgentCard. Returns `true` when one was removed.
+    pub fn remove(&self, domain: &str) -> bool {
+        self.inner
+            .lock()
+            .map(|mut m| m.remove(domain).is_some())
+            .unwrap_or(false)
+    }
+}
+
+impl Default for LocalAgentCardStore {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl DiscoveryResolver for LocalAgentCardStore {
+    fn resolve_discovery(&self, domain: &str) -> Result<DiscoveryDocument, Error> {
+        let map = self
+            .inner
+            .lock()
+            .map_err(|e| Error::Discovery(format!("LocalAgentCardStore mutex poisoned: {e}")))?;
+        map.get(domain).map(|s| s.discovery.clone()).ok_or_else(|| {
+            Error::Discovery(format!("Domain '{}' not in LocalAgentCardStore", domain))
+        })
+    }
+
+    fn resolve_revocation(
+        &self,
+        _domain: &str,
+        _discovery: &DiscoveryDocument,
+    ) -> Result<Option<RevocationDocument>, Error> {
+        // The store doesn't carry revocation data. Pair with `ChainResolver`
+        // and a HTTP / file resolver for revocation fallback.
+        Ok(None)
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/// Derive the host portion of the AgentCard's agentpin endpoint URL.
+///
+/// `https://example.com/.well-known/agent-identity.json` -> `example.com`.
+pub(crate) fn card_endpoint_host(card: &A2aAgentCard) -> Result<String, Error> {
+    let ext = card
+        .agentpin
+        .as_ref()
+        .ok_or_else(|| Error::Discovery("AgentCard has no agentpin extension".to_string()))?;
+    let url = url::Url::parse(&ext.agentpin_endpoint)
+        .map_err(|e| Error::Discovery(format!("Invalid agentpin_endpoint URL: {e}")))?;
+    url.host_str()
+        .map(|h| h.to_string())
+        .ok_or_else(|| Error::Discovery("agentpin_endpoint URL has no host".to_string()))
+}
+
+/// Derive a minimal [`DiscoveryDocument`] from an A2A AgentCard.
+///
+/// The card's [`AgentpinExtension::public_key_jwk`] becomes the sole entry in
+/// `public_keys`; the card's name/description/version/capabilities become a
+/// single [`AgentDeclaration`].
This lets the rest of the AgentPin verification
+/// stack (TOFU pinning, revocation checking, capability validation) run
+/// against AgentCards exactly the way it runs against fetched discovery docs.
+pub(crate) fn derive_discovery_from_card(card: &A2aAgentCard) -> Result<DiscoveryDocument, Error> {
+    use crate::types::discovery::{AgentDeclaration, AgentStatus, EntityType};
+
+    let extension = card
+        .agentpin
+        .as_ref()
+        .ok_or_else(|| Error::Discovery("AgentCard has no agentpin extension".to_string()))?;
+    let domain = card_endpoint_host(card)?;
+
+    // Reverse-engineer the capability list from skill IDs (built by
+    // capability_to_skill on the issuer side).
+    let capabilities = card
+        .skills
+        .iter()
+        .map(|s| crate::types::capability::Capability::from(s.id.as_str()))
+        .collect();
+
+    let constraints = if card.capabilities.allowed_domains.is_unrestricted() {
+        None
+    } else {
+        Some(crate::types::constraint::Constraints {
+            allowed_domains: Some(card.capabilities.allowed_domains.0.clone()),
+            ..Default::default()
+        })
+    };
+
+    let agent_id = format!("urn:agentpin:{}:{}", domain, slug(&card.name));
+
+    let declaration = AgentDeclaration {
+        agent_id,
+        agent_type: None,
+        name: card.name.clone(),
+        description: card.description.clone(),
+        version: card.version.clone(),
+        capabilities,
+        constraints,
+        maker_attestation: None,
+        credential_ttl_max: None,
+        status: AgentStatus::Active,
+        directory_listing: None,
+    };
+
+    Ok(DiscoveryDocument {
+        agentpin_version: "0.3".to_string(),
+        entity: domain,
+        entity_type: EntityType::Both,
+        public_keys: vec![extension.public_key_jwk.clone()],
+        agents: vec![declaration],
+        revocation_endpoint: None,
+        policy_url: None,
+        schemapin_endpoint: None,
+        a2a_endpoint: Some(extension.agentpin_endpoint.clone()),
+        max_delegation_depth: 0,
+        updated_at: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
+    })
+}
+
+fn slug(input: &str) -> String {
+    input
+        .chars()
+        .map(|c| {
+            if c.is_ascii_alphanumeric() {
c.to_ascii_lowercase()
+            } else {
+                '-'
+            }
+        })
+        .collect::<String>()
+        .trim_matches('-')
+        .to_string()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::a2a::A2aAgentCardBuilder;
+    use crate::crypto::generate_key_pair;
+    use crate::types::capability::Capability;
+    use crate::types::constraint::Constraints;
+    use crate::types::discovery::{AgentDeclaration, AgentStatus};
+
+    fn declaration() -> AgentDeclaration {
+        AgentDeclaration {
+            agent_id: "urn:agentpin:example.com:tester".to_string(),
+            agent_type: None,
+            name: "Tester".to_string(),
+            description: Some("Test agent".to_string()),
+            version: Some("1.0.0".to_string()),
+            capabilities: vec![Capability::from("read:*")],
+            constraints: Some(Constraints {
+                allowed_domains: Some(vec!["partner.com".to_string()]),
+                ..Default::default()
+            }),
+            maker_attestation: None,
+            credential_ttl_max: Some(3600),
+            status: AgentStatus::Active,
+            directory_listing: None,
+        }
+    }
+
+    fn signed_card() -> A2aAgentCard {
+        let kp = generate_key_pair().unwrap();
+        let decl = declaration();
+        A2aAgentCardBuilder::from_declaration("https://example.com/agent", &decl)
+            .agentpin_endpoint("https://example.com/.well-known/agent-identity.json")
+            .sign(&kp.private_key_pem, "kid-1")
+            .unwrap()
+    }
+
+    #[test]
+    fn register_then_resolve() {
+        let store = LocalAgentCardStore::new();
+        store.register(signed_card()).unwrap();
+        assert_eq!(store.len(), 1);
+        let doc = store.resolve_discovery("example.com").unwrap();
+        assert_eq!(doc.entity, "example.com");
+        assert_eq!(doc.public_keys.len(), 1);
+        assert_eq!(doc.agents.len(), 1);
+        assert_eq!(doc.agents[0].name, "Tester");
+    }
+
+    #[test]
+    fn register_propagates_signature_failure() {
+        let mut card = signed_card();
+        card.url = "https://attacker.example/agent".to_string(); // tampered
+        let store = LocalAgentCardStore::new();
+        assert!(store.register(card).is_err());
+        assert!(store.is_empty());
+    }
+
+    #[test]
+    fn resolve_discovery_missing_returns_err() {
+        let store =
LocalAgentCardStore::new(); + assert!(store.resolve_discovery("missing.com").is_err()); + } + + #[test] + fn re_register_replaces_prior_entry() { + let store = LocalAgentCardStore::new(); + store.register(signed_card()).unwrap(); + store.register(signed_card()).unwrap(); + assert_eq!(store.len(), 1); + } + + #[test] + fn remove_drops_entry() { + let store = LocalAgentCardStore::new(); + store.register(signed_card()).unwrap(); + assert!(store.remove("example.com")); + assert!(store.is_empty()); + assert!(!store.remove("example.com")); + } + + #[test] + fn resolve_card_returns_clone() { + let store = LocalAgentCardStore::new(); + store.register(signed_card()).unwrap(); + let card = store.resolve_card("example.com").unwrap(); + assert_eq!(card.name, "Tester"); + } + + #[test] + fn allowed_domains_propagate_into_derived_doc() { + let store = LocalAgentCardStore::new(); + store.register(signed_card()).unwrap(); + let doc = store.resolve_discovery("example.com").unwrap(); + let constraints = doc.agents[0].constraints.as_ref().unwrap(); + assert_eq!( + constraints.allowed_domains.as_ref().unwrap(), + &vec!["partner.com".to_string()] + ); + } + + #[test] + fn revocation_lookup_returns_none() { + let store = LocalAgentCardStore::new(); + store.register(signed_card()).unwrap(); + let doc = store.resolve_discovery("example.com").unwrap(); + let rev = store.resolve_revocation("example.com", &doc).unwrap(); + assert!(rev.is_none()); + } +} diff --git a/crates/agentpin/src/rotation.rs b/crates/agentpin/src/rotation.rs new file mode 100644 index 0000000..86f8f1e --- /dev/null +++ b/crates/agentpin/src/rotation.rs @@ -0,0 +1,147 @@ +use chrono::Utc; + +use crate::crypto::{generate_key_id, generate_key_pair, load_verifying_key, KeyPair}; +use crate::error::Error; +use crate::jwk::{verifying_key_to_jwk, Jwk}; +use crate::revocation::add_revoked_key; +use crate::types::discovery::DiscoveryDocument; +use crate::types::revocation::{RevocationDocument, RevocationReason}; + +/// A plan 
for key rotation, returned by `prepare_rotation()`.
+pub struct RotationPlan {
+    pub new_key_pair: KeyPair,
+    pub new_kid: String,
+    pub new_jwk: Jwk,
+    pub old_kid: String,
+}
+
+/// Prepare a key rotation: generate a new key pair, compute its kid and JWK.
+///
+/// The caller should then call `apply_rotation` to add the new key to a
+/// discovery document and, after an overlap window, `complete_rotation` to
+/// retire the old key.
+pub fn prepare_rotation(old_kid: &str) -> Result<RotationPlan, Error> {
+    let new_key_pair = generate_key_pair()?;
+    let new_kid = generate_key_id(&new_key_pair.public_key_pem)?;
+    let vk = load_verifying_key(&new_key_pair.public_key_pem)?;
+    let new_jwk = verifying_key_to_jwk(&vk, &new_kid);
+
+    Ok(RotationPlan {
+        new_key_pair,
+        new_kid,
+        new_jwk,
+        old_kid: old_kid.to_string(),
+    })
+}
+
+/// Apply a rotation plan to a discovery document: add the new key while
+/// keeping the old key active. Updates the `updated_at` timestamp.
+pub fn apply_rotation(doc: &mut DiscoveryDocument, plan: &RotationPlan) -> Result<(), Error> {
+    doc.public_keys.push(plan.new_jwk.clone());
+    doc.updated_at = Utc::now().to_rfc3339();
+    Ok(())
+}
+
+/// Complete a rotation: remove the old key from the discovery document and
+/// record it in the revocation document.
+///
+/// Call this after the overlap window has passed so that relying parties have
+/// had time to pick up the new key.
+pub fn complete_rotation(
+    doc: &mut DiscoveryDocument,
+    revocation_doc: &mut RevocationDocument,
+    old_kid: &str,
+    reason: RevocationReason,
+) -> Result<(), Error> {
+    doc.public_keys.retain(|k| k.kid != old_kid);
+    doc.updated_at = Utc::now().to_rfc3339();
+    add_revoked_key(revocation_doc, old_kid, reason);
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::jwk::pem_to_jwk;
+    use crate::revocation::build_revocation_document;
+    use crate::types::discovery::{DiscoveryDocument, EntityType};
+
+    fn make_discovery_doc(keys: Vec<Jwk>) -> DiscoveryDocument {
+        DiscoveryDocument {
+            agentpin_version: "0.1".to_string(),
+            entity: "example.com".to_string(),
+            entity_type: EntityType::Maker,
+            public_keys: keys,
+            agents: vec![],
+            revocation_endpoint: None,
+            policy_url: None,
+            schemapin_endpoint: None,
+            a2a_endpoint: None,
+            max_delegation_depth: 2,
+            updated_at: "2026-01-01T00:00:00Z".to_string(),
+        }
+    }
+
+    #[test]
+    fn test_prepare_rotation() {
+        let plan = prepare_rotation("old-kid-placeholder").unwrap();
+        // kid is a SHA-256 hex digest: 64 hex chars
+        assert_eq!(plan.new_kid.len(), 64);
+        assert!(plan.new_kid.chars().all(|c| c.is_ascii_hexdigit()));
+        assert_eq!(plan.old_kid, "old-kid-placeholder");
+        assert_eq!(plan.new_jwk.kid, plan.new_kid);
+        assert_eq!(plan.new_jwk.kty, "EC");
+        assert_eq!(plan.new_jwk.crv, "P-256");
+    }
+
+    #[test]
+    fn test_apply_rotation() {
+        let old_kp = generate_key_pair().unwrap();
+        let old_kid = generate_key_id(&old_kp.public_key_pem).unwrap();
+        let old_jwk = pem_to_jwk(&old_kp.public_key_pem, &old_kid).unwrap();
+
+        let mut doc = make_discovery_doc(vec![old_jwk]);
+        assert_eq!(doc.public_keys.len(), 1);
+
+        let plan = prepare_rotation(&old_kid).unwrap();
+        apply_rotation(&mut doc, &plan).unwrap();
+
+        assert_eq!(doc.public_keys.len(), 2);
+        assert!(doc.public_keys.iter().any(|k| k.kid == plan.new_kid));
+        assert!(doc.public_keys.iter().any(|k| k.kid == old_kid));
+        assert_ne!(doc.updated_at,
"2026-01-01T00:00:00Z");
+    }
+
+    #[test]
+    fn test_complete_rotation() {
+        let old_kp = generate_key_pair().unwrap();
+        let old_kid = generate_key_id(&old_kp.public_key_pem).unwrap();
+        let old_jwk = pem_to_jwk(&old_kp.public_key_pem, &old_kid).unwrap();
+
+        let mut doc = make_discovery_doc(vec![old_jwk]);
+        let plan = prepare_rotation(&old_kid).unwrap();
+        apply_rotation(&mut doc, &plan).unwrap();
+        assert_eq!(doc.public_keys.len(), 2);
+
+        let mut revocation_doc = build_revocation_document("example.com");
+        complete_rotation(
+            &mut doc,
+            &mut revocation_doc,
+            &old_kid,
+            RevocationReason::Superseded,
+        )
+        .unwrap();
+
+        // Old key removed from discovery
+        assert_eq!(doc.public_keys.len(), 1);
+        assert_eq!(doc.public_keys[0].kid, plan.new_kid);
+
+        // Old key added to revocation
+        assert_eq!(revocation_doc.revoked_keys.len(), 1);
+        assert_eq!(revocation_doc.revoked_keys[0].kid, old_kid);
+        assert_eq!(
+            revocation_doc.revoked_keys[0].reason,
+            RevocationReason::Superseded
+        );
+    }
+}
diff --git a/crates/agentpin/src/transport/grpc.rs b/crates/agentpin/src/transport/grpc.rs
new file mode 100644
index 0000000..b8c44f9
--- /dev/null
+++ b/crates/agentpin/src/transport/grpc.rs
@@ -0,0 +1,59 @@
+//! gRPC metadata transport binding.
+//!
+//! Extracts and formats AgentPin credentials in gRPC metadata
+//! via the `agentpin-credential` key.
+
+use crate::error::Error;
+
+/// The gRPC metadata key for AgentPin credentials.
+pub const METADATA_KEY: &str = "agentpin-credential";
+
+/// Extract the JWT from a gRPC metadata value.
+///
+/// Validates that the value is non-empty and returns it as the JWT string.
+pub fn extract_credential(metadata_value: &str) -> Result<String, Error> {
+    if metadata_value.is_empty() {
+        return Err(Error::Transport(
+            "Empty gRPC metadata value for agentpin-credential".into(),
+        ));
+    }
+
+    Ok(metadata_value.to_string())
+}
+
+/// Format a JWT for use as a gRPC metadata value.
+///
+/// Returns the JWT string directly.
The caller should attach it
+/// to the `agentpin-credential` metadata key.
+pub fn format_metadata_value(jwt: &str) -> String {
+    jwt.to_string()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_extract_valid_metadata() {
+        let jwt = extract_credential("eyJ.payload.sig").unwrap();
+        assert_eq!(jwt, "eyJ.payload.sig");
+    }
+
+    #[test]
+    fn test_extract_empty_value() {
+        assert!(extract_credential("").is_err());
+    }
+
+    #[test]
+    fn test_metadata_key() {
+        assert_eq!(METADATA_KEY, "agentpin-credential");
+    }
+
+    #[test]
+    fn test_format_roundtrip() {
+        let jwt = "eyJ.payload.sig";
+        let value = format_metadata_value(jwt);
+        let extracted = extract_credential(&value).unwrap();
+        assert_eq!(extracted, jwt);
+    }
+}
diff --git a/crates/agentpin/src/transport/http.rs b/crates/agentpin/src/transport/http.rs
new file mode 100644
index 0000000..c751293
--- /dev/null
+++ b/crates/agentpin/src/transport/http.rs
@@ -0,0 +1,62 @@
+//! HTTP header transport binding.
+//!
+//! Extracts and formats AgentPin credentials in `Authorization: AgentPin <jwt>` headers.
+
+use crate::error::Error;
+
+const PREFIX: &str = "AgentPin ";
+
+/// Extract the JWT from an `Authorization` header value.
+///
+/// Expects the format `AgentPin <jwt>`. Returns the raw JWT string.
+pub fn extract_credential(header_value: &str) -> Result<String, Error> {
+    let jwt = header_value.strip_prefix(PREFIX).ok_or_else(|| {
+        Error::Transport("Missing 'AgentPin ' prefix in Authorization header".into())
+    })?;
+
+    if jwt.is_empty() {
+        return Err(Error::Transport(
+            "Empty credential in Authorization header".into(),
+        ));
+    }
+
+    Ok(jwt.to_string())
+}
+
+/// Format a JWT for use in an `Authorization` header.
+///
+/// Returns `"AgentPin <jwt>"`.
+pub fn format_authorization_header(jwt: &str) -> String {
+    format!("AgentPin {}", jwt)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_extract_valid_header() {
+        let jwt = extract_credential("AgentPin eyJhbGciOiJFUzI1NiJ9.payload.sig").unwrap();
+        assert_eq!(jwt, "eyJhbGciOiJFUzI1NiJ9.payload.sig");
+    }
+
+    #[test]
+    fn test_extract_missing_prefix() {
+        let err = extract_credential("Bearer eyJhbGciOiJFUzI1NiJ9.payload.sig");
+        assert!(err.is_err());
+    }
+
+    #[test]
+    fn test_extract_empty_credential() {
+        let err = extract_credential("AgentPin ");
+        assert!(err.is_err());
+    }
+
+    #[test]
+    fn test_format_roundtrip() {
+        let jwt = "eyJhbGciOiJFUzI1NiJ9.payload.sig";
+        let header = format_authorization_header(jwt);
+        let extracted = extract_credential(&header).unwrap();
+        assert_eq!(extracted, jwt);
+    }
+}
diff --git a/crates/agentpin/src/transport/mcp.rs b/crates/agentpin/src/transport/mcp.rs
new file mode 100644
index 0000000..24b2b4b
--- /dev/null
+++ b/crates/agentpin/src/transport/mcp.rs
@@ -0,0 +1,61 @@
+//! MCP (Model Context Protocol) transport binding.
+//!
+//! Extracts and formats AgentPin credentials in MCP message metadata
+//! via the `agentpin_credential` field.
+
+use crate::error::Error;
+
+const FIELD_NAME: &str = "agentpin_credential";
+
+/// Extract the JWT from an MCP metadata JSON value.
+///
+/// Expects `meta["agentpin_credential"]` to be a string containing the JWT.
+pub fn extract_credential(meta: &serde_json::Value) -> Result<String, Error> {
+    let field = meta.get(FIELD_NAME).ok_or_else(|| {
+        Error::Transport(format!("Missing '{}' field in MCP metadata", FIELD_NAME))
+    })?;
+
+    field
+        .as_str()
+        .map(|s| s.to_string())
+        .ok_or_else(|| Error::Transport(format!("'{}' field is not a string", FIELD_NAME)))
+}
+
+/// Format a JWT as an MCP metadata JSON value.
+///
+/// Returns `{"agentpin_credential": "<jwt>"}`.
+pub fn format_meta_field(jwt: &str) -> serde_json::Value {
+    serde_json::json!({ FIELD_NAME: jwt })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_extract_valid_meta() {
+        let meta = serde_json::json!({ "agentpin_credential": "eyJ.payload.sig" });
+        let jwt = extract_credential(&meta).unwrap();
+        assert_eq!(jwt, "eyJ.payload.sig");
+    }
+
+    #[test]
+    fn test_extract_missing_field() {
+        let meta = serde_json::json!({ "other_field": "value" });
+        assert!(extract_credential(&meta).is_err());
+    }
+
+    #[test]
+    fn test_extract_wrong_type() {
+        let meta = serde_json::json!({ "agentpin_credential": 42 });
+        assert!(extract_credential(&meta).is_err());
+    }
+
+    #[test]
+    fn test_format_roundtrip() {
+        let jwt = "eyJ.payload.sig";
+        let meta = format_meta_field(jwt);
+        let extracted = extract_credential(&meta).unwrap();
+        assert_eq!(extracted, jwt);
+    }
+}
diff --git a/crates/agentpin/src/transport/mod.rs b/crates/agentpin/src/transport/mod.rs
new file mode 100644
index 0000000..c57104b
--- /dev/null
+++ b/crates/agentpin/src/transport/mod.rs
@@ -0,0 +1,9 @@
+//! Transport binding modules (spec Section 13).
+//!
+//! Framework-agnostic helpers for extracting and formatting AgentPin
+//! credentials across common transport protocols.
+
+pub mod grpc;
+pub mod http;
+pub mod mcp;
+pub mod websocket;
diff --git a/crates/agentpin/src/transport/websocket.rs b/crates/agentpin/src/transport/websocket.rs
new file mode 100644
index 0000000..96b8b90
--- /dev/null
+++ b/crates/agentpin/src/transport/websocket.rs
@@ -0,0 +1,77 @@
+//! WebSocket transport binding.
+//!
+//! Extracts and formats AgentPin credentials in JSON auth messages
+//! of the form `{"type":"agentpin-auth","credential":"<jwt>"}`.
+
+use crate::error::Error;
+
+const AUTH_TYPE: &str = "agentpin-auth";
+
+/// Extract the JWT from a WebSocket JSON auth message.
+///
+/// Expects `{"type":"agentpin-auth","credential":"<jwt>"}`.
+pub fn extract_credential(message: &str) -> Result<String, Error> {
+    let parsed: serde_json::Value = serde_json::from_str(message)
+        .map_err(|e| Error::Transport(format!("Invalid JSON: {}", e)))?;
+
+    let msg_type = parsed
+        .get("type")
+        .and_then(|v| v.as_str())
+        .ok_or_else(|| Error::Transport("Missing or non-string 'type' field".into()))?;
+
+    if msg_type != AUTH_TYPE {
+        return Err(Error::Transport(format!(
+            "Expected type '{}', got '{}'",
+            AUTH_TYPE, msg_type
+        )));
+    }
+
+    parsed
+        .get("credential")
+        .and_then(|v| v.as_str())
+        .map(|s| s.to_string())
+        .ok_or_else(|| Error::Transport("Missing or non-string 'credential' field".into()))
+}
+
+/// Format a JWT as a WebSocket auth message JSON string.
+///
+/// Returns `{"type":"agentpin-auth","credential":"<jwt>"}`.
+pub fn format_auth_message(jwt: &str) -> String {
+    serde_json::json!({
+        "type": AUTH_TYPE,
+        "credential": jwt,
+    })
+    .to_string()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_extract_valid_message() {
+        let msg = r#"{"type":"agentpin-auth","credential":"eyJ.payload.sig"}"#;
+        let jwt = extract_credential(msg).unwrap();
+        assert_eq!(jwt, "eyJ.payload.sig");
+    }
+
+    #[test]
+    fn test_extract_wrong_type() {
+        let msg = r#"{"type":"other-auth","credential":"eyJ.payload.sig"}"#;
+        assert!(extract_credential(msg).is_err());
+    }
+
+    #[test]
+    fn test_extract_missing_credential() {
+        let msg = r#"{"type":"agentpin-auth"}"#;
+        assert!(extract_credential(msg).is_err());
+    }
+
+    #[test]
+    fn test_format_roundtrip() {
+        let jwt = "eyJ.payload.sig";
+        let msg = format_auth_message(jwt);
+        let extracted = extract_credential(&msg).unwrap();
+        assert_eq!(extracted, jwt);
+    }
+}
diff --git a/crates/agentpin/src/types/a2a.rs b/crates/agentpin/src/types/a2a.rs
new file mode 100644
index 0000000..1ba7186
--- /dev/null
+++ b/crates/agentpin/src/types/a2a.rs
@@ -0,0 +1,192 @@
+//! A2A AgentCard extension types (v0.3.0).
+//!
+//!
AgentPin extends the [Google A2A](https://github.com/google-a2a/A2A) AgentCard +//! format with cryptographic identity verification. The `AgentpinExtension` +//! payload carries the AgentPin endpoint URL, the entity's public key in JWK +//! form, and a detached ECDSA signature over the rest of the AgentCard so +//! verifiers can confirm an AgentCard came from the entity that owns the +//! AgentPin discovery domain. +//! +//! These types are structural only — the signing and verification logic live +//! in [`crate::a2a`]. Resolution from a network endpoint or an in-memory store +//! lives in [`crate::resolver_a2a`] and [`crate::resolver_local`]. +//! +//! ## Why an inline definition (not the upstream `a2a-types` crate)? +//! +//! The upstream A2A spec is still draft. Embedding the minimal subset we need +//! (`AgentCard`, `AgentSkill`, `AgentCapabilities`) inline keeps AgentPin from +//! pinning an external version that's likely to churn. When the upstream crate +//! stabilises, this module can re-export from it without changing the public +//! surface here. + +use serde::{Deserialize, Serialize}; + +use crate::jwk::Jwk; + +use super::capability::Capability; +use super::discovery::AllowedDomains; + +// --------------------------------------------------------------------------- +// Minimal A2A AgentCard subset (inline; upstream `a2a-types` candidate) +// --------------------------------------------------------------------------- + +/// Minimal A2A `AgentCard` representation. +/// +/// Only the fields AgentPin needs to populate or read are exposed. Additional +/// upstream fields can be carried verbatim via the catch-all `extensions` map. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct A2aAgentCard { + /// Human-readable agent name (matches AgentPin `AgentDeclaration.name`). + pub name: String, + /// Free-form description. 
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub description: Option<String>,
+    /// Semver string for the agent (matches AgentPin `AgentDeclaration.version`).
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub version: Option<String>,
+    /// Public URL where the agent receives A2A traffic.
+    pub url: String,
+    /// Capabilities advertised under the A2A `AgentCapabilities` shape.
+    pub capabilities: A2aAgentCapabilities,
+    /// Skills exposed under the A2A `AgentSkill` shape — one per AgentPin
+    /// [`Capability`] in the source `AgentDeclaration`.
+    pub skills: Vec<A2aAgentSkill>,
+    /// AgentPin extension payload — present when this AgentCard is signed and
+    /// resolvable via the AgentPin protocol.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub agentpin: Option<AgentpinExtension>,
+}
+
+/// Minimal A2A `AgentCapabilities` representation.
+///
+/// AgentPin populates `allowed_domains` from the source [`crate::types::constraint::Constraints`]
+/// so A2A peers can scope tool verification (SchemaPin v1.4 `A2aVerificationContext`)
+/// against the same allow-list AgentPin already enforces.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
+pub struct A2aAgentCapabilities {
+    /// Whether the agent supports streaming responses.
+    #[serde(default)]
+    pub streaming: bool,
+    /// Whether the agent emits push notifications.
+    #[serde(default, rename = "pushNotifications")]
+    pub push_notifications: bool,
+    /// AgentPin v0.3.0 extension: domains this agent is permitted to interact
+    /// with. Populated from `Constraints.allowed_domains`.
+    /// Empty list = no restriction (all domains trusted).
+    #[serde(default, skip_serializing_if = "AllowedDomains::is_unrestricted")]
+    pub allowed_domains: AllowedDomains,
+}
+
+/// Minimal A2A `AgentSkill` representation.
+///
+/// AgentPin's [`Capability`] strings (e.g. `read:customers/*`) map to the A2A
+/// `id`. Free-form `name` and `description` carry over from the source
+/// declaration when present.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct A2aAgentSkill {
+    /// Stable identifier — equal to the AgentPin [`Capability`] verb-resource string.
+    pub id: String,
+    /// Human-readable skill name. AgentPin defaults to the capability id when
+    /// no name is supplied at builder time.
+    pub name: String,
+    /// Optional description.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub description: Option<String>,
+}
+
+// ---------------------------------------------------------------------------
+// AgentPin extension payload
+// ---------------------------------------------------------------------------
+
+/// AgentPin extension carried inside an A2A AgentCard's `agentpin` field.
+///
+/// The signature is a detached ECDSA P-256 signature over the canonical bytes
+/// of the rest of the AgentCard (everything *except* this `AgentpinExtension`
+/// itself — the canonical input is computed by serialising the card with the
+/// extension field cleared). Verifiers reconstruct that canonical input and
+/// check the signature against `public_key_jwk`.
+///
+/// Use [`crate::a2a::A2aAgentCardBuilder`] to construct + sign one and
+/// [`crate::a2a::verify_agentpin_extension`] to verify one.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct AgentpinExtension {
+    /// URL of the entity's `.well-known/agent-identity.json` discovery document.
+    pub agentpin_endpoint: String,
+    /// Public key (JWK form) used to sign the AgentCard.
+    pub public_key_jwk: Jwk,
+    /// Detached ECDSA P-256 signature, base64url-encoded.
+    pub signature: String,
+}
+
+// ---------------------------------------------------------------------------
+// Capability mapping helpers (AgentDeclaration -> A2A skill list)
+// ---------------------------------------------------------------------------
+
+/// Map an AgentPin [`Capability`] to a minimal [`A2aAgentSkill`].
+/// +/// The capability string itself (`verb:resource`) becomes both the skill `id` +/// and the default `name`. Callers that want richer names/descriptions should +/// use [`crate::a2a::A2aAgentCardBuilder::with_skill_overrides`]. +pub fn capability_to_skill(cap: &Capability) -> A2aAgentSkill { + let id = cap.0.clone(); + A2aAgentSkill { + id: id.clone(), + name: id, + description: None, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn capability_maps_to_skill() { + let cap = Capability::from("read:customers/*"); + let skill = capability_to_skill(&cap); + assert_eq!(skill.id, "read:customers/*"); + assert_eq!(skill.name, "read:customers/*"); + assert_eq!(skill.description, None); + } + + #[test] + fn allowed_domains_serializes_omits_when_empty() { + let caps = A2aAgentCapabilities::default(); + let json = serde_json::to_string(&caps).unwrap(); + assert!(!json.contains("allowed_domains"), "got: {json}"); + } + + #[test] + fn allowed_domains_serializes_when_populated() { + let caps = A2aAgentCapabilities { + allowed_domains: AllowedDomains::from_domains(["a.com", "b.com"]), + ..Default::default() + }; + let json = serde_json::to_string(&caps).unwrap(); + assert!( + json.contains("\"allowed_domains\":[\"a.com\",\"b.com\"]"), + "got: {json}" + ); + } + + #[test] + fn agentpin_extension_roundtrips() { + let ext = AgentpinExtension { + agentpin_endpoint: "https://example.com/.well-known/agent-identity.json".to_string(), + public_key_jwk: Jwk { + kid: "kid-1".to_string(), + kty: "EC".to_string(), + crv: "P-256".to_string(), + x: "x".to_string(), + y: "y".to_string(), + use_: "sig".to_string(), + key_ops: None, + exp: None, + }, + signature: "sig".to_string(), + }; + let json = serde_json::to_string(&ext).unwrap(); + let back: AgentpinExtension = serde_json::from_str(&json).unwrap(); + assert_eq!(ext, back); + } +} diff --git a/crates/agentpin/src/types/bundle.rs b/crates/agentpin/src/types/bundle.rs index 5c0e052..caccf01 100644 --- 
a/crates/agentpin/src/types/bundle.rs +++ b/crates/agentpin/src/types/bundle.rs @@ -67,6 +67,7 @@ mod tests { revocation_endpoint: None, policy_url: None, schemapin_endpoint: None, + a2a_endpoint: None, max_delegation_depth: 2, updated_at: "2026-01-15T00:00:00Z".to_string(), }; diff --git a/crates/agentpin/src/types/capability.rs b/crates/agentpin/src/types/capability.rs index f4b5394..8dea371 100644 --- a/crates/agentpin/src/types/capability.rs +++ b/crates/agentpin/src/types/capability.rs @@ -78,6 +78,55 @@ impl From for Capability { } } +/// Core actions from the AgentPin capability taxonomy. +pub const CORE_ACTIONS: &[&str] = &["read", "write", "execute", "admin", "delegate"]; + +/// Check whether a string looks like a reverse-domain prefix (e.g., `com.example.scan`). +/// Requires at least two dot-separated segments, each non-empty. +fn is_reverse_domain(action: &str) -> bool { + let parts: Vec<&str> = action.split('.').collect(); + parts.len() >= 2 && parts.iter().all(|p| !p.is_empty()) +} + +/// Validate a capability string against the AgentPin taxonomy. +/// +/// Rules: +/// - Must be in `action:resource` format +/// - If action is a core action, no additional validation +/// - If action is not a core action (custom), it MUST use reverse-domain prefix (e.g., `com.example.scan:target`) +/// - `admin:*` wildcard is rejected (admin capabilities must be explicitly scoped) +/// +/// Returns Ok(()) if valid, Err with description if invalid. 
+pub fn validate_capability(cap: &Capability) -> Result<(), String> { + let (action, resource) = match Capability::parse(&cap.0) { + Some(parts) => parts, + None => return Err("capability must be in 'action:resource' format".to_string()), + }; + + // Reject admin:* wildcard — admin must be explicitly scoped + if action == "admin" && resource == "*" { + return Err( + "admin:* wildcard is not allowed; admin capabilities must be explicitly scoped" + .to_string(), + ); + } + + // Core actions are always valid (with any resource) + if CORE_ACTIONS.contains(&action) { + return Ok(()); + } + + // Custom actions must use reverse-domain prefix + if !is_reverse_domain(action) { + return Err(format!( + "custom action '{}' must use reverse-domain prefix (e.g., com.example.{})", + action, action + )); + } + + Ok(()) +} + /// Check that all requested capabilities are covered by declared capabilities. pub fn capabilities_subset(declared: &[Capability], requested: &[Capability]) -> bool { requested @@ -151,6 +200,48 @@ mod tests { assert_eq!(h1.len(), 64); } + #[test] + fn test_validate_core_action() { + let cap = Capability::from("read:codebase"); + assert!(validate_capability(&cap).is_ok()); + } + + #[test] + fn test_validate_wildcard() { + let cap = Capability::from("read:*"); + assert!(validate_capability(&cap).is_ok()); + } + + #[test] + fn test_validate_admin_wildcard_rejected() { + let cap = Capability::from("admin:*"); + assert!(validate_capability(&cap).is_err()); + } + + #[test] + fn test_validate_admin_scoped_ok() { + let cap = Capability::from("admin:users"); + assert!(validate_capability(&cap).is_ok()); + } + + #[test] + fn test_validate_custom_action_with_domain() { + let cap = Capability::from("com.example.scan:target"); + assert!(validate_capability(&cap).is_ok()); + } + + #[test] + fn test_validate_custom_action_without_domain() { + let cap = Capability::from("scan:target"); + assert!(validate_capability(&cap).is_err()); + } + + #[test] + fn 
test_validate_missing_colon() { + let cap = Capability::from("readcodebase"); + assert!(validate_capability(&cap).is_err()); + } + #[test] fn test_capabilities_hash_order_independent() { let caps1 = vec![ diff --git a/crates/agentpin/src/types/discovery.rs b/crates/agentpin/src/types/discovery.rs index f19df32..e528fd5 100644 --- a/crates/agentpin/src/types/discovery.rs +++ b/crates/agentpin/src/types/discovery.rs @@ -4,6 +4,96 @@ use super::capability::Capability; use super::constraint::Constraints; use crate::jwk::Jwk; +/// List of domains an agent is permitted to interact with (v0.3.0). +/// +/// Extracted from [`Constraints::allowed_domains`] for use by cross-protocol +/// A2A verification — most notably SchemaPin v1.4's `A2aVerificationContext`, +/// which scopes tool verification to the intersection of caller and provider +/// domains. +/// +/// Convention: an empty list means *no restriction* (all domains trusted). +/// A non-empty list means the agent is restricted to exactly those domains. +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(transparent)] +pub struct AllowedDomains(pub Vec); + +impl AllowedDomains { + /// Construct an empty list (no restriction — all domains trusted). + pub fn unrestricted() -> Self { + Self(Vec::new()) + } + + /// Construct from any iterable of strings. + pub fn from_domains(iter: I) -> Self + where + I: IntoIterator, + S: Into, + { + Self(iter.into_iter().map(Into::into).collect()) + } + + /// `true` when the list is empty (no restriction). + pub fn is_unrestricted(&self) -> bool { + self.0.is_empty() + } + + /// `true` when `domain` is allowed under this list. + /// An empty list trusts all domains. + pub fn allows(&self, domain: &str) -> bool { + self.is_unrestricted() || self.0.iter().any(|d| d == domain) + } + + /// Intersection of two allow-lists. 
+ /// + /// Following the convention that empty = unrestricted: + /// - `unrestricted ∩ X = X` + /// - `X ∩ unrestricted = X` + /// - `[a,b] ∩ [b,c] = [b]` + pub fn intersect(&self, other: &Self) -> Self { + if self.is_unrestricted() { + return other.clone(); + } + if other.is_unrestricted() { + return self.clone(); + } + let inter: Vec = self + .0 + .iter() + .filter(|d| other.0.contains(d)) + .cloned() + .collect(); + Self(inter) + } + + /// Borrow the inner vector. + pub fn as_slice(&self) -> &[String] { + &self.0 + } +} + +impl std::iter::FromIterator for AllowedDomains +where + S: Into, +{ + fn from_iter>(iter: I) -> Self { + Self(iter.into_iter().map(Into::into).collect()) + } +} + +impl Constraints { + /// Extract [`AllowedDomains`] from the constraints' `allowed_domains` field. + /// + /// Returns [`AllowedDomains::unrestricted`] when the field is absent — the + /// "no allow-list specified" case is treated as "no restriction" so the + /// intersection helper composes correctly with cross-protocol callers. + pub fn allowed_domains_typed(&self) -> AllowedDomains { + match &self.allowed_domains { + Some(list) => AllowedDomains(list.clone()), + None => AllowedDomains::unrestricted(), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct DiscoveryDocument { pub agentpin_version: String, @@ -17,6 +107,10 @@ pub struct DiscoveryDocument { pub policy_url: Option, #[serde(skip_serializing_if = "Option::is_none")] pub schemapin_endpoint: Option, + /// Optional v0.3.0: URL of the entity's A2A AgentCard endpoint + /// (`.well-known/agent-card.json`), enabling cross-protocol discovery. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub a2a_endpoint: Option, pub max_delegation_depth: u8, pub updated_at: String, } @@ -91,6 +185,7 @@ mod tests { ), policy_url: None, schemapin_endpoint: None, + a2a_endpoint: None, max_delegation_depth: 2, updated_at: "2026-01-15T00:00:00Z".to_string(), }; @@ -127,4 +222,86 @@ mod tests { "\"suspended\"" ); } + + // ── v0.3.0: AllowedDomains tests ───────────────────────────────── + + #[test] + fn allowed_domains_unrestricted_accepts_anything() { + let ad = AllowedDomains::unrestricted(); + assert!(ad.is_unrestricted()); + assert!(ad.allows("anything.com")); + assert!(ad.allows("example.com")); + } + + #[test] + fn allowed_domains_restricted_filters() { + let ad = AllowedDomains::from_domains(["a.com", "b.com"]); + assert!(!ad.is_unrestricted()); + assert!(ad.allows("a.com")); + assert!(ad.allows("b.com")); + assert!(!ad.allows("c.com")); + } + + #[test] + fn allowed_domains_intersect_with_unrestricted_returns_other() { + let unrestricted = AllowedDomains::unrestricted(); + let restricted = AllowedDomains::from_domains(["a.com", "b.com"]); + assert_eq!( + unrestricted.intersect(&restricted).as_slice(), + restricted.as_slice() + ); + assert_eq!( + restricted.intersect(&unrestricted).as_slice(), + restricted.as_slice() + ); + } + + #[test] + fn allowed_domains_intersect_returns_overlap() { + let lhs = AllowedDomains::from_domains(["a.com", "b.com", "c.com"]); + let rhs = AllowedDomains::from_domains(["b.com", "c.com", "d.com"]); + assert_eq!( + lhs.intersect(&rhs).as_slice(), + &["b.com".to_string(), "c.com".to_string()] + ); + } + + #[test] + fn allowed_domains_intersect_no_overlap_yields_empty() { + let lhs = AllowedDomains::from_domains(["a.com"]); + let rhs = AllowedDomains::from_domains(["b.com"]); + let inter = lhs.intersect(&rhs); + // empty == unrestricted under our convention; documented in the type + assert!(inter.is_unrestricted()); + } + + #[test] + fn allowed_domains_serializes_transparently() { + 
let ad = AllowedDomains::from_domains(["a.com", "b.com"]); + let json = serde_json::to_string(&ad).unwrap(); + assert_eq!(json, "[\"a.com\",\"b.com\"]"); + let back: AllowedDomains = serde_json::from_str(&json).unwrap(); + assert_eq!(back.as_slice(), ad.as_slice()); + } + + #[test] + fn constraints_allowed_domains_typed_extracts_or_defaults() { + let with_list = Constraints { + allowed_domains: Some(vec!["a.com".to_string()]), + ..Default::default() + }; + assert_eq!( + with_list.allowed_domains_typed().as_slice(), + &["a.com".to_string()] + ); + + let without = Constraints::default(); + assert!(without.allowed_domains_typed().is_unrestricted()); + } + + #[test] + fn allowed_domains_collects_via_from_iterator() { + let ad: AllowedDomains = ["a.com", "b.com"].iter().copied().collect(); + assert_eq!(ad.as_slice(), &["a.com".to_string(), "b.com".to_string()]); + } } diff --git a/crates/agentpin/src/types/mod.rs b/crates/agentpin/src/types/mod.rs index 6f8e426..b396ca0 100644 --- a/crates/agentpin/src/types/mod.rs +++ b/crates/agentpin/src/types/mod.rs @@ -1,3 +1,4 @@ +pub mod a2a; pub mod bundle; pub mod capability; pub mod constraint; diff --git a/crates/agentpin/src/verification.rs b/crates/agentpin/src/verification.rs index ddd92d3..4f2584b 100644 --- a/crates/agentpin/src/verification.rs +++ b/crates/agentpin/src/verification.rs @@ -21,6 +21,8 @@ pub struct VerifierConfig { pub clock_skew_secs: i64, /// Maximum credential lifetime in seconds (default: 86400) pub max_ttl_secs: i64, + /// When true, capabilities are validated against the taxonomy (default: false) + pub strict_capabilities: bool, } impl Default for VerifierConfig { @@ -28,6 +30,7 @@ impl Default for VerifierConfig { Self { clock_skew_secs: 60, max_ttl_secs: 86400, + strict_capabilities: false, } } } diff --git a/crates/agentpin/tests/integration.rs b/crates/agentpin/tests/integration.rs new file mode 100644 index 0000000..a2845bc --- /dev/null +++ b/crates/agentpin/tests/integration.rs @@ -0,0 +1,306 
@@ +//! End-to-end integration tests for AgentPin. + +use agentpin::credential::issue_credential; +use agentpin::crypto::{generate_key_id, generate_key_pair, load_signing_key, load_verifying_key}; +use agentpin::discovery::{find_agent_by_id, find_key_by_kid, validate_discovery_document}; +use agentpin::jwk::pem_to_jwk; +use agentpin::jwt::{decode_jwt_unverified, verify_jwt}; +use agentpin::mutual::{create_challenge, create_response, verify_response_with_nonce_store}; +use agentpin::nonce::InMemoryNonceStore; +use agentpin::pinning::{check_pinning, KeyPinStore, PinningResult}; +use agentpin::resolver::TrustBundleResolver; +use agentpin::revocation::{add_revoked_key, build_revocation_document, check_revocation}; +use agentpin::rotation::{apply_rotation, complete_rotation, prepare_rotation}; +use agentpin::transport; +use agentpin::types::bundle::TrustBundle; +use agentpin::types::capability::Capability; +use agentpin::types::discovery::*; +use agentpin::types::revocation::RevocationReason; +use agentpin::verification::{ + verify_credential_offline, verify_credential_with_resolver, VerifierConfig, +}; + +fn make_test_setup() -> (String, String, String, String, DiscoveryDocument) { + let kp = generate_key_pair().unwrap(); + let kid = generate_key_id(&kp.public_key_pem).unwrap(); + let jwk = pem_to_jwk(&kp.public_key_pem, &kid).unwrap(); + let doc = DiscoveryDocument { + agentpin_version: "0.1".to_string(), + entity: "example.com".to_string(), + entity_type: EntityType::Maker, + public_keys: vec![jwk], + agents: vec![AgentDeclaration { + agent_id: "urn:agentpin:example.com:test-agent".to_string(), + agent_type: None, + name: "Test Agent".to_string(), + description: None, + version: None, + capabilities: vec![Capability::from("read:*"), Capability::from("write:report")], + constraints: None, + maker_attestation: None, + credential_ttl_max: Some(3600), + status: AgentStatus::Active, + directory_listing: None, + }], + revocation_endpoint: None, + policy_url: None, + 
schemapin_endpoint: None, + a2a_endpoint: None, + max_delegation_depth: 2, + updated_at: "2026-01-01T00:00:00Z".to_string(), + }; + ( + kp.private_key_pem, + kp.public_key_pem, + kid, + "urn:agentpin:example.com:test-agent".to_string(), + doc, + ) +} + +#[test] +fn test_maker_deployer_flow() { + let (private_pem, public_pem, kid, agent_id, doc) = make_test_setup(); + + // Validate the discovery document + validate_discovery_document(&doc, "example.com").unwrap(); + assert!(find_key_by_kid(&doc, &kid).is_some()); + assert!(find_agent_by_id(&doc, &agent_id).is_some()); + + // Issue a credential + let sk = load_signing_key(&private_pem).unwrap(); + let jwt_str = issue_credential( + &sk, + &kid, + "example.com", + &agent_id, + Some("verifier.com"), + vec![ + Capability::from("read:data"), + Capability::from("write:report"), + ], + None, + None, + 3600, + ) + .unwrap(); + + // Decode unverified to inspect + let (header, payload, _sig) = decode_jwt_unverified(&jwt_str).unwrap(); + assert_eq!(header.alg, "ES256"); + assert_eq!(header.typ, "agentpin-credential+jwt"); + assert_eq!(header.kid, kid); + assert_eq!(payload.iss, "example.com"); + assert_eq!(payload.sub, agent_id); + + // Verify signature + let vk = load_verifying_key(&public_pem).unwrap(); + let (verified_header, verified_payload) = verify_jwt(&jwt_str, &vk).unwrap(); + assert_eq!(verified_header.kid, kid); + assert_eq!(verified_payload.iss, "example.com"); + + // Full offline verification via TrustBundleResolver + let revocation = build_revocation_document("example.com"); + let bundle = TrustBundle { + agentpin_bundle_version: "0.1".to_string(), + created_at: "2026-01-01T00:00:00Z".to_string(), + documents: vec![doc.clone()], + revocations: vec![revocation], + }; + let resolver = TrustBundleResolver::new(&bundle); + let mut pin_store = KeyPinStore::new(); + let config = VerifierConfig::default(); + + let result = verify_credential_with_resolver( + &jwt_str, + &resolver, + &mut pin_store, + Some("verifier.com"), 
+ &config, + ); + assert!(result.valid, "Expected valid, got: {:?}", result); + assert_eq!(result.agent_id, Some(agent_id)); + assert_eq!(result.issuer, Some("example.com".to_string())); +} + +#[test] +fn test_revocation_flow() { + let (private_pem, _public_pem, kid, agent_id, doc) = make_test_setup(); + + let sk = load_signing_key(&private_pem).unwrap(); + let jwt_str = issue_credential( + &sk, + &kid, + "example.com", + &agent_id, + None, + vec![Capability::from("read:data")], + None, + None, + 3600, + ) + .unwrap(); + + // Parse JWT to get the jti + let (_header, payload, _sig) = decode_jwt_unverified(&jwt_str).unwrap(); + + // Clean revocation: should pass + let mut rev_doc = build_revocation_document("example.com"); + check_revocation(&rev_doc, &payload.jti, &agent_id, &kid).unwrap(); + + // Add revoked key + add_revoked_key(&mut rev_doc, &kid, RevocationReason::KeyCompromise); + + // Now check_revocation should fail + let result = check_revocation(&rev_doc, &payload.jti, &agent_id, &kid); + assert!(result.is_err(), "Expected revocation check to fail"); + + // Full offline verification should also fail + let mut pin_store = KeyPinStore::new(); + let config = VerifierConfig::default(); + let vresult = verify_credential_offline( + &jwt_str, + &doc, + Some(&rev_doc), + &mut pin_store, + None, + &config, + ); + assert!(!vresult.valid); +} + +#[test] +fn test_mutual_verification_with_nonce_store() { + let kp = generate_key_pair().unwrap(); + let sk = load_signing_key(&kp.private_key_pem).unwrap(); + let vk = load_verifying_key(&kp.public_key_pem).unwrap(); + + let store = InMemoryNonceStore::new(); + let challenge = create_challenge(None); + let response = create_response(&challenge, &sk, "test-key"); + + // First verification should succeed + let valid = verify_response_with_nonce_store(&response, &challenge, &vk, Some(&store)).unwrap(); + assert!(valid); + + // Second verification with the same nonce should fail (replay) + let result = 
verify_response_with_nonce_store(&response, &challenge, &vk, Some(&store)); + assert!(result.is_err(), "Replayed nonce should be rejected"); +} + +#[test] +fn test_transport_roundtrip() { + let kp = generate_key_pair().unwrap(); + let sk = load_signing_key(&kp.private_key_pem).unwrap(); + let kid = generate_key_id(&kp.public_key_pem).unwrap(); + + let jwt_str = issue_credential( + &sk, + &kid, + "example.com", + "urn:agentpin:example.com:test-agent", + None, + vec![Capability::from("read:data")], + None, + None, + 3600, + ) + .unwrap(); + + // HTTP roundtrip + let http_header = transport::http::format_authorization_header(&jwt_str); + let http_extracted = transport::http::extract_credential(&http_header).unwrap(); + assert_eq!(http_extracted, jwt_str); + + // MCP roundtrip + let mcp_meta = transport::mcp::format_meta_field(&jwt_str); + let mcp_extracted = transport::mcp::extract_credential(&mcp_meta).unwrap(); + assert_eq!(mcp_extracted, jwt_str); + + // WebSocket roundtrip + let ws_msg = transport::websocket::format_auth_message(&jwt_str); + let ws_extracted = transport::websocket::extract_credential(&ws_msg).unwrap(); + assert_eq!(ws_extracted, jwt_str); + + // gRPC roundtrip + let grpc_val = transport::grpc::format_metadata_value(&jwt_str); + let grpc_extracted = transport::grpc::extract_credential(&grpc_val).unwrap(); + assert_eq!(grpc_extracted, jwt_str); +} + +#[test] +fn test_key_rotation_lifecycle() { + let kp = generate_key_pair().unwrap(); + let old_kid = generate_key_id(&kp.public_key_pem).unwrap(); + let old_jwk = pem_to_jwk(&kp.public_key_pem, &old_kid).unwrap(); + + let mut doc = DiscoveryDocument { + agentpin_version: "0.1".to_string(), + entity: "example.com".to_string(), + entity_type: EntityType::Maker, + public_keys: vec![old_jwk], + agents: vec![], + revocation_endpoint: None, + policy_url: None, + schemapin_endpoint: None, + a2a_endpoint: None, + max_delegation_depth: 2, + updated_at: "2026-01-01T00:00:00Z".to_string(), + }; + + 
assert_eq!(doc.public_keys.len(), 1); + + // Prepare rotation + let plan = prepare_rotation(&old_kid).unwrap(); + assert_ne!(plan.new_kid, old_kid); + + // Apply rotation: both keys should be present + apply_rotation(&mut doc, &plan).unwrap(); + assert_eq!(doc.public_keys.len(), 2); + assert!(doc.public_keys.iter().any(|k| k.kid == old_kid)); + assert!(doc.public_keys.iter().any(|k| k.kid == plan.new_kid)); + + // Complete rotation: old key removed, added to revocation + let mut rev_doc = build_revocation_document("example.com"); + complete_rotation( + &mut doc, + &mut rev_doc, + &old_kid, + RevocationReason::Superseded, + ) + .unwrap(); + + assert_eq!(doc.public_keys.len(), 1); + assert_eq!(doc.public_keys[0].kid, plan.new_kid); + assert_eq!(rev_doc.revoked_keys.len(), 1); + assert_eq!(rev_doc.revoked_keys[0].kid, old_kid); + assert_eq!(rev_doc.revoked_keys[0].reason, RevocationReason::Superseded); +} + +#[test] +fn test_pinning_flow() { + let kp1 = generate_key_pair().unwrap(); + let kid1 = generate_key_id(&kp1.public_key_pem).unwrap(); + let jwk1 = pem_to_jwk(&kp1.public_key_pem, &kid1).unwrap(); + + let mut store = KeyPinStore::new(); + + // First verification pins the key + let result1 = check_pinning(&mut store, "example.com", &jwk1).unwrap(); + assert_eq!(result1, PinningResult::FirstUse); + + // Same key succeeds + let result2 = check_pinning(&mut store, "example.com", &jwk1).unwrap(); + assert_eq!(result2, PinningResult::Matched); + + // Different key triggers error + let kp2 = generate_key_pair().unwrap(); + let kid2 = generate_key_id(&kp2.public_key_pem).unwrap(); + let jwk2 = pem_to_jwk(&kp2.public_key_pem, &kid2).unwrap(); + + let result3 = check_pinning(&mut store, "example.com", &jwk2); + assert!( + result3.is_err(), + "Different key should trigger pinning error" + ); +} diff --git a/examples/axum_middleware.rs b/examples/axum_middleware.rs new file mode 100644 index 0000000..a1f6e44 --- /dev/null +++ b/examples/axum_middleware.rs @@ -0,0 +1,67 @@ 
+//! Reference: AgentPin credential extraction as an Axum extractor. +//! +//! This example shows how to integrate AgentPin verification into an Axum HTTP server. +//! It is not a published crate — copy and adapt for your own server. +//! +//! Usage: +//! cargo run --example axum_middleware --features fetch +//! +//! Dependencies needed in your Cargo.toml: +//! axum = "0.7" +//! tokio = { version = "1", features = ["full"] } +//! agentpin = { version = "0.2", features = ["fetch"] } + +use axum::{ + async_trait, + extract::FromRequestParts, + http::{request::Parts, StatusCode}, + response::IntoResponse, + routing::get, + Router, +}; + +/// Extractor that pulls an AgentPin credential from the Authorization header. +pub struct AgentPinCredential(pub String); + +#[async_trait] +impl FromRequestParts for AgentPinCredential { + type Rejection = (StatusCode, String); + + async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result { + let header = parts + .headers + .get("authorization") + .and_then(|v| v.to_str().ok()) + .ok_or((StatusCode::UNAUTHORIZED, "Missing Authorization header".to_string()))?; + + let jwt = agentpin::transport::http::extract_credential(header) + .map_err(|e| (StatusCode::UNAUTHORIZED, e.to_string()))?; + + // In production, you would verify the JWT here: + // let resolver = agentpin::resolver::ChainResolver::new(vec![...]); + // let result = agentpin::verification::verify_credential(&jwt, &resolver, &config).await?; + + Ok(AgentPinCredential(jwt)) + } +} + +async fn protected_handler(AgentPinCredential(jwt): AgentPinCredential) -> impl IntoResponse { + format!("Authenticated with credential: {}...", &jwt[..20.min(jwt.len())]) +} + +async fn health() -> &'static str { + "ok" +} + +#[tokio::main] +async fn main() { + let app = Router::new() + .route("/protected", get(protected_handler)) + .route("/health", get(health)); + + let listener = tokio::net::TcpListener::bind("127.0.0.1:3000") + .await + .unwrap(); + println!("Listening on 
http://127.0.0.1:3000"); + axum::serve(listener, app).await.unwrap(); +} diff --git a/examples/express_middleware.js b/examples/express_middleware.js new file mode 100644 index 0000000..6d156db --- /dev/null +++ b/examples/express_middleware.js @@ -0,0 +1,52 @@ +/** + * Reference: AgentPin credential extraction as Express middleware. + * + * Usage: + * npm install express agentpin + * node examples/express_middleware.js + * + * This is example code — copy and adapt for your own server. + */ + +import express from 'express'; +import { httpExtractCredential } from 'agentpin'; + +/** + * Express middleware that extracts an AgentPin credential from the + * Authorization header and attaches it to req.agentpinCredential. + */ +function agentpinAuth(req, res, next) { + const auth = req.headers.authorization; + if (!auth) { + return res.status(401).json({ error: 'Missing Authorization header' }); + } + + try { + req.agentpinCredential = httpExtractCredential(auth); + } catch (err) { + return res.status(401).json({ error: err.message }); + } + + // In production, verify the credential here: + // import { verifyCredentialOffline } from 'agentpin'; + // const result = verifyCredentialOffline(jwt, discoveryDoc, ...); + // if (!result.valid) return res.status(403).json({ error: result.error }); + + next(); +} + +const app = express(); + +app.get('/protected', agentpinAuth, (req, res) => { + const jwt = req.agentpinCredential; + res.json({ message: `Authenticated with credential: ${jwt.slice(0, 20)}...` }); +}); + +app.get('/health', (_req, res) => { + res.json({ status: 'ok' }); +}); + +const port = process.env.PORT || 3000; +app.listen(port, () => { + console.log(`Listening on http://127.0.0.1:${port}`); +}); diff --git a/examples/fastapi_middleware.py b/examples/fastapi_middleware.py new file mode 100644 index 0000000..279e9c7 --- /dev/null +++ b/examples/fastapi_middleware.py @@ -0,0 +1,43 @@ +"""Reference: AgentPin credential extraction as a FastAPI dependency. 
+ +Usage: + pip install fastapi uvicorn agentpin + uvicorn examples.fastapi_middleware:app --reload + +This is example code — copy and adapt for your own server. +""" + +from fastapi import Depends, FastAPI, HTTPException, Request + +from agentpin.transport import http_extract_credential + +app = FastAPI(title="AgentPin Example Server") + + +async def get_agentpin_credential(request: Request) -> str: + """FastAPI dependency that extracts and returns the AgentPin JWT.""" + auth = request.headers.get("authorization") + if not auth: + raise HTTPException(status_code=401, detail="Missing Authorization header") + try: + jwt = http_extract_credential(auth) + except Exception as e: + raise HTTPException(status_code=401, detail=str(e)) + + # In production, verify the credential here: + # from agentpin import verify_credential_offline + # result = verify_credential_offline(jwt, discovery_doc, ...) + # if not result["valid"]: + # raise HTTPException(status_code=403, detail=result["error"]) + + return jwt + + +@app.get("/protected") +async def protected_route(credential: str = Depends(get_agentpin_credential)): + return {"message": f"Authenticated with credential: {credential[:20]}..."} + + +@app.get("/health") +async def health(): + return {"status": "ok"} diff --git a/go/README.md b/go/README.md new file mode 100644 index 0000000..af868e9 --- /dev/null +++ b/go/README.md @@ -0,0 +1,180 @@ +# AgentPin Go SDK + +[![Go Reference](https://pkg.go.dev/badge/github.com/ThirdKeyAi/agentpin/go.svg)](https://pkg.go.dev/github.com/ThirdKeyAi/agentpin/go) + +Go implementation of the AgentPin domain-anchored cryptographic identity +protocol for AI agents. Wire-compatible with the +[Rust](../crates/agentpin), [JavaScript](../javascript), and +[Python](../python) SDKs. + +Part of the ThirdKey trust stack: [SchemaPin](https://schemapin.org) → +**AgentPin** → [Symbiont](https://symbiont.dev). 
+ +## Install + +```bash +# Library +go get github.com/ThirdKeyAi/agentpin/go + +# CLI +go install github.com/ThirdKeyAi/agentpin/go/cmd/agentpin@latest +``` + +Requires Go 1.21+. + +## Quick start + +```go +package main + +import ( + "fmt" + "log" + + "github.com/ThirdKeyAi/agentpin/go/pkg/credential" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/discovery" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/pinning" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" + "github.com/ThirdKeyAi/agentpin/go/pkg/verification" +) + +func main() { + // 1. Generate a keypair. + kp, err := crypto.GenerateKeyPair() + if err != nil { + log.Fatal(err) + } + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + + // 2. Build a discovery document. + disc := discovery.BuildDiscoveryDocument( + "example.com", + types.EntityMaker, + []types.JWK{jwk.VerifyingKeyToJWK(pub, "example-2026-01")}, + []types.AgentDeclaration{ + { + AgentID: "urn:agentpin:example.com:my-agent", + Name: "My Agent", + Capabilities: []types.Capability{"read:data"}, + Status: types.AgentActive, + }, + }, + 2, // max_delegation_depth + "2026-01-15T00:00:00Z", + ) + + // 3. Issue a credential. + cred, err := credential.IssueCredential( + priv, "example-2026-01", + "example.com", "urn:agentpin:example.com:my-agent", + "verifier.com", + []types.Capability{"read:data"}, + nil, nil, + 3600, // ttl_secs + ) + if err != nil { + log.Fatal(err) + } + fmt.Println("credential:", cred[:60], "...") + + // 4. Verify it offline. 
+ pinStore := pinning.NewKeyPinStore() + result := verification.VerifyCredentialOffline( + cred, &disc, nil, pinStore, + "verifier.com", + verification.DefaultVerifierConfig(), + ) + if !result.Valid { + log.Fatalf("verify failed: %s", result.ErrorMessage) + } + fmt.Println("verified agent:", result.AgentID) +} +``` + +## CLI + +The `agentpin` CLI mirrors the Rust binary: + +```bash +agentpin keygen --domain example.com --kid example-2026-01 --output-dir ./keys +agentpin issue \ + --private-key ./keys/example-2026-01.private.pem \ + --kid example-2026-01 \ + --issuer example.com \ + --agent-id urn:agentpin:example.com:my-agent \ + --capabilities read:data,write:report \ + --ttl 3600 +agentpin verify \ + --credential \ + --discovery ./discovery.json \ + --offline +agentpin bundle \ + --discovery ./d1.json --discovery ./d2.json \ + --output trust-bundle.json +``` + +## Language API reference + +| Function (Go) | Rust equivalent | Purpose | +|---------------------------------------------------------|------------------------------------------------|--------------------------------------| +| `crypto.GenerateKeyPair` | `agentpin::crypto::generate_key_pair` | New ECDSA P-256 keypair (PEM) | +| `crypto.SignData` / `VerifySignature` | `sign_data` / `verify_signature` | DER-signature over arbitrary bytes | +| `crypto.GenerateKeyID` | `generate_key_id` | SHA-256 hex of SPKI DER | +| `jwk.PEMToJWK` / `JWKToPEM` | `jwk::pem_to_jwk` / `jwk_to_pem` | PEM ↔ JWK conversion | +| `jwk.JWKThumbprint` | `jwk_thumbprint` | RFC 7638 thumbprint | +| `jwt.EncodeJWT` / `VerifyJWT` / `DecodeJWTUnverified` | `jwt::encode_jwt` / `verify_jwt` | ES256-only JWT (rejects all else) | +| `discovery.BuildDiscoveryDocument` | `discovery::build_discovery_document` | Build `.well-known/agent-identity` | +| `discovery.FetchDiscoveryDocument` | `discovery::fetch_discovery_document` (fetch) | HTTPS fetch (no redirects) | +| `credential.IssueCredential` | `credential::issue_credential` | Issue agent 
credential JWT | +| `verification.VerifyCredentialOffline` | `verification::verify_credential_offline` | 12-step offline verifier | +| `verification.VerifyCredentialWithResolver` | `verification::verify_credential_with_resolver`| Verify via DiscoveryResolver | +| `revocation.BuildRevocationDocument` / `CheckRevocation`| `revocation::*` | Build / query revocations | +| `pinning.KeyPinStore` | `pinning::KeyPinStore` | TOFU key pin store | +| `delegation.CreateAttestation` / `VerifyAttestation` | `delegation::*` | Maker→deployer attestation chain | +| `mutual.CreateChallenge` / `VerifyResponse` | `mutual::*` | 128-bit nonce challenge / response | +| `nonce.InMemoryStore` | `nonce::InMemoryNonceStore` | Replay-protection nonce store | +| `bundle.NewTrustBundle` | `types::bundle::TrustBundle::new` | Offline trust-bundle builder | +| `resolver.{WellKnown,LocalFile,TrustBundle,Chain}Resolver` | `resolver::*` | Pluggable discovery resolution | + +## Security guarantees + +- **ES256 only.** `jwt.DecodeJWTUnverified` rejects every algorithm except + `ES256` and every typ except `agentpin-credential+jwt` *before* any + signature work. There is no third-party JWT dependency with permissive + `alg` defaults — we use `crypto/ecdsa` directly. +- **Wire compatibility.** Discovery documents, credentials, revocation lists, + and trust bundles round-trip byte-identically across the Rust, JavaScript, + Python, and Go SDKs. This is asserted by cross-language interop tests in + `pkg/verification/cross_language_test.go`. +- **TOFU key pinning.** Verification fail-closes on key change unless the + caller explicitly trusts the rotation via `KeyPinStore.AddKey`. +- **Fail-closed revocation.** When a resolver returns an error fetching a + revocation document, verification rejects the credential rather than + proceeding without revocation data. + +## Development + +```bash +cd go +go test ./... # all packages green +go vet ./... # no findings +gofmt -l . 
# must be empty +``` + +To regenerate cross-language test fixtures from the Rust CLI: + +```bash +cargo build -p agentpin-cli --release +target/release/agentpin keygen \ + --domain example.com --kid example-2026-01 \ + --output-dir go/pkg/verification/testdata --format both +# Then update go/pkg/verification/testdata/discovery.json with the new JWK +# and reissue go/pkg/verification/testdata/credential.jwt accordingly. +``` + +## License + +MIT — see [LICENSE](../LICENSE). diff --git a/go/cmd/agentpin/bundle.go b/go/cmd/agentpin/bundle.go new file mode 100644 index 0000000..3c4be12 --- /dev/null +++ b/go/cmd/agentpin/bundle.go @@ -0,0 +1,73 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "os" + "strings" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/bundle" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// stringSlice is a flag.Value for --discovery / --revocation that can be +// repeated. +type stringSlice []string + +func (s *stringSlice) String() string { return strings.Join(*s, ",") } +func (s *stringSlice) Set(v string) error { *s = append(*s, v); return nil } + +func runBundle(args []string) error { + fs := flag.NewFlagSet("bundle", flag.ContinueOnError) + var discFiles stringSlice + var revFiles stringSlice + fs.Var(&discFiles, "discovery", "Path to a discovery document JSON file (repeatable)") + fs.Var(&revFiles, "revocation", "Path to a revocation document JSON file (repeatable)") + output := fs.String("output", "", "Output path (defaults to stdout)") + if err := fs.Parse(args); err != nil { + return err + } + if len(discFiles) == 0 { + return fmt.Errorf("at least one --discovery file is required") + } + + b := bundle.NewTrustBundle(time.Now().UTC().Format(time.RFC3339)) + for _, p := range discFiles { + data, err := os.ReadFile(p) + if err != nil { + return err + } + var d types.DiscoveryDocument + if err := json.Unmarshal(data, &d); err != nil { + return fmt.Errorf("invalid discovery document %s: %w", p, err) + } + 
b.Documents = append(b.Documents, d) + } + for _, p := range revFiles { + data, err := os.ReadFile(p) + if err != nil { + return err + } + var r types.RevocationDocument + if err := json.Unmarshal(data, &r); err != nil { + return fmt.Errorf("invalid revocation document %s: %w", p, err) + } + b.Revocations = append(b.Revocations, r) + } + + out, err := json.MarshalIndent(b, "", " ") + if err != nil { + return err + } + if *output != "" { + if err := os.WriteFile(*output, out, 0o644); err != nil { + return err + } + fmt.Fprintln(os.Stderr, "Trust bundle written to", *output) + } else { + fmt.Println(string(out)) + } + return nil +} diff --git a/go/cmd/agentpin/issue.go b/go/cmd/agentpin/issue.go new file mode 100644 index 0000000..f212c5f --- /dev/null +++ b/go/cmd/agentpin/issue.go @@ -0,0 +1,86 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "os" + "strings" + + "github.com/ThirdKeyAi/agentpin/go/pkg/credential" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func runIssue(args []string) error { + fs := flag.NewFlagSet("issue", flag.ContinueOnError) + privKeyPath := fs.String("private-key", "", "Path to private key PEM file (required)") + kid := fs.String("kid", "", "Key identifier (required)") + issuer := fs.String("issuer", "", "Issuer domain (required)") + agentID := fs.String("agent-id", "", "Agent URN (required)") + audience := fs.String("audience", "", "Audience domain (optional)") + caps := fs.String("capabilities", "", "Comma-separated capabilities (required)") + ttl := fs.Uint64("ttl", 3600, "Credential TTL in seconds") + delChainPath := fs.String("delegation-chain", "", "JSON file with delegation chain entries") + constraintsArg := fs.String("constraints", "", "JSON string or file with constraint overrides") + if err := fs.Parse(args); err != nil { + return err + } + if *privKeyPath == "" || *kid == "" || *issuer == "" || *agentID == "" || *caps == "" { + return 
fmt.Errorf("--private-key, --kid, --issuer, --agent-id, --capabilities are required") + } + + pemData, err := os.ReadFile(*privKeyPath) + if err != nil { + return err + } + priv, err := crypto.LoadPrivateKey(string(pemData)) + if err != nil { + return err + } + + capList := []types.Capability{} + for _, c := range strings.Split(*caps, ",") { + c = strings.TrimSpace(c) + if c == "" { + continue + } + capList = append(capList, types.Capability(c)) + } + + var constraints *types.Constraints + if *constraintsArg != "" { + var data []byte + if _, err := os.Stat(*constraintsArg); err == nil { + data, err = os.ReadFile(*constraintsArg) + if err != nil { + return err + } + } else { + data = []byte(*constraintsArg) + } + var c types.Constraints + if err := json.Unmarshal(data, &c); err != nil { + return fmt.Errorf("invalid constraints JSON: %w", err) + } + constraints = &c + } + + var chain []types.DelegationAttestation + if *delChainPath != "" { + data, err := os.ReadFile(*delChainPath) + if err != nil { + return err + } + if err := json.Unmarshal(data, &chain); err != nil { + return fmt.Errorf("invalid delegation chain JSON: %w", err) + } + } + + jwt, err := credential.IssueCredential(priv, *kid, *issuer, *agentID, *audience, capList, constraints, chain, *ttl) + if err != nil { + return err + } + fmt.Println(jwt) + return nil +} diff --git a/go/cmd/agentpin/keygen.go b/go/cmd/agentpin/keygen.go new file mode 100644 index 0000000..ae146ef --- /dev/null +++ b/go/cmd/agentpin/keygen.go @@ -0,0 +1,66 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "os" + "path/filepath" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" +) + +func runKeygen(args []string) error { + fs := flag.NewFlagSet("keygen", flag.ContinueOnError) + domain := fs.String("domain", "", "Domain this key is associated with (required)") + kid := fs.String("kid", "", "Key identifier, e.g. 
'example-2026-01' (required)") + outputDir := fs.String("output-dir", ".", "Output directory for key files") + format := fs.String("format", "both", "Output format: jwk, pem, or both") + if err := fs.Parse(args); err != nil { + return err + } + if *domain == "" || *kid == "" { + return fmt.Errorf("--domain and --kid are required") + } + + kp, err := crypto.GenerateKeyPair() + if err != nil { + return err + } + if err := os.MkdirAll(*outputDir, 0o755); err != nil { + return err + } + + privPath := filepath.Join(*outputDir, *kid+".private.pem") + if err := os.WriteFile(privPath, []byte(kp.PrivateKeyPEM), 0o600); err != nil { + return err + } + fmt.Fprintln(os.Stderr, "Generated ECDSA P-256 keypair for domain", *domain, "(kid:", *kid+")") + fmt.Fprintln(os.Stderr, " Private key:", privPath) + + if *format == "pem" || *format == "both" { + pubPath := filepath.Join(*outputDir, *kid+".public.pem") + if err := os.WriteFile(pubPath, []byte(kp.PublicKeyPEM), 0o644); err != nil { + return err + } + fmt.Fprintln(os.Stderr, " Public key (PEM):", pubPath) + } + if *format == "jwk" || *format == "both" { + j, err := jwk.PEMToJWK(kp.PublicKeyPEM, *kid) + if err != nil { + return err + } + jb, err := json.MarshalIndent(j, "", " ") + if err != nil { + return err + } + jwkPath := filepath.Join(*outputDir, *kid+".public.jwk.json") + if err := os.WriteFile(jwkPath, jb, 0o644); err != nil { + return err + } + fmt.Fprintln(os.Stderr, " Public key (JWK):", jwkPath) + } + + return nil +} diff --git a/go/cmd/agentpin/main.go b/go/cmd/agentpin/main.go new file mode 100644 index 0000000..1888571 --- /dev/null +++ b/go/cmd/agentpin/main.go @@ -0,0 +1,61 @@ +// Command agentpin is the AgentPin CLI: keygen, issue, verify, bundle. +// +// It mirrors the Rust `agentpin` binary's subcommand surface to keep +// operator workflows portable across language stacks. 
+package main + +import ( + "fmt" + "os" +) + +func main() { + if len(os.Args) < 2 { + usage() + os.Exit(2) + } + cmd := os.Args[1] + args := os.Args[2:] + + var err error + switch cmd { + case "keygen": + err = runKeygen(args) + case "issue": + err = runIssue(args) + case "verify": + err = runVerify(args) + case "bundle": + err = runBundle(args) + case "-h", "--help", "help": + usage() + return + case "version", "--version", "-V": + runVersion() + return + default: + fmt.Fprintf(os.Stderr, "unknown subcommand: %s\n", cmd) + usage() + os.Exit(2) + } + if err != nil { + fmt.Fprintln(os.Stderr, "error:", err) + os.Exit(1) + } +} + +func usage() { + fmt.Fprintln(os.Stderr, `agentpin — AgentPin credential management CLI + +USAGE: + agentpin [OPTIONS] + +SUBCOMMANDS: + keygen Generate a new ECDSA P-256 keypair + issue Issue an agent credential (JWT) + verify Verify an agent credential + bundle Create a trust bundle from discovery and revocation documents + version Print the AgentPin Go SDK version + +Run 'agentpin --help' for subcommand help.`) +} diff --git a/go/cmd/agentpin/verify.go b/go/cmd/agentpin/verify.go new file mode 100644 index 0000000..beaa0de --- /dev/null +++ b/go/cmd/agentpin/verify.go @@ -0,0 +1,132 @@ +package main + +import ( + "encoding/json" + "flag" + "fmt" + "os" + "strings" + + "github.com/ThirdKeyAi/agentpin/go/pkg/pinning" + "github.com/ThirdKeyAi/agentpin/go/pkg/resolver" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" + "github.com/ThirdKeyAi/agentpin/go/pkg/verification" +) + +func runVerify(args []string) error { + fs := flag.NewFlagSet("verify", flag.ContinueOnError) + credentialArg := fs.String("credential", "", "JWT credential string or file path (required)") + discoveryPath := fs.String("discovery", "", "Path to discovery document JSON file (offline)") + revocationPath := fs.String("revocation", "", "Path to revocation document JSON file (offline)") + pinStorePath := fs.String("pin-store", "", "Path to pin store JSON file") + audience 
:= fs.String("audience", "", "Verifier's audience domain") + offline := fs.Bool("offline", false, "Use offline-only verification") + trustBundlePath := fs.String("trust-bundle", "", "Trust bundle JSON file for resolver mode") + discoveryDir := fs.String("discovery-dir", "", "Directory of {domain}.json discovery files for resolver mode") + if err := fs.Parse(args); err != nil { + return err + } + if *credentialArg == "" { + return fmt.Errorf("--credential is required") + } + + cred := *credentialArg + if _, err := os.Stat(cred); err == nil { + data, err := os.ReadFile(cred) + if err != nil { + return err + } + cred = strings.TrimSpace(string(data)) + } + + pinStore := pinning.NewKeyPinStore() + if *pinStorePath != "" { + if data, err := os.ReadFile(*pinStorePath); err == nil { + if err := pinStore.LoadFromJSON(data); err != nil { + return fmt.Errorf("load pin store: %w", err) + } + } + } + cfg := verification.DefaultVerifierConfig() + + var result verification.Result + if *trustBundlePath != "" || *discoveryDir != "" { + r, err := buildResolver(*trustBundlePath, *discoveryDir) + if err != nil { + return err + } + result = verification.VerifyCredentialWithResolver(cred, r, pinStore, *audience, cfg) + } else if *offline || *discoveryPath != "" { + if *discoveryPath == "" { + return fmt.Errorf("--discovery is required for offline verification") + } + dData, err := os.ReadFile(*discoveryPath) + if err != nil { + return err + } + var disc types.DiscoveryDocument + if err := json.Unmarshal(dData, &disc); err != nil { + return err + } + var rev *types.RevocationDocument + if *revocationPath != "" { + rData, err := os.ReadFile(*revocationPath) + if err != nil { + return err + } + var r types.RevocationDocument + if err := json.Unmarshal(rData, &r); err != nil { + return err + } + rev = &r + } + result = verification.VerifyCredentialOffline(cred, &disc, rev, pinStore, *audience, cfg) + } else { + // Online verification via WellKnownResolver. 
+ r := resolver.NewWellKnownResolver() + result = verification.VerifyCredentialWithResolver(cred, r, pinStore, *audience, cfg) + } + + out, err := json.MarshalIndent(result, "", " ") + if err != nil { + return err + } + fmt.Println(string(out)) + + if *pinStorePath != "" { + data, err := pinStore.MarshalJSON() + if err == nil { + _ = os.WriteFile(*pinStorePath, data, 0o644) + } + } + + if !result.Valid { + os.Exit(1) + } + return nil +} + +func buildResolver(trustBundlePath, discoveryDir string) (resolver.DiscoveryResolver, error) { + resolvers := []resolver.DiscoveryResolver{} + if trustBundlePath != "" { + data, err := os.ReadFile(trustBundlePath) + if err != nil { + return nil, err + } + r, err := resolver.TrustBundleResolverFromJSON(data) + if err != nil { + return nil, err + } + resolvers = append(resolvers, r) + } + if discoveryDir != "" { + resolvers = append(resolvers, resolver.NewLocalFileResolver(discoveryDir, "")) + } + if len(resolvers) == 0 { + return nil, fmt.Errorf("at least --trust-bundle or --discovery-dir must be provided") + } + if len(resolvers) == 1 { + return resolvers[0], nil + } + return resolver.NewChainResolver(resolvers), nil +} diff --git a/go/cmd/agentpin/version.go b/go/cmd/agentpin/version.go new file mode 100644 index 0000000..9f0ac34 --- /dev/null +++ b/go/cmd/agentpin/version.go @@ -0,0 +1,12 @@ +package main + +import ( + "fmt" + + "github.com/ThirdKeyAi/agentpin/go/internal/version" +) + +func runVersion() { + fmt.Printf("agentpin %s (protocol %s, bundle %s)\n", + version.Version, version.ProtocolVersion, version.BundleVersion) +} diff --git a/go/go.mod b/go/go.mod new file mode 100644 index 0000000..f8ca70d --- /dev/null +++ b/go/go.mod @@ -0,0 +1,5 @@ +module github.com/ThirdKeyAi/agentpin/go + +go 1.21 + +require github.com/google/uuid v1.6.0 diff --git a/go/go.sum b/go/go.sum new file mode 100644 index 0000000..7790d7c --- /dev/null +++ b/go/go.sum @@ -0,0 +1,2 @@ +github.com/google/uuid v1.6.0 
h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= diff --git a/go/internal/version/version.go b/go/internal/version/version.go new file mode 100644 index 0000000..4f6e497 --- /dev/null +++ b/go/internal/version/version.go @@ -0,0 +1,14 @@ +// Package version exposes the AgentPin Go SDK version, kept in sync with the +// Rust crate, JavaScript package, and Python package on the same release. +package version + +// Version is the current AgentPin Go SDK version. It must match the Rust +// crate, JavaScript package, and Python package versions; CI enforces this. +const Version = "0.3.0" + +// ProtocolVersion is the AgentPin protocol version embedded in discovery +// documents and credentials. +const ProtocolVersion = "0.1" + +// BundleVersion is the AgentPin trust bundle format version. +const BundleVersion = "0.1" diff --git a/go/pkg/a2a/a2a.go b/go/pkg/a2a/a2a.go new file mode 100644 index 0000000..b905a21 --- /dev/null +++ b/go/pkg/a2a/a2a.go @@ -0,0 +1,200 @@ +// Package a2a builds and verifies A2A AgentCards with the AgentPin +// cryptographic-identity extension (v0.3.0). +// +// Wire-compatible with the Rust, JavaScript, and Python ports: cards signed +// in any language verify cleanly in the others. Signing input is the +// canonical bytes of the AgentCard with the `agentpin` field cleared +// (sorted-key JSON, compact separators, no whitespace). +package a2a + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// BuildOptions controls optional fields when constructing an unsigned card. +type BuildOptions struct { + // Skills overrides the auto-generated skill list. When empty, capabilities + // in the declaration are mapped 1:1 to skills via + // types.CapabilityToSkill. 
+ Skills []types.A2aAgentSkill + Streaming bool + PushNotifications bool +} + +// BuildUnsignedAgentCard maps an AgentPin AgentDeclaration to a minimal A2A +// AgentCard (no extension). Capabilities map 1:1 to skills; the +// allowed_domains constraint is copied into capabilities.allowed_domains. +func BuildUnsignedAgentCard(url string, declaration *types.AgentDeclaration, opts BuildOptions) types.A2aAgentCard { + var skills []types.A2aAgentSkill + if len(opts.Skills) > 0 { + skills = append(skills, opts.Skills...) + } else { + skills = make([]types.A2aAgentSkill, 0, len(declaration.Capabilities)) + for _, cap := range declaration.Capabilities { + skills = append(skills, types.CapabilityToSkill(cap)) + } + } + + allowed := types.AllowedDomainsHelper.FromConstraints(declaration.Constraints) + caps := types.A2aAgentCapabilities{ + Streaming: opts.Streaming, + PushNotifications: opts.PushNotifications, + } + if !types.AllowedDomainsHelper.IsUnrestricted(allowed) { + caps.AllowedDomains = allowed + } + + return types.A2aAgentCard{ + Name: declaration.Name, + Description: declaration.Description, + Version: declaration.Version, + URL: url, + Capabilities: caps, + Skills: skills, + } +} + +// SignAgentCard signs the canonical bytes of `unsigned` (with the extension +// cleared) using the PEM-encoded private key and writes the AgentpinExtension +// payload onto a copy of the card. +func SignAgentCard(unsigned types.A2aAgentCard, privateKeyPEM, kid, agentpinEndpoint string) (types.A2aAgentCard, error) { + if agentpinEndpoint == "" { + return types.A2aAgentCard{}, types.NewVerificationError( + types.ErrDiscoveryInvalid, + "SignAgentCard requires agentpin_endpoint", + ) + } + priv, err := crypto.LoadPrivateKey(privateKeyPEM) + if err != nil { + return types.A2aAgentCard{}, err + } + + // Build the canonical signing input from the unsigned card. 
+ signing := unsigned + signing.Agentpin = nil + canonical, err := canonicalizeCard(signing) + if err != nil { + return types.A2aAgentCard{}, err + } + + signature, err := crypto.SignBytes(priv, canonical) + if err != nil { + return types.A2aAgentCard{}, err + } + + pubPEM, err := crypto.MarshalPublicKeyPEM(&priv.PublicKey) + if err != nil { + return types.A2aAgentCard{}, err + } + publicJWK, err := jwk.PEMToJWK(pubPEM, kid) + if err != nil { + return types.A2aAgentCard{}, err + } + + signed := unsigned + signed.Agentpin = &types.AgentpinExtension{ + AgentpinEndpoint: agentpinEndpoint, + PublicKeyJWK: publicJWK, + Signature: signature, + } + return signed, nil +} + +// BuildAndSignAgentCard is a one-shot helper combining BuildUnsignedAgentCard +// and SignAgentCard. +func BuildAndSignAgentCard( + url string, + declaration *types.AgentDeclaration, + privateKeyPEM, kid, agentpinEndpoint string, + opts BuildOptions, +) (types.A2aAgentCard, error) { + unsigned := BuildUnsignedAgentCard(url, declaration, opts) + return SignAgentCard(unsigned, privateKeyPEM, kid, agentpinEndpoint) +} + +// VerifyAgentpinExtension verifies the signature in card.Agentpin against the +// JWK embedded in the same extension. Returns nil on success or a +// VerificationError(ErrDiscoveryInvalid) on any failure (missing extension, +// malformed JWK, signature mismatch). +// +// This proves only that the card has not been tampered with relative to the +// key inside its own extension. The caller still has to verify the JWK +// chains back to a trusted AgentPin discovery document. 
+func VerifyAgentpinExtension(card *types.A2aAgentCard) error { + if card == nil || card.Agentpin == nil { + return types.NewVerificationError( + types.ErrDiscoveryInvalid, + "AgentCard has no agentpin extension", + ) + } + withoutExt := *card + withoutExt.Agentpin = nil + canonical, err := canonicalizeCard(withoutExt) + if err != nil { + return err + } + pubPEM, err := jwk.JWKToPEM(&card.Agentpin.PublicKeyJWK) + if err != nil { + return err + } + ok, err := crypto.VerifySignature(pubPEM, canonical, card.Agentpin.Signature) + if err != nil { + return err + } + if !ok { + return types.NewVerificationError( + types.ErrDiscoveryInvalid, + "A2A AgentCard signature did not verify against extension JWK", + ) + } + return nil +} + +// ExtensionKeyThumbprint returns the JWK thumbprint of the public key +// embedded in an AgentpinExtension. +func ExtensionKeyThumbprint(ext *types.AgentpinExtension) string { + return jwk.JWKThumbprint(&ext.PublicKeyJWK) +} + +// CanonicalizeForSigning produces the byte-identical canonical signing input +// for an AgentCard: sorted-key JSON, compact separators, "null"/empty fields +// dropped via the struct's omitempty annotations. Cross-language byte-equal +// to the Rust SDK's canonicalisation. +func CanonicalizeForSigning(value interface{}) ([]byte, error) { + return canonicalize(value) +} + +// canonicalizeCard re-serialises an AgentCard with sorted object keys so the +// signature input is byte-identical with the Rust, JS, and Python SDKs. +func canonicalizeCard(card types.A2aAgentCard) ([]byte, error) { + return canonicalize(card) +} + +// canonicalize re-marshals the value through map[string]interface{}, which +// Go's encoding/json sorts alphabetically by key — producing byte-identical +// output to the Rust SDK's BTreeMap-based canonicalisation. 
+func canonicalize(value interface{}) ([]byte, error) { + raw, err := json.Marshal(value) + if err != nil { + return nil, fmt.Errorf("marshal: %w", err) + } + var generic interface{} + if err := json.Unmarshal(raw, &generic); err != nil { + return nil, fmt.Errorf("unmarshal for canonicalisation: %w", err) + } + out, err := json.Marshal(generic) + if err != nil { + return nil, fmt.Errorf("re-marshal: %w", err) + } + return out, nil +} + +// ErrNoExtension is reported by helpers that require a signed card. Kept as a +// distinct sentinel for callers who prefer errors.Is to ErrorCode matching. +var ErrNoExtension = errors.New("AgentCard has no agentpin extension") diff --git a/go/pkg/a2a/a2a_test.go b/go/pkg/a2a/a2a_test.go new file mode 100644 index 0000000..1cb4b60 --- /dev/null +++ b/go/pkg/a2a/a2a_test.go @@ -0,0 +1,187 @@ +package a2a + +import ( + "encoding/json" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func declaration(caps []string, allowed []string) *types.AgentDeclaration { + capabilities := make([]types.Capability, 0, len(caps)) + for _, c := range caps { + capabilities = append(capabilities, types.Capability(c)) + } + decl := &types.AgentDeclaration{ + AgentID: "urn:agentpin:example.com:test", + Name: "Test Agent", + Description: "test", + Version: "1.0.0", + Capabilities: capabilities, + Status: types.AgentActive, + } + if allowed != nil { + decl.Constraints = &types.Constraints{AllowedDomains: allowed} + } + return decl +} + +func TestCapabilityToSkillMapsStrings(t *testing.T) { + skill := types.CapabilityToSkill(types.Capability("read:customers/*")) + if skill.ID != "read:customers/*" || skill.Name != "read:customers/*" { + t.Fatalf("unexpected skill: %+v", skill) + } +} + +func TestBuildUnsignedAgentCardMapsCapabilities(t *testing.T) { + card := BuildUnsignedAgentCard("https://example.com/agent", + 
declaration([]string{"read:customers", "write:invoices"}, nil), + BuildOptions{}) + if len(card.Skills) != 2 { + t.Fatalf("want 2 skills, got %d", len(card.Skills)) + } + if card.Skills[0].ID != "read:customers" || card.Skills[1].ID != "write:invoices" { + t.Fatalf("skill IDs wrong: %+v", card.Skills) + } + if card.Agentpin != nil { + t.Fatalf("unsigned card should have no agentpin extension") + } +} + +func TestBuildUnsignedAgentCardMapsAllowedDomains(t *testing.T) { + card := BuildUnsignedAgentCard("https://example.com/agent", + declaration([]string{"read:*"}, []string{"a.com", "b.com"}), + BuildOptions{}) + if got, want := card.Capabilities.AllowedDomains, []string{"a.com", "b.com"}; !equalSlices(got, want) { + t.Fatalf("allowed_domains wrong: %v", got) + } +} + +func TestBuildUnsignedAgentCardOmitsAllowedDomainsWhenUnrestricted(t *testing.T) { + card := BuildUnsignedAgentCard("https://example.com/agent", + declaration([]string{"read:*"}, nil), BuildOptions{}) + if card.Capabilities.AllowedDomains != nil { + t.Fatalf("expected unrestricted allowed_domains, got %v", card.Capabilities.AllowedDomains) + } +} + +func TestSignRequiresAgentpinEndpoint(t *testing.T) { + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + unsigned := BuildUnsignedAgentCard("https://example.com/agent", declaration([]string{"read:*"}, nil), BuildOptions{}) + if _, err := SignAgentCard(unsigned, kp.PrivateKeyPEM, "kid-1", ""); err == nil { + t.Fatal("expected error when agentpin endpoint is empty") + } +} + +func TestSignedCardRoundTripsAndVerifies(t *testing.T) { + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + card, err := BuildAndSignAgentCard( + "https://example.com/agent", + declaration([]string{"read:customers", "write:invoices"}, []string{"partner.com"}), + kp.PrivateKeyPEM, "kid-1", + "https://example.com/.well-known/agent-identity.json", + BuildOptions{Streaming: true}, + ) + if err != nil { + t.Fatal(err) + } + if card.Agentpin 
== nil { + t.Fatal("expected signed card to have agentpin extension") + } + if err := VerifyAgentpinExtension(&card); err != nil { + t.Fatalf("verify failed: %v", err) + } + raw, err := json.Marshal(&card) + if err != nil { + t.Fatal(err) + } + var roundtrip types.A2aAgentCard + if err := json.Unmarshal(raw, &roundtrip); err != nil { + t.Fatal(err) + } + if err := VerifyAgentpinExtension(&roundtrip); err != nil { + t.Fatalf("verify after roundtrip failed: %v", err) + } +} + +func TestVerifyFailsWhenExtensionMissing(t *testing.T) { + card := BuildUnsignedAgentCard("https://example.com/agent", declaration([]string{"read:*"}, nil), BuildOptions{}) + if err := VerifyAgentpinExtension(&card); err == nil { + t.Fatal("expected error for card without extension") + } +} + +func TestVerifyFailsWhenCardTampered(t *testing.T) { + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + card, err := BuildAndSignAgentCard( + "https://example.com/agent", + declaration([]string{"read:customers"}, nil), + kp.PrivateKeyPEM, "kid-1", + "https://example.com/.well-known/agent-identity.json", + BuildOptions{}, + ) + if err != nil { + t.Fatal(err) + } + card.URL = "https://attacker.example/agent" + if err := VerifyAgentpinExtension(&card); err == nil { + t.Fatal("expected verify failure on tampered card") + } +} + +func TestExtensionKeyThumbprintMatchesJWKThumbprint(t *testing.T) { + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + card, err := BuildAndSignAgentCard( + "https://example.com/agent", + declaration([]string{"read:*"}, nil), + kp.PrivateKeyPEM, "kid-1", + "https://example.com/.well-known/agent-identity.json", + BuildOptions{}, + ) + if err != nil { + t.Fatal(err) + } + got := ExtensionKeyThumbprint(card.Agentpin) + want := jwk.JWKThumbprint(&card.Agentpin.PublicKeyJWK) + if got != want { + t.Fatalf("thumbprint mismatch: got=%s want=%s", got, want) + } +} + +func TestCanonicalizeSortsKeysAndDropsNullishOmitempty(t *testing.T) { + 
value := map[string]interface{}{"b": 1.0, "a": map[string]interface{}{"d": 4.0, "c": 3.0}} + out, err := CanonicalizeForSigning(value) + if err != nil { + t.Fatal(err) + } + want := `{"a":{"c":3,"d":4},"b":1}` + if string(out) != want { + t.Fatalf("canonical mismatch: got=%s want=%s", out, want) + } +} + +func equalSlices(a, b []string) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} diff --git a/go/pkg/bundle/bundle.go b/go/pkg/bundle/bundle.go new file mode 100644 index 0000000..8c4c533 --- /dev/null +++ b/go/pkg/bundle/bundle.go @@ -0,0 +1,29 @@ +// Package bundle provides helpers for constructing and querying AgentPin +// trust bundles for offline / air-gapped verification. +package bundle + +import ( + "github.com/ThirdKeyAi/agentpin/go/internal/version" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// NewTrustBundle creates an empty trust bundle with the given creation +// timestamp. +func NewTrustBundle(createdAt string) types.TrustBundle { + return types.TrustBundle{ + AgentpinBundleVersion: version.BundleVersion, + CreatedAt: createdAt, + Documents: []types.DiscoveryDocument{}, + Revocations: []types.RevocationDocument{}, + } +} + +// FindBundleDiscovery returns the first discovery document in b matching domain. +func FindBundleDiscovery(b *types.TrustBundle, domain string) *types.DiscoveryDocument { + return b.FindDiscovery(domain) +} + +// FindBundleRevocation returns the first revocation document in b matching domain. 
+func FindBundleRevocation(b *types.TrustBundle, domain string) *types.RevocationDocument { + return b.FindRevocation(domain) +} diff --git a/go/pkg/bundle/bundle_test.go b/go/pkg/bundle/bundle_test.go new file mode 100644 index 0000000..4918c78 --- /dev/null +++ b/go/pkg/bundle/bundle_test.go @@ -0,0 +1,42 @@ +package bundle + +import ( + "encoding/json" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func TestNewTrustBundle(t *testing.T) { + b := NewTrustBundle("2026-02-10T00:00:00Z") + if b.AgentpinBundleVersion != "0.1" { + t.Fatal("bundle version") + } + if len(b.Documents) != 0 || len(b.Revocations) != 0 { + t.Fatal("should start empty") + } +} + +func TestFindHelpers(t *testing.T) { + b := types.TrustBundle{ + Documents: []types.DiscoveryDocument{{Entity: "example.com"}}, + } + if FindBundleDiscovery(&b, "example.com") == nil { + t.Fatal("hit") + } + if FindBundleDiscovery(&b, "missing") != nil { + t.Fatal("miss") + } +} + +func TestBundleSerializesEmpty(t *testing.T) { + b := NewTrustBundle("2026-02-10T00:00:00Z") + data, err := json.Marshal(b) + if err != nil { + t.Fatal(err) + } + var b2 types.TrustBundle + if err := json.Unmarshal(data, &b2); err != nil { + t.Fatal(err) + } +} diff --git a/go/pkg/credential/credential.go b/go/pkg/credential/credential.go new file mode 100644 index 0000000..ef83b54 --- /dev/null +++ b/go/pkg/credential/credential.go @@ -0,0 +1,64 @@ +// Package credential issues AgentPin credential JWTs and validates credential +// capability declarations against discovery documents. +package credential + +import ( + "crypto/ecdsa" + "time" + + "github.com/google/uuid" + + "github.com/ThirdKeyAi/agentpin/go/internal/version" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwt" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// IssueCredential issues a new agent credential JWT signed by priv. 
The +// returned compact JWT carries header alg=ES256 / typ=agentpin-credential+jwt +// — anything else would be rejected by the verifier. +// +// audience may be empty (omits the "aud" claim). delegationChain may be nil. +func IssueCredential( + priv *ecdsa.PrivateKey, + kid string, + issuer string, + agentID string, + audience string, + capabilities []types.Capability, + constraints *types.Constraints, + delegationChain []types.DelegationAttestation, + ttlSecs uint64, +) (string, error) { + now := time.Now().Unix() + header := &types.JWTHeader{ + Alg: jwt.RequiredAlg, + Typ: jwt.RequiredTyp, + Kid: kid, + } + payload := &types.JWTPayload{ + Iss: issuer, + Sub: agentID, + Aud: audience, + Iat: now, + Exp: now + int64(ttlSecs), + Jti: uuid.NewString(), + AgentpinVersion: version.ProtocolVersion, + Capabilities: capabilities, + Constraints: constraints, + DelegationChain: delegationChain, + } + return jwt.EncodeJWT(header, payload, priv) +} + +// ValidateCredentialAgainstDiscovery checks that the credential's capability +// declarations are a subset of (covered by) the agent's discovery +// declarations. 
+func ValidateCredentialAgainstDiscovery(credentialCaps, discoveryCaps []types.Capability) error { + if !types.CapabilitiesSubset(discoveryCaps, credentialCaps) { + return types.NewVerificationError( + types.ErrCapabilityExceeded, + "Credential capabilities exceed discovery document", + ) + } + return nil +} diff --git a/go/pkg/credential/credential_test.go b/go/pkg/credential/credential_test.go new file mode 100644 index 0000000..2bae874 --- /dev/null +++ b/go/pkg/credential/credential_test.go @@ -0,0 +1,51 @@ +package credential + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwt" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func TestIssueCredential(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + + cred, err := IssueCredential( + priv, "test-2026-01", "example.com", "urn:agentpin:example.com:agent", + "verifier.com", + []types.Capability{"read:data"}, + nil, nil, 3600, + ) + if err != nil { + t.Fatal(err) + } + header, payload, err := jwt.VerifyJWT(cred, pub) + if err != nil { + t.Fatal(err) + } + if header.Kid != "test-2026-01" { + t.Fatalf("kid: %s", header.Kid) + } + if payload.Iss != "example.com" || payload.Sub != "urn:agentpin:example.com:agent" { + t.Fatal("iss/sub mismatch") + } + if payload.Aud != "verifier.com" || payload.AgentpinVersion != "0.1" { + t.Fatal("aud/version mismatch") + } + if payload.Exp <= payload.Iat { + t.Fatal("exp must be > iat") + } +} + +func TestValidateCredentialAgainstDiscovery(t *testing.T) { + disc := []types.Capability{"read:*", "write:report"} + if err := ValidateCredentialAgainstDiscovery([]types.Capability{"read:data"}, disc); err != nil { + t.Fatalf("subset should pass: %v", err) + } + if err := ValidateCredentialAgainstDiscovery([]types.Capability{"delete:data"}, []types.Capability{"read:data"}); err == nil { + t.Fatal("delete > read 
should fail") + } +} diff --git a/go/pkg/crypto/crypto.go b/go/pkg/crypto/crypto.go new file mode 100644 index 0000000..fc8b5ac --- /dev/null +++ b/go/pkg/crypto/crypto.go @@ -0,0 +1,187 @@ +// Package crypto provides ECDSA P-256 key generation and DER-signature +// helpers used by the AgentPin Go SDK. Wire format and key encoding match +// the Rust, JavaScript, and Python ports. +package crypto + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/sha256" + "crypto/x509" + "encoding/base64" + "encoding/hex" + "encoding/pem" + "errors" + "fmt" +) + +// KeyPair holds a generated ECDSA P-256 key pair in PEM (PKCS#8 / SPKI) form. +type KeyPair struct { + PrivateKeyPEM string + PublicKeyPEM string +} + +// GenerateKeyPair generates a new ECDSA P-256 key pair and returns it as PEM +// (PKCS#8 private + SPKI public). +func GenerateKeyPair() (*KeyPair, error) { + priv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) + if err != nil { + return nil, fmt.Errorf("ecdsa generate: %w", err) + } + privPEM, err := MarshalPrivateKeyPEM(priv) + if err != nil { + return nil, err + } + pubPEM, err := MarshalPublicKeyPEM(&priv.PublicKey) + if err != nil { + return nil, err + } + return &KeyPair{PrivateKeyPEM: privPEM, PublicKeyPEM: pubPEM}, nil +} + +// MarshalPrivateKeyPEM serializes an ECDSA P-256 private key as PKCS#8 PEM +// using LF line endings to match the Rust SDK. +func MarshalPrivateKeyPEM(priv *ecdsa.PrivateKey) (string, error) { + der, err := x509.MarshalPKCS8PrivateKey(priv) + if err != nil { + return "", fmt.Errorf("marshal pkcs8: %w", err) + } + return encodePEM("PRIVATE KEY", der), nil +} + +// MarshalPublicKeyPEM serializes an ECDSA P-256 public key as SPKI PEM with +// LF line endings. 
+func MarshalPublicKeyPEM(pub *ecdsa.PublicKey) (string, error) { + der, err := x509.MarshalPKIXPublicKey(pub) + if err != nil { + return "", fmt.Errorf("marshal spki: %w", err) + } + return encodePEM("PUBLIC KEY", der), nil +} + +// encodePEM writes a PEM block with LF line endings to match the Rust port's +// EncodeMemoryLF behaviour. +func encodePEM(blockType string, der []byte) string { + const lineLen = 64 + b64 := base64.StdEncoding.EncodeToString(der) + out := "-----BEGIN " + blockType + "-----\n" + for i := 0; i < len(b64); i += lineLen { + end := i + lineLen + if end > len(b64) { + end = len(b64) + } + out += b64[i:end] + "\n" + } + out += "-----END " + blockType + "-----\n" + return out +} + +// LoadPrivateKey parses a PEM-encoded ECDSA P-256 private key (PKCS#8 or +// SEC1 EC). +func LoadPrivateKey(pemStr string) (*ecdsa.PrivateKey, error) { + block, _ := pem.Decode([]byte(pemStr)) + if block == nil { + return nil, errors.New("invalid PEM block") + } + if key, err := x509.ParsePKCS8PrivateKey(block.Bytes); err == nil { + ec, ok := key.(*ecdsa.PrivateKey) + if !ok { + return nil, errors.New("not an ECDSA private key") + } + return ec, nil + } + ec, err := x509.ParseECPrivateKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("parse private key: %w", err) + } + return ec, nil +} + +// LoadPublicKey parses a PEM-encoded ECDSA P-256 public key (SPKI). 
+func LoadPublicKey(pemStr string) (*ecdsa.PublicKey, error) { + block, _ := pem.Decode([]byte(pemStr)) + if block == nil { + return nil, errors.New("invalid PEM block") + } + pub, err := x509.ParsePKIXPublicKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("parse public key: %w", err) + } + ec, ok := pub.(*ecdsa.PublicKey) + if !ok { + return nil, errors.New("not an ECDSA public key") + } + if ec.Curve != elliptic.P256() { + return nil, errors.New("not a P-256 key") + } + return ec, nil +} + +// SignBytes signs raw bytes with priv and returns the DER-encoded signature +// base64-encoded (standard, padded — matches the Rust `sign_data`/`sign_bytes` +// helpers). +func SignBytes(priv *ecdsa.PrivateKey, data []byte) (string, error) { + der, err := ecdsa.SignASN1(rand.Reader, priv, hash(data)) + if err != nil { + return "", fmt.Errorf("sign: %w", err) + } + return base64.StdEncoding.EncodeToString(der), nil +} + +// VerifyBytes verifies a base64-standard-encoded DER signature against data. +func VerifyBytes(pub *ecdsa.PublicKey, data []byte, sigB64 string) (bool, error) { + sig, err := base64.StdEncoding.DecodeString(sigB64) + if err != nil { + return false, fmt.Errorf("decode signature: %w", err) + } + return ecdsa.VerifyASN1(pub, hash(data), sig), nil +} + +// SignData signs data with a PEM-encoded private key. Returns base64-encoded +// DER signature. +func SignData(privPEM string, data []byte) (string, error) { + priv, err := LoadPrivateKey(privPEM) + if err != nil { + return "", err + } + return SignBytes(priv, data) +} + +// VerifySignature verifies a base64-standard signature against data using a +// PEM-encoded public key. +func VerifySignature(pubPEM string, data []byte, sigB64 string) (bool, error) { + pub, err := LoadPublicKey(pubPEM) + if err != nil { + return false, err + } + return VerifyBytes(pub, data, sigB64) +} + +// GenerateKeyID returns the SHA-256 hex digest of the SPKI DER bytes of a +// PEM-encoded public key. 
Matches the Rust `generate_key_id` output. +func GenerateKeyID(pubPEM string) (string, error) { + pub, err := LoadPublicKey(pubPEM) + if err != nil { + return "", err + } + der, err := x509.MarshalPKIXPublicKey(pub) + if err != nil { + return "", err + } + sum := sha256.Sum256(der) + return hex.EncodeToString(sum[:]), nil +} + +// SHA256Hex returns the SHA-256 hex digest of data. +func SHA256Hex(data []byte) string { + sum := sha256.Sum256(data) + return hex.EncodeToString(sum[:]) +} + +// hash returns the SHA-256 digest of data; used as the message hash for +// ECDSA-P256 signing per the AgentPin spec (ES256). +func hash(data []byte) []byte { + sum := sha256.Sum256(data) + return sum[:] +} diff --git a/go/pkg/crypto/crypto_test.go b/go/pkg/crypto/crypto_test.go new file mode 100644 index 0000000..014d860 --- /dev/null +++ b/go/pkg/crypto/crypto_test.go @@ -0,0 +1,75 @@ +package crypto + +import ( + "strings" + "testing" +) + +func TestGenerateKeyPair(t *testing.T) { + kp, err := GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + if !strings.HasPrefix(kp.PrivateKeyPEM, "-----BEGIN PRIVATE KEY-----") { + t.Fatalf("private PEM bad prefix: %q", kp.PrivateKeyPEM[:30]) + } + if !strings.HasPrefix(kp.PublicKeyPEM, "-----BEGIN PUBLIC KEY-----") { + t.Fatalf("public PEM bad prefix: %q", kp.PublicKeyPEM[:30]) + } +} + +func TestSignAndVerify(t *testing.T) { + kp, err := GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + data := []byte("hello agentpin") + sig, err := SignData(kp.PrivateKeyPEM, data) + if err != nil { + t.Fatal(err) + } + ok, err := VerifySignature(kp.PublicKeyPEM, data, sig) + if err != nil { + t.Fatal(err) + } + if !ok { + t.Fatal("signature should verify") + } + ok, _ = VerifySignature(kp.PublicKeyPEM, []byte("wrong"), sig) + if ok { + t.Fatal("wrong data must not verify") + } +} + +func TestWrongKeyRejection(t *testing.T) { + kp1, _ := GenerateKeyPair() + kp2, _ := GenerateKeyPair() + sig, _ := SignData(kp1.PrivateKeyPEM, []byte("test")) + ok, 
_ := VerifySignature(kp2.PublicKeyPEM, []byte("test"), sig) + if ok { + t.Fatal("wrong key should not verify") + } +} + +func TestGenerateKeyID(t *testing.T) { + kp, _ := GenerateKeyPair() + kid, err := GenerateKeyID(kp.PublicKeyPEM) + if err != nil { + t.Fatal(err) + } + if len(kid) != 64 { + t.Fatalf("kid length %d, want 64", len(kid)) + } + kid2, _ := GenerateKeyID(kp.PublicKeyPEM) + if kid != kid2 { + t.Fatal("GenerateKeyID must be deterministic") + } +} + +func TestSHA256Hex(t *testing.T) { + got := SHA256Hex([]byte("test")) + want := "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08" + if got != want { + t.Fatalf("SHA256Hex(test) = %q, want %q", got, want) + } +} diff --git a/go/pkg/delegation/delegation.go b/go/pkg/delegation/delegation.go new file mode 100644 index 0000000..14fad4b --- /dev/null +++ b/go/pkg/delegation/delegation.go @@ -0,0 +1,100 @@ +// Package delegation creates and verifies AgentPin delegation attestations +// that bind a delegating party (maker or deployer) to a delegatee for a +// fixed capability set. +package delegation + +import ( + "crypto/ecdsa" + "fmt" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// CanonicalAttestationInput returns the canonical signing input for a +// delegation attestation. Format: +// +// {domain}|{role}|{agent_id}|{delegatee_domain}|{delegatee_agent_id}|{capabilities_hash} +// +// The role is serialized as "maker" or "deployer" and the capabilities hash +// is computed via types.CapabilitiesHash. +func CanonicalAttestationInput( + domain string, + role types.DelegationRole, + agentID string, + delegateeDomain string, + delegateeAgentID string, + capabilities []types.Capability, +) string { + return fmt.Sprintf("%s|%s|%s|%s|%s|%s", + domain, role, agentID, delegateeDomain, delegateeAgentID, + types.CapabilitiesHash(capabilities)) +} + +// CreateAttestation builds and signs a delegation attestation. 
+func CreateAttestation( + priv *ecdsa.PrivateKey, + kid string, + domain string, + role types.DelegationRole, + agentID string, + delegateeDomain string, + delegateeAgentID string, + capabilities []types.Capability, +) (*types.DelegationAttestation, error) { + input := CanonicalAttestationInput(domain, role, agentID, delegateeDomain, delegateeAgentID, capabilities) + sig, err := crypto.SignBytes(priv, []byte(input)) + if err != nil { + return nil, err + } + return &types.DelegationAttestation{ + Domain: domain, + Role: role, + AgentID: agentID, + Kid: kid, + Attestation: sig, + }, nil +} + +// VerifyAttestation verifies a delegation attestation signature. +func VerifyAttestation( + att *types.DelegationAttestation, + pub *ecdsa.PublicKey, + delegateeDomain string, + delegateeAgentID string, + capabilities []types.Capability, +) error { + input := CanonicalAttestationInput(att.Domain, att.Role, att.AgentID, delegateeDomain, delegateeAgentID, capabilities) + ok, err := crypto.VerifyBytes(pub, []byte(input), att.Attestation) + if err != nil { + return err + } + if !ok { + return types.NewVerificationError( + types.ErrDelegationInvalid, + fmt.Sprintf("Delegation attestation from %s failed signature verification", att.Domain), + ) + } + return nil +} + +// VerifyChainDepth verifies a delegation chain length does not exceed the +// minimum max_delegation_depth across the participating discovery documents. 
+func VerifyChainDepth(chainLen int, maxDepths []uint8) error { + min := uint8(0) + if len(maxDepths) > 0 { + min = maxDepths[0] + for _, d := range maxDepths[1:] { + if d < min { + min = d + } + } + } + if chainLen > int(min) { + return types.NewVerificationError( + types.ErrDelegationDepthExceeded, + fmt.Sprintf("Delegation chain depth %d exceeds minimum max_delegation_depth %d", chainLen, min), + ) + } + return nil +} diff --git a/go/pkg/delegation/delegation_test.go b/go/pkg/delegation/delegation_test.go new file mode 100644 index 0000000..1300b03 --- /dev/null +++ b/go/pkg/delegation/delegation_test.go @@ -0,0 +1,63 @@ +package delegation + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func TestCreateAndVerifyAttestation(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + caps := []types.Capability{"read:data", "write:report"} + + att, err := CreateAttestation(priv, "k", "maker.com", types.RoleMaker, + "urn:maker:type", "deployer.com", "urn:deployer:inst", caps) + if err != nil { + t.Fatal(err) + } + if att.Domain != "maker.com" || att.Role != types.RoleMaker { + t.Fatal("att fields wrong") + } + if err := VerifyAttestation(att, pub, "deployer.com", "urn:deployer:inst", caps); err != nil { + t.Fatalf("verify failed: %v", err) + } +} + +func TestVerifyAttestationWrongKey(t *testing.T) { + k1, _ := crypto.GenerateKeyPair() + k2, _ := crypto.GenerateKeyPair() + p1, _ := crypto.LoadPrivateKey(k1.PrivateKeyPEM) + pub2, _ := crypto.LoadPublicKey(k2.PublicKeyPEM) + caps := []types.Capability{"read:data"} + att, _ := CreateAttestation(p1, "k", "d", types.RoleMaker, "a", "dd", "da", caps) + if err := VerifyAttestation(att, pub2, "dd", "da", caps); err == nil { + t.Fatal("wrong key should fail") + } +} + +func TestVerifyAttestationWrongCaps(t *testing.T) { + kp, _ := 
crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + caps := []types.Capability{"read:data"} + bad := []types.Capability{"write:data"} + att, _ := CreateAttestation(priv, "k", "d", types.RoleMaker, "a", "dd", "da", caps) + if err := VerifyAttestation(att, pub, "dd", "da", bad); err == nil { + t.Fatal("wrong caps should fail") + } +} + +func TestVerifyChainDepth(t *testing.T) { + if err := VerifyChainDepth(1, []uint8{2, 3}); err != nil { + t.Fatal(err) + } + if err := VerifyChainDepth(2, []uint8{2, 3}); err != nil { + t.Fatal(err) + } + if err := VerifyChainDepth(3, []uint8{2, 3}); err == nil { + t.Fatal("3 > min(2,3) should fail") + } +} diff --git a/go/pkg/discovery/discovery.go b/go/pkg/discovery/discovery.go new file mode 100644 index 0000000..dd95b60 --- /dev/null +++ b/go/pkg/discovery/discovery.go @@ -0,0 +1,119 @@ +// Package discovery builds and validates AgentPin discovery documents and +// (optionally) fetches them over HTTPS from the standard `.well-known` +// endpoint. +package discovery + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "time" + + "github.com/ThirdKeyAi/agentpin/go/internal/version" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// BuildDiscoveryDocument creates a new discovery document populated with the +// given keys/agents and the standard revocation endpoint URL. 
+func BuildDiscoveryDocument( + entity string, + entityType types.EntityType, + publicKeys []types.JWK, + agents []types.AgentDeclaration, + maxDelegationDepth uint8, + updatedAt string, +) types.DiscoveryDocument { + return types.DiscoveryDocument{ + AgentpinVersion: version.ProtocolVersion, + Entity: entity, + EntityType: entityType, + PublicKeys: publicKeys, + Agents: agents, + RevocationEndpoint: fmt.Sprintf("https://%s/.well-known/agent-identity-revocations.json", entity), + MaxDelegationDepth: maxDelegationDepth, + UpdatedAt: updatedAt, + } +} + +// ValidateDiscoveryDocument checks the basic structural requirements of a +// discovery document and that its entity matches the expected domain. +func ValidateDiscoveryDocument(doc *types.DiscoveryDocument, expectedEntity string) error { + if doc.AgentpinVersion != version.ProtocolVersion { + return fmt.Errorf("unsupported version: %s", doc.AgentpinVersion) + } + if doc.Entity != expectedEntity { + return types.NewVerificationError( + types.ErrDomainMismatch, + fmt.Sprintf("Discovery entity '%s' does not match expected '%s'", doc.Entity, expectedEntity), + ) + } + if len(doc.PublicKeys) == 0 { + return errors.New("discovery document must have at least one public key") + } + if doc.MaxDelegationDepth > 3 { + return errors.New("max_delegation_depth must be 0-3") + } + return nil +} + +// FindKeyByKid returns a pointer to the JWK in doc whose kid matches, or nil. +func FindKeyByKid(doc *types.DiscoveryDocument, kid string) *types.JWK { + for i := range doc.PublicKeys { + if doc.PublicKeys[i].Kid == kid { + return &doc.PublicKeys[i] + } + } + return nil +} + +// FindAgentByID returns a pointer to the agent in doc whose AgentID matches, +// or nil. 
+func FindAgentByID(doc *types.DiscoveryDocument, agentID string) *types.AgentDeclaration { + for i := range doc.Agents { + if doc.Agents[i].AgentID == agentID { + return &doc.Agents[i] + } + } + return nil +} + +// FetchDiscoveryDocument fetches `https://{domain}/.well-known/agent-identity.json` +// and validates it. Redirects are rejected: the discovery endpoint MUST be +// served directly from the canonical hostname. +func FetchDiscoveryDocument(client *http.Client, domain string) (*types.DiscoveryDocument, error) { + if client == nil { + client = &http.Client{ + Timeout: 10 * time.Second, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + }, + } + } + url := fmt.Sprintf("https://%s/.well-known/agent-identity.json", domain) + resp, err := client.Get(url) + if err != nil { + return nil, fmt.Errorf("failed to fetch %s: %w", url, err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode >= 300 && resp.StatusCode < 400 { + return nil, fmt.Errorf("redirect detected fetching %s (status %d). 
Redirects are not allowed", url, resp.StatusCode) + } + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, fmt.Errorf("HTTP %d fetching %s", resp.StatusCode, url) + } + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("read body: %w", err) + } + var doc types.DiscoveryDocument + if err := json.Unmarshal(body, &doc); err != nil { + return nil, fmt.Errorf("invalid JSON from %s: %w", url, err) + } + if err := ValidateDiscoveryDocument(&doc, domain); err != nil { + return nil, err + } + return &doc, nil +} diff --git a/go/pkg/discovery/discovery_test.go b/go/pkg/discovery/discovery_test.go new file mode 100644 index 0000000..f7f967d --- /dev/null +++ b/go/pkg/discovery/discovery_test.go @@ -0,0 +1,88 @@ +package discovery + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func makeDoc() types.DiscoveryDocument { + return BuildDiscoveryDocument( + "example.com", + types.EntityMaker, + []types.JWK{{Kid: "example-2026-01", Kty: "EC", Crv: "P-256", X: "x", Y: "y", Use: "sig"}}, + []types.AgentDeclaration{ + { + AgentID: "urn:agentpin:example.com:agent", + Name: "Test Agent", + Capabilities: []types.Capability{"read:*"}, + Status: types.AgentActive, + }, + }, + 2, + "2026-01-15T00:00:00Z", + ) +} + +func TestValidateDiscoveryDocument(t *testing.T) { + d := makeDoc() + if err := ValidateDiscoveryDocument(&d, "example.com"); err != nil { + t.Fatal(err) + } +} + +func TestValidateDomainMismatch(t *testing.T) { + d := makeDoc() + err := ValidateDiscoveryDocument(&d, "other.com") + if err == nil { + t.Fatal("expected mismatch error") + } + ve, ok := types.AsVerificationError(err) + if !ok || ve.Code != types.ErrDomainMismatch { + t.Fatalf("expected DomainMismatch, got %v", err) + } +} + +func TestFindKeyByKid(t *testing.T) { + d := makeDoc() + if FindKeyByKid(&d, "example-2026-01") == nil { + t.Fatal("hit expected") + } + if FindKeyByKid(&d, "missing") != nil { + t.Fatal("miss expected") + } +} + +func 
TestFindAgentByID(t *testing.T) { + d := makeDoc() + if FindAgentByID(&d, "urn:agentpin:example.com:agent") == nil { + t.Fatal("hit expected") + } + if FindAgentByID(&d, "urn:agentpin:example.com:other") != nil { + t.Fatal("miss expected") + } +} + +func TestBuildDiscoveryDocumentRevocationEndpoint(t *testing.T) { + d := makeDoc() + want := "https://example.com/.well-known/agent-identity-revocations.json" + if d.RevocationEndpoint != want { + t.Fatalf("RevocationEndpoint = %s, want %s", d.RevocationEndpoint, want) + } +} + +func TestRejectMaxDelegationDepthOver3(t *testing.T) { + d := makeDoc() + d.MaxDelegationDepth = 4 + if err := ValidateDiscoveryDocument(&d, "example.com"); err == nil { + t.Fatal("max_delegation_depth > 3 should be rejected") + } +} + +func TestRejectEmptyKeys(t *testing.T) { + d := makeDoc() + d.PublicKeys = nil + if err := ValidateDiscoveryDocument(&d, "example.com"); err == nil { + t.Fatal("empty keys should be rejected") + } +} diff --git a/go/pkg/dns/dns.go b/go/pkg/dns/dns.go new file mode 100644 index 0000000..dbe561d --- /dev/null +++ b/go/pkg/dns/dns.go @@ -0,0 +1,174 @@ +// Package dns implements DNS TXT cross-verification at _agentpin.{domain} +// (v0.3.0). +// +// Wire format mirrors SchemaPin's _schemapin record exactly with the version +// tag changed: +// +// _agentpin.example.com. 3600 IN TXT "v=agentpin1; kid=acme-2026-04; fp=sha256:a1b2c3..." +// +// Semantics: +// - Absent record -> no effect (DNS TXT is purely additive) +// - Present matching -> verification succeeds +// - Present mismatching / malformed -> hard failure +// +// Mismatch is fail-closed because a publisher who *intentionally* published +// a TXT record has signaled DNS is part of their trust chain. 
+package dns + +import ( + "context" + "errors" + "fmt" + "net" + "strings" + + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +const ( + version = "agentpin1" + fpPrefix = "sha256:" +) + +// TxtRecord is a parsed `_agentpin.{domain}` TXT record. +type TxtRecord struct { + Version string + Kid string // empty when unspecified + Fingerprint string // lower-case, "sha256:<hex>" +} + +// ParseTxtRecord parses a raw TXT record value such as +// "v=agentpin1; kid=acme-2026-04; fp=sha256:abcd1234". +// +// Whitespace around ';' and '=' is tolerated. Field order is not significant. +// Unknown fields are ignored for forward compatibility. Returns an error if +// the record is missing the required v or fp fields, the version is not +// agentpin1, or the fingerprint is malformed. +func ParseTxtRecord(value string) (*TxtRecord, error) { + var ver, kid, fp string + + for _, raw := range strings.Split(value, ";") { + part := strings.TrimSpace(raw) + if part == "" { + continue + } + eq := strings.IndexByte(part, '=') + if eq < 0 { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("DNS TXT field missing '=': %s", part), + ) + } + k := strings.ToLower(strings.TrimSpace(part[:eq])) + v := strings.TrimSpace(part[eq+1:]) + switch k { + case "v": + ver = v + case "kid": + kid = v + case "fp": + fp = strings.ToLower(v) + default: + // Forward-compat: ignore unknown fields. 
+ } + } + + if ver == "" { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + "DNS TXT record missing required 'v' field", + ) + } + if ver != version { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("DNS TXT unsupported version: %s", ver), + ) + } + if fp == "" { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + "DNS TXT record missing required 'fp' field", + ) + } + if !strings.HasPrefix(fp, fpPrefix) { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("DNS TXT 'fp' must be sha256:: %s", fp), + ) + } + return &TxtRecord{Version: ver, Kid: kid, Fingerprint: fp}, nil +} + +// VerifyDnsMatch cross-checks a parsed TXT record against a discovery +// document. Returns nil on success or a typed VerificationError on mismatch. +// +// When the TXT specifies a kid, the matching key MUST also carry the same +// kid. Multi-key discovery documents only need one key to match. +func VerifyDnsMatch(discovery *types.DiscoveryDocument, txt *TxtRecord) error { + if discovery == nil || txt == nil { + return types.NewVerificationError( + types.ErrDiscoveryInvalid, + "verify_dns_match: nil discovery or TXT", + ) + } + target := strings.ToLower(txt.Fingerprint) + for i := range discovery.PublicKeys { + k := &discovery.PublicKeys[i] + computed := strings.ToLower(jwk.JWKThumbprint(k)) + if !strings.HasPrefix(computed, fpPrefix) { + computed = fpPrefix + computed + } + if computed != target { + continue + } + if txt.Kid != "" && k.Kid != txt.Kid { + continue + } + return nil + } + return types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("DNS TXT fingerprint %s does not match any key in the discovery document", target), + ) +} + +// TxtRecordName returns the DNS lookup name for an AgentPin domain +// ("_agentpin.{domain}", trailing dot stripped). +func TxtRecordName(domain string) string { + return "_agentpin." 
+ strings.TrimRight(domain, ".") +} + +// LookupTxt performs the DNS TXT lookup at _agentpin.{domain} via the given +// resolver (use net.DefaultResolver for the system resolver). +// +// Returns (nil, nil) when no _agentpin record exists for the domain (or the +// resolver reports NXDOMAIN / no answer). Returns a parse error when the +// record exists but is malformed. +// +// When the resolver returns multiple TXT records at the same name, the first +// whose value contains "v=agentpin1" is used. +func LookupTxt(ctx context.Context, resolver *net.Resolver, domain string) (*TxtRecord, error) { + if resolver == nil { + resolver = net.DefaultResolver + } + name := TxtRecordName(domain) + records, err := resolver.LookupTXT(ctx, name) + if err != nil { + var dnsErr *net.DNSError + if errors.As(err, &dnsErr) && (dnsErr.IsNotFound || dnsErr.Err == "no such host") { + return nil, nil + } + return nil, types.NewVerificationError( + types.ErrDiscoveryFetchFailed, + fmt.Sprintf("DNS TXT lookup failed for %s: %s", name, err), + ) + } + for _, value := range records { + if strings.Contains(value, "v=agentpin1") { + return ParseTxtRecord(value) + } + } + return nil, nil +} diff --git a/go/pkg/dns/dns_test.go b/go/pkg/dns/dns_test.go new file mode 100644 index 0000000..3f9208f --- /dev/null +++ b/go/pkg/dns/dns_test.go @@ -0,0 +1,150 @@ +package dns + +import ( + "strings" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func discoveryDoc(jwks []types.JWK) *types.DiscoveryDocument { + return &types.DiscoveryDocument{ + AgentpinVersion: "0.3", + Entity: "example.com", + EntityType: types.EntityMaker, + PublicKeys: jwks, + MaxDelegationDepth: 0, + UpdatedAt: "2026-05-01T00:00:00Z", + } +} + +func TestParseFullRecord(t *testing.T) { + r, err := ParseTxtRecord("v=agentpin1; kid=acme-2026-04; fp=sha256:abcd1234") + if err != nil { + t.Fatal(err) + } + if r.Version != 
"agentpin1" || r.Kid != "acme-2026-04" || r.Fingerprint != "sha256:abcd1234" { + t.Fatalf("unexpected: %+v", r) + } +} + +func TestParseMinimalRecord(t *testing.T) { + r, err := ParseTxtRecord("v=agentpin1;fp=sha256:abc") + if err != nil { + t.Fatal(err) + } + if r.Kid != "" || r.Fingerprint != "sha256:abc" { + t.Fatalf("unexpected: %+v", r) + } +} + +func TestParseLowercasesFingerprint(t *testing.T) { + r, err := ParseTxtRecord("v=agentpin1; fp=SHA256:ABCDEF") + if err != nil { + t.Fatal(err) + } + if r.Fingerprint != "sha256:abcdef" { + t.Fatalf("expected lowercased fp, got %q", r.Fingerprint) + } +} + +func TestParseToleratesWhitespaceAndOrder(t *testing.T) { + r, err := ParseTxtRecord(" fp = sha256:beef ; v = agentpin1 ") + if err != nil { + t.Fatal(err) + } + if r.Fingerprint != "sha256:beef" || r.Version != "agentpin1" { + t.Fatalf("unexpected: %+v", r) + } +} + +func TestParseIgnoresUnknownFields(t *testing.T) { + r, err := ParseTxtRecord("v=agentpin1; fp=sha256:abc; future=ignoreme") + if err != nil { + t.Fatal(err) + } + if r.Fingerprint != "sha256:abc" { + t.Fatalf("unexpected: %+v", r) + } +} + +func TestParseFailures(t *testing.T) { + cases := []string{ + "fp=sha256:abc", // missing v + "v=agentpin1", // missing fp + "v=agentpin99; fp=sha256:abc", // bad version + "v=agentpin1; fp=abc", // bad fp prefix + "v=agentpin1; broken", // missing '=' + "v=schemapin1; fp=sha256:abc", // SchemaPin format + } + for _, c := range cases { + if _, err := ParseTxtRecord(c); err == nil { + t.Fatalf("expected error for %q", c) + } + } +} + +func fpFor(j types.JWK) string { + t := strings.ToLower(jwk.JWKThumbprint(&j)) + if !strings.HasPrefix(t, "sha256:") { + t = "sha256:" + t + } + return t +} + +func TestVerifyMatchAgainstSingleKey(t *testing.T) { + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + j, err := jwk.PEMToJWK(kp.PublicKeyPEM, "kid-1") + if err != nil { + t.Fatal(err) + } + if err := VerifyDnsMatch(discoveryDoc([]types.JWK{j}), + 
&TxtRecord{Fingerprint: fpFor(j)}); err != nil { + t.Fatal(err) + } +} + +func TestVerifyMatchAgainstOneOfMultipleKeys(t *testing.T) { + kp1, _ := crypto.GenerateKeyPair() + kp2, _ := crypto.GenerateKeyPair() + j1, _ := jwk.PEMToJWK(kp1.PublicKeyPEM, "kid-a") + j2, _ := jwk.PEMToJWK(kp2.PublicKeyPEM, "kid-b") + doc := discoveryDoc([]types.JWK{j1, j2}) + if err := VerifyDnsMatch(doc, &TxtRecord{Kid: "kid-b", Fingerprint: fpFor(j2)}); err != nil { + t.Fatal(err) + } +} + +func TestVerifyKidMismatchFailsEvenWhenFpMatches(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + j, _ := jwk.PEMToJWK(kp.PublicKeyPEM, "kid-real") + doc := discoveryDoc([]types.JWK{j}) + if err := VerifyDnsMatch(doc, &TxtRecord{Kid: "kid-different", Fingerprint: fpFor(j)}); err == nil { + t.Fatal("expected mismatch error") + } +} + +func TestVerifyFingerprintMismatch(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + j, _ := jwk.PEMToJWK(kp.PublicKeyPEM, "kid-1") + doc := discoveryDoc([]types.JWK{j}) + if err := VerifyDnsMatch(doc, &TxtRecord{ + Fingerprint: "sha256:0000000000000000000000000000000000000000000000000000000000000000", + }); err == nil { + t.Fatal("expected mismatch error") + } +} + +func TestTxtRecordNameStripsTrailingDot(t *testing.T) { + if got := TxtRecordName("example.com"); got != "_agentpin.example.com" { + t.Fatalf("got %q", got) + } + if got := TxtRecordName("example.com."); got != "_agentpin.example.com" { + t.Fatalf("got %q", got) + } +} diff --git a/go/pkg/jwk/jwk.go b/go/pkg/jwk/jwk.go new file mode 100644 index 0000000..ad33c89 --- /dev/null +++ b/go/pkg/jwk/jwk.go @@ -0,0 +1,97 @@ +// Package jwk converts ECDSA P-256 keys to/from AgentPin JWK and computes +// RFC 7638 JWK thumbprints byte-identically to the Rust SDK. 
+package jwk + +import ( + "crypto/ecdsa" + "crypto/elliptic" + "encoding/base64" + "errors" + "fmt" + "math/big" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// VerifyingKeyToJWK converts a P-256 public key into the AgentPin JWK form. +// +// The "use" field is fixed to "sig" and key_ops to ["verify"], matching the +// Rust port verbatim so wire format stays compatible. +func VerifyingKeyToJWK(pub *ecdsa.PublicKey, kid string) types.JWK { + x := padTo32(pub.X.Bytes()) + y := padTo32(pub.Y.Bytes()) + return types.JWK{ + Kid: kid, + Kty: "EC", + Crv: "P-256", + X: base64.RawURLEncoding.EncodeToString(x), + Y: base64.RawURLEncoding.EncodeToString(y), + Use: "sig", + KeyOps: []string{"verify"}, + } +} + +// JWKToVerifyingKey converts an AgentPin JWK back into a P-256 public key. +func JWKToVerifyingKey(j *types.JWK) (*ecdsa.PublicKey, error) { + if j.Kty != "EC" || j.Crv != "P-256" { + return nil, errors.New("invalid key format: kty/crv not EC/P-256") + } + xBytes, err := base64.RawURLEncoding.DecodeString(j.X) + if err != nil { + return nil, fmt.Errorf("invalid key format: %w", err) + } + yBytes, err := base64.RawURLEncoding.DecodeString(j.Y) + if err != nil { + return nil, fmt.Errorf("invalid key format: %w", err) + } + if len(xBytes) != 32 || len(yBytes) != 32 { + return nil, errors.New("invalid key format: x/y must be 32 bytes") + } + pub := &ecdsa.PublicKey{ + Curve: elliptic.P256(), + X: new(big.Int).SetBytes(xBytes), + Y: new(big.Int).SetBytes(yBytes), + } + if !pub.Curve.IsOnCurve(pub.X, pub.Y) { + return nil, errors.New("invalid key format: point not on P-256") + } + return pub, nil +} + +// PEMToJWK converts a PEM SPKI public key to a JWK with the given kid. +func PEMToJWK(pubPEM, kid string) (types.JWK, error) { + pub, err := crypto.LoadPublicKey(pubPEM) + if err != nil { + return types.JWK{}, err + } + return VerifyingKeyToJWK(pub, kid), nil +} + +// JWKToPEM converts a JWK to PEM SPKI form. 
+func JWKToPEM(j *types.JWK) (string, error) { + pub, err := JWKToVerifyingKey(j) + if err != nil { + return "", err + } + return crypto.MarshalPublicKeyPEM(pub) +} + +// JWKThumbprint computes the RFC 7638 JWK thumbprint: +// SHA-256 of {"crv":"P-256","kty":"EC","x":"<x>","y":"<y>"} (sorted keys, no +// whitespace), hex-encoded. Wire format must match the Rust port byte-for-byte. +func JWKThumbprint(j *types.JWK) string { + canonical := fmt.Sprintf(`{"crv":"%s","kty":"%s","x":"%s","y":"%s"}`, j.Crv, j.Kty, j.X, j.Y) + return crypto.SHA256Hex([]byte(canonical)) +} + +// padTo32 left-pads a big-endian byte slice to length 32 with zeros, as +// required by JWK encoding for P-256 coordinates. +func padTo32(b []byte) []byte { + if len(b) >= 32 { + return b + } + out := make([]byte, 32) + copy(out[32-len(b):], b) + return out +} diff --git a/go/pkg/jwk/jwk_test.go b/go/pkg/jwk/jwk_test.go new file mode 100644 index 0000000..cf226dd --- /dev/null +++ b/go/pkg/jwk/jwk_test.go @@ -0,0 +1,80 @@ +package jwk + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func TestJWKRoundTrip(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + j, err := PEMToJWK(kp.PublicKeyPEM, "test-key-01") + if err != nil { + t.Fatal(err) + } + if j.Kty != "EC" || j.Crv != "P-256" || j.Kid != "test-key-01" || j.Use != "sig" { + t.Fatalf("JWK fields wrong: %+v", j) + } + pem, err := JWKToPEM(&j) + if err != nil { + t.Fatal(err) + } + if pem != kp.PublicKeyPEM { + t.Fatalf("PEM roundtrip mismatch:\n%s\nvs\n%s", pem, kp.PublicKeyPEM) + } +} + +func TestJWKThumbprintDeterministic(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + j, _ := PEMToJWK(kp.PublicKeyPEM, "kid-1") + t1 := JWKThumbprint(&j) + t2 := JWKThumbprint(&j) + if t1 != t2 { + t.Fatal("thumbprint not deterministic") + } + if len(t1) != 64 { + t.Fatalf("thumbprint length %d, want 64", len(t1)) + } +} + +func TestJWKThumbprintKnownVector(t *testing.T) { + // RFC 7638 
Section 3.1 example, adapted for our hex output. + // {"crv":"P-256","kty":"EC","x":"...","y":"..."} → SHA-256 hex. + // Use a known fixed JWK to lock down byte-for-byte parity with Rust. + j := types.JWK{ + Kty: "EC", + Crv: "P-256", + X: "MKBCTNIcKUSDii11ySs3526iDZ8AiTo7Tu6KPAqv7D4", + Y: "4Etl6SRW2YiLUrN5vfvVHuhp7x8PxltmWWlbbM4IFyM", + } + got := JWKThumbprint(&j) + // Computed canonical: {"crv":"P-256","kty":"EC","x":"MKBCTNIcKUSDii11ySs3526iDZ8AiTo7Tu6KPAqv7D4","y":"4Etl6SRW2YiLUrN5vfvVHuhp7x8PxltmWWlbbM4IFyM"} + // SHA-256 hex (matches the AgentPin Rust SDK on the same input). + want := "727f88fd634c0a57a1895a79d62ff4569384356d6ea447ab03cb046a6e619feb" + if got != want { + t.Fatalf("JWKThumbprint mismatch:\ngot %s\nwant %s", got, want) + } +} + +func TestInvalidJWKRejected(t *testing.T) { + j := types.JWK{Kty: "RSA", Crv: "P-256", X: "AAAA", Y: "BBBB"} + if _, err := JWKToVerifyingKey(&j); err == nil { + t.Fatal("invalid JWK should be rejected") + } +} + +func TestVerifyingKeyToJWKShape(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + j := VerifyingKeyToJWK(pub, "kid-1") + if j.Kty != "EC" || j.Crv != "P-256" { + t.Fatal("kty/crv wrong") + } + if len(j.KeyOps) != 1 || j.KeyOps[0] != "verify" { + t.Fatalf("key_ops = %v", j.KeyOps) + } + if j.Use != "sig" { + t.Fatal("use should be sig") + } +} diff --git a/go/pkg/jwt/jwt.go b/go/pkg/jwt/jwt.go new file mode 100644 index 0000000..841c884 --- /dev/null +++ b/go/pkg/jwt/jwt.go @@ -0,0 +1,140 @@ +// Package jwt implements ES256-only JWT encoding, decoding, and verification +// for AgentPin credentials. +// +// SECURITY: This package is intentionally NOT a general-purpose JWT library. +// It rejects every algorithm except ES256 and every type except +// "agentpin-credential+jwt" up front, before any signature work happens. It +// uses Go stdlib crypto/ecdsa directly so there is no third-party JWT +// dependency with permissive `alg` defaults. 
+// +// Wire compatibility: signatures are DER-encoded ECDSA bytes, base64url-no-pad +// encoded. This matches the Rust SDK (signature.to_der().as_bytes()), the +// JavaScript SDK, and the Python SDK — it differs from RFC 7515 ES256, which +// would use raw r||s. AgentPin spec uses DER for cross-language uniformity +// across native ECDSA libraries. Do not change this without coordinating +// every SDK simultaneously. +package jwt + +import ( + "crypto/ecdsa" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "strings" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +const ( + // RequiredAlg is the only signature algorithm AgentPin accepts. + RequiredAlg = "ES256" + // RequiredTyp is the only token type AgentPin credentials may carry. + RequiredTyp = "agentpin-credential+jwt" +) + +// Base64URLEncode encodes data with the unpadded base64url alphabet. +func Base64URLEncode(data []byte) string { + return base64.RawURLEncoding.EncodeToString(data) +} + +// Base64URLDecode decodes an unpadded base64url string. +func Base64URLDecode(s string) ([]byte, error) { + return base64.RawURLEncoding.DecodeString(s) +} + +// EncodeJWT serializes header + payload, signs the "
." +// signing input with priv, and returns the compact JWT string. The header's +// alg/typ fields are NOT overridden — the caller is responsible for setting +// them to ES256 / agentpin-credential+jwt; the verifier will reject anything +// else. +func EncodeJWT(header *types.JWTHeader, payload *types.JWTPayload, priv *ecdsa.PrivateKey) (string, error) { + hb, err := json.Marshal(header) + if err != nil { + return "", fmt.Errorf("marshal header: %w", err) + } + pb, err := json.Marshal(payload) + if err != nil { + return "", fmt.Errorf("marshal payload: %w", err) + } + signingInput := Base64URLEncode(hb) + "." + Base64URLEncode(pb) + + sigB64, err := crypto.SignBytes(priv, []byte(signingInput)) + if err != nil { + return "", err + } + // crypto.SignBytes uses standard base64; convert raw DER to base64url. + derSig, err := base64.StdEncoding.DecodeString(sigB64) + if err != nil { + return "", fmt.Errorf("internal: re-decode signature: %w", err) + } + return signingInput + "." + Base64URLEncode(derSig), nil +} + +// DecodeJWTUnverified parses and validates only the JWT shape, alg, and typ. +// It does NOT verify the signature — callers must call VerifyJWT for that. +// +// Returns the parsed header, payload, and the raw base64url signature +// segment. Rejects anything other than ES256 / agentpin-credential+jwt with +// a descriptive error. 
+func DecodeJWTUnverified(jwt string) (*types.JWTHeader, *types.JWTPayload, string, error) { + parts := strings.Split(jwt, ".") + if len(parts) != 3 { + return nil, nil, "", errors.New("JWT must have 3 parts") + } + + hb, err := Base64URLDecode(parts[0]) + if err != nil { + return nil, nil, "", fmt.Errorf("decode header: %w", err) + } + pb, err := Base64URLDecode(parts[1]) + if err != nil { + return nil, nil, "", fmt.Errorf("decode payload: %w", err) + } + + var header types.JWTHeader + if err := json.Unmarshal(hb, &header); err != nil { + return nil, nil, "", fmt.Errorf("invalid JWT header: %w", err) + } + var payload types.JWTPayload + if err := json.Unmarshal(pb, &payload); err != nil { + return nil, nil, "", fmt.Errorf("invalid JWT payload: %w", err) + } + + // SECURITY: reject any algorithm except ES256 BEFORE verifying. + if header.Alg != RequiredAlg { + return nil, nil, "", fmt.Errorf("algorithm '%s' rejected, must be '%s'", header.Alg, RequiredAlg) + } + if header.Typ != RequiredTyp { + return nil, nil, "", fmt.Errorf("token type '%s' rejected, must be '%s'", header.Typ, RequiredTyp) + } + + return &header, &payload, parts[2], nil +} + +// VerifyJWT verifies a JWT signature with pub. Returns the decoded header +// and payload on success. +func VerifyJWT(jwt string, pub *ecdsa.PublicKey) (*types.JWTHeader, *types.JWTPayload, error) { + header, payload, sigB64URL, err := DecodeJWTUnverified(jwt) + if err != nil { + return nil, nil, err + } + parts := strings.Split(jwt, ".") + signingInput := parts[0] + "." + parts[1] + + sig, err := Base64URLDecode(sigB64URL) + if err != nil { + return nil, nil, fmt.Errorf("invalid signature encoding: %w", err) + } + // Convert DER bytes back to standard base64 for crypto.VerifyBytes. 
+ stdSig := base64.StdEncoding.EncodeToString(sig) + ok, err := crypto.VerifyBytes(pub, []byte(signingInput), stdSig) + if err != nil { + return nil, nil, fmt.Errorf("verify signature: %w", err) + } + if !ok { + return nil, nil, errors.New("signature verification failed") + } + return header, payload, nil +} diff --git a/go/pkg/jwt/jwt_test.go b/go/pkg/jwt/jwt_test.go new file mode 100644 index 0000000..0b4bb2d --- /dev/null +++ b/go/pkg/jwt/jwt_test.go @@ -0,0 +1,139 @@ +package jwt + +import ( + "crypto/ecdsa" + "encoding/base64" + "encoding/json" + "strings" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func makeKey(t *testing.T) *ecdsa.PrivateKey { + t.Helper() + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + priv, err := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + if err != nil { + t.Fatal(err) + } + return priv +} + +func makeTestJWT(t *testing.T, priv *ecdsa.PrivateKey, kid string) (string, *types.JWTHeader, *types.JWTPayload) { + t.Helper() + header := &types.JWTHeader{Alg: RequiredAlg, Typ: RequiredTyp, Kid: kid} + payload := &types.JWTPayload{ + Iss: "example.com", + Sub: "urn:agentpin:example.com:agent", + Aud: "verifier.com", + Iat: 1738300800, + Exp: 1738304400, + Jti: "test-jti-001", + AgentpinVersion: "0.1", + Capabilities: []types.Capability{"read:data"}, + } + jwt, err := EncodeJWT(header, payload, priv) + if err != nil { + t.Fatal(err) + } + return jwt, header, payload +} + +func TestJWTEncodeDecodeRoundTrip(t *testing.T) { + priv := makeKey(t) + pub := &priv.PublicKey + jwt, _, _ := makeTestJWT(t, priv, "k") + header, payload, err := VerifyJWT(jwt, pub) + if err != nil { + t.Fatal(err) + } + if header.Alg != RequiredAlg { + t.Fatalf("alg: %s", header.Alg) + } + if payload.Iss != "example.com" { + t.Fatalf("iss: %s", payload.Iss) + } +} + +func TestJWTWrongKeyRejected(t *testing.T) { + p1 := makeKey(t) + p2 := makeKey(t) + jwt, _, _ := makeTestJWT(t, 
p1, "k") + if _, _, err := VerifyJWT(jwt, &p2.PublicKey); err == nil { + t.Fatal("wrong key should fail") + } +} + +// craftBadAlgJWT manually crafts a JWT with the given header alg/typ values +// to test that DecodeJWTUnverified rejects each one before any signature work. +func craftBadAlgJWT(alg, typ string) string { + hdr, _ := json.Marshal(types.JWTHeader{Alg: alg, Typ: typ, Kid: "k"}) + pld, _ := json.Marshal(types.JWTPayload{Iss: "e.com", Sub: "s", Iat: 1, Exp: 2, Jti: "j"}) + hb := base64.RawURLEncoding.EncodeToString(hdr) + pb := base64.RawURLEncoding.EncodeToString(pld) + return hb + "." + pb + ".sig" +} + +func TestJWTAlgorithmRejectionNone(t *testing.T) { + _, _, _, err := DecodeJWTUnverified(craftBadAlgJWT("none", RequiredTyp)) + if err == nil { + t.Fatal("alg=none must be rejected") + } + if !strings.Contains(err.Error(), "rejected") { + t.Fatalf("error should mention rejection, got: %v", err) + } +} + +func TestJWTAlgorithmRejectionHS256(t *testing.T) { + if _, _, _, err := DecodeJWTUnverified(craftBadAlgJWT("HS256", RequiredTyp)); err == nil { + t.Fatal("alg=HS256 must be rejected") + } +} + +func TestJWTAlgorithmRejectionRS256(t *testing.T) { + if _, _, _, err := DecodeJWTUnverified(craftBadAlgJWT("RS256", RequiredTyp)); err == nil { + t.Fatal("alg=RS256 must be rejected") + } +} + +func TestJWTAlgorithmRejectionES384(t *testing.T) { + if _, _, _, err := DecodeJWTUnverified(craftBadAlgJWT("ES384", RequiredTyp)); err == nil { + t.Fatal("alg=ES384 must be rejected") + } +} + +func TestJWTAlgorithmRejectionEmpty(t *testing.T) { + if _, _, _, err := DecodeJWTUnverified(craftBadAlgJWT("", RequiredTyp)); err == nil { + t.Fatal("alg='' must be rejected") + } +} + +func TestJWTWrongTypRejected(t *testing.T) { + if _, _, _, err := DecodeJWTUnverified(craftBadAlgJWT(RequiredAlg, "JWT")); err == nil { + t.Fatal("typ=JWT must be rejected") + } +} + +func TestJWTMalformedRejected(t *testing.T) { + if _, _, _, err := DecodeJWTUnverified("not.a.jwt.token"); err == 
nil { + t.Fatal("4-part jwt should be rejected") + } + if _, _, _, err := DecodeJWTUnverified("only-one-part"); err == nil { + t.Fatal("1-part jwt should be rejected") + } +} + +func TestBase64URLEncode(t *testing.T) { + if got := Base64URLEncode([]byte("hello")); got != "aGVsbG8" { + t.Fatalf("Base64URLEncode = %q", got) + } + dec, err := Base64URLDecode("aGVsbG8") + if err != nil || string(dec) != "hello" { + t.Fatalf("Base64URLDecode roundtrip failed: %v %s", err, dec) + } +} diff --git a/go/pkg/mutual/mutual.go b/go/pkg/mutual/mutual.go new file mode 100644 index 0000000..8e05e06 --- /dev/null +++ b/go/pkg/mutual/mutual.go @@ -0,0 +1,85 @@ +// Package mutual implements the AgentPin challenge / response mutual-auth +// flow with 128-bit random nonces. +package mutual + +import ( + "crypto/ecdsa" + "crypto/rand" + "encoding/base64" + "errors" + "fmt" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/nonce" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +const nonceExpirySecs = int64(60) + +// CreateChallenge produces a fresh challenge with a 128-bit random nonce. +// verifierCredential may be empty to omit the field. +func CreateChallenge(verifierCredential string) (*types.Challenge, error) { + var n [16]byte + if _, err := rand.Read(n[:]); err != nil { + return nil, fmt.Errorf("read random nonce: %w", err) + } + return &types.Challenge{ + Type: "agentpin-challenge", + Nonce: base64.RawURLEncoding.EncodeToString(n[:]), + Timestamp: time.Now().UTC().Format(time.RFC3339), + VerifierCredential: verifierCredential, + }, nil +} + +// CreateResponse signs the challenge nonce with priv and returns a Response. 
+func CreateResponse(challenge *types.Challenge, priv *ecdsa.PrivateKey, kid string) (*types.Response, error) { + sig, err := crypto.SignBytes(priv, []byte(challenge.Nonce)) + if err != nil { + return nil, err + } + return &types.Response{ + Type: "agentpin-response", + Nonce: challenge.Nonce, + Signature: sig, + Kid: kid, + }, nil +} + +// VerifyResponse checks the response nonce, expiry, and signature. It does +// not perform replay protection — see VerifyResponseWithStore for that. +func VerifyResponse(response *types.Response, challenge *types.Challenge, pub *ecdsa.PublicKey) (bool, error) { + return VerifyResponseWithStore(response, challenge, pub, nil) +} + +// VerifyResponseWithStore checks the response and (optionally) records the +// nonce in store to reject replays. Returns false (and no error) when the +// nonce simply doesn't match; returns an error for expired nonce, replay, +// or signature decode failure. +func VerifyResponseWithStore(response *types.Response, challenge *types.Challenge, pub *ecdsa.PublicKey, store nonce.Store) (bool, error) { + if response.Nonce != challenge.Nonce { + return false, nil + } + + if challenge.Timestamp != "" { + ts, err := time.Parse(time.RFC3339, challenge.Timestamp) + if err == nil { + elapsed := time.Now().Unix() - ts.Unix() + if elapsed > nonceExpirySecs { + return false, fmt.Errorf("Challenge nonce expired (%d seconds old, max %d)", elapsed, nonceExpirySecs) + } + } + } + + if store != nil { + fresh, err := store.CheckAndRecord(response.Nonce, time.Duration(nonceExpirySecs)*time.Second) + if err != nil { + return false, err + } + if !fresh { + return false, errors.New("Nonce has already been used") + } + } + + return crypto.VerifyBytes(pub, []byte(challenge.Nonce), response.Signature) +} diff --git a/go/pkg/mutual/mutual_test.go b/go/pkg/mutual/mutual_test.go new file mode 100644 index 0000000..18d8a0c --- /dev/null +++ b/go/pkg/mutual/mutual_test.go @@ -0,0 +1,89 @@ +package mutual + +import ( + "encoding/base64" 
+ "testing" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/nonce" +) + +func TestChallengeResponseRoundTrip(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + c, err := CreateChallenge("") + if err != nil { + t.Fatal(err) + } + r, err := CreateResponse(c, priv, "k") + if err != nil { + t.Fatal(err) + } + ok, err := VerifyResponse(r, c, pub) + if err != nil || !ok { + t.Fatalf("verify: ok=%v err=%v", ok, err) + } +} + +func TestNonceIs128Bits(t *testing.T) { + c, _ := CreateChallenge("") + b, err := base64.RawURLEncoding.DecodeString(c.Nonce) + if err != nil || len(b) != 16 { + t.Fatalf("nonce len = %d, want 16 (128 bits)", len(b)) + } +} + +func TestWrongKeyRejected(t *testing.T) { + k1, _ := crypto.GenerateKeyPair() + k2, _ := crypto.GenerateKeyPair() + p1, _ := crypto.LoadPrivateKey(k1.PrivateKeyPEM) + pub2, _ := crypto.LoadPublicKey(k2.PublicKeyPEM) + c, _ := CreateChallenge("") + r, _ := CreateResponse(c, p1, "k") + ok, _ := VerifyResponse(r, c, pub2) + if ok { + t.Fatal("wrong key should not verify") + } +} + +func TestNonceMismatchRejected(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + c, _ := CreateChallenge("") + r, _ := CreateResponse(c, priv, "k") + r.Nonce = "wrong" + ok, _ := VerifyResponse(r, c, pub) + if ok { + t.Fatal("nonce mismatch should not verify") + } +} + +func TestExpiredNonceRejected(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + c, _ := CreateChallenge("") + c.Timestamp = time.Now().Add(-2 * time.Minute).UTC().Format(time.RFC3339) + r, _ := CreateResponse(c, priv, "k") + if _, err := VerifyResponse(r, c, pub); err == nil { + t.Fatal("expired challenge should error") + 
} +} + +func TestNonceStoreReplayProtection(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + store := nonce.NewInMemoryStore() + c, _ := CreateChallenge("") + r, _ := CreateResponse(c, priv, "k") + if _, err := VerifyResponseWithStore(r, c, pub, store); err != nil { + t.Fatalf("first verify should succeed: %v", err) + } + if _, err := VerifyResponseWithStore(r, c, pub, store); err == nil { + t.Fatal("replay should be rejected") + } +} diff --git a/go/pkg/nonce/nonce.go b/go/pkg/nonce/nonce.go new file mode 100644 index 0000000..d87416c --- /dev/null +++ b/go/pkg/nonce/nonce.go @@ -0,0 +1,48 @@ +// Package nonce provides a nonce deduplication store interface and an +// in-memory implementation used to defend against challenge-response replay. +package nonce + +import ( + "sync" + "time" +) + +// Store is the dedup interface implemented by nonce stores. +// +// CheckAndRecord returns true if nonce has not been seen before (and records +// it with ttl). Returns false if it has been seen — i.e. this is a replay. +type Store interface { + CheckAndRecord(nonce string, ttl time.Duration) (bool, error) +} + +// InMemoryStore is a goroutine-safe in-memory nonce store with lazy expiry. +type InMemoryStore struct { + mu sync.Mutex + entries map[string]time.Time +} + +// NewInMemoryStore creates a new in-memory nonce store. +func NewInMemoryStore() *InMemoryStore { + return &InMemoryStore{entries: make(map[string]time.Time)} +} + +// CheckAndRecord implements Store. +func (s *InMemoryStore) CheckAndRecord(nonce string, ttl time.Duration) (bool, error) { + s.mu.Lock() + defer s.mu.Unlock() + + now := time.Now() + + // Lazy cleanup: drop expired entries. 
+ for k, exp := range s.entries { + if !exp.After(now) { + delete(s.entries, k) + } + } + + if _, ok := s.entries[nonce]; ok { + return false, nil + } + s.entries[nonce] = now.Add(ttl) + return true, nil +} diff --git a/go/pkg/nonce/nonce_test.go b/go/pkg/nonce/nonce_test.go new file mode 100644 index 0000000..fb5fbe4 --- /dev/null +++ b/go/pkg/nonce/nonce_test.go @@ -0,0 +1,33 @@ +package nonce + +import ( + "testing" + "time" +) + +func TestFreshNonceAccepted(t *testing.T) { + s := NewInMemoryStore() + ok, err := s.CheckAndRecord("n1", time.Minute) + if err != nil || !ok { + t.Fatalf("first call: ok=%v err=%v", ok, err) + } +} + +func TestDuplicateNonceRejected(t *testing.T) { + s := NewInMemoryStore() + _, _ = s.CheckAndRecord("dup", time.Minute) + ok, _ := s.CheckAndRecord("dup", time.Minute) + if ok { + t.Fatal("duplicate should be rejected") + } +} + +func TestExpiredNonceReusable(t *testing.T) { + s := NewInMemoryStore() + _, _ = s.CheckAndRecord("exp", time.Millisecond) + time.Sleep(10 * time.Millisecond) + ok, _ := s.CheckAndRecord("exp", time.Minute) + if !ok { + t.Fatal("expired nonce should be reusable") + } +} diff --git a/go/pkg/pinning/pinning.go b/go/pkg/pinning/pinning.go new file mode 100644 index 0000000..08b3bd7 --- /dev/null +++ b/go/pkg/pinning/pinning.go @@ -0,0 +1,166 @@ +// Package pinning provides a TOFU (trust-on-first-use) key pin store keyed +// by domain. The store persists by JSON serialization and is used by the +// AgentPin verifier to detect key changes. +package pinning + +import ( + "encoding/json" + "fmt" + "sync" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// Result is the outcome of a CheckAndPin call. +type Result int + +const ( + // ResultFirstUse indicates the domain was unknown and the key is now pinned. + ResultFirstUse Result = iota + // ResultMatched indicates the key matches a previously pinned key. 
+ ResultMatched + // ResultChanged indicates the domain was known but the key does not match + // any pinned key — likely key rotation or attack. + ResultChanged +) + +// String returns a stable label for r. +func (r Result) String() string { + switch r { + case ResultFirstUse: + return "first_use" + case ResultMatched: + return "pinned" + case ResultChanged: + return "changed" + } + return "unknown" +} + +// KeyPinStore is a concurrent-safe in-memory TOFU pin store. +type KeyPinStore struct { + mu sync.Mutex + domains map[string]*types.PinnedDomain +} + +// NewKeyPinStore creates a new empty store. +func NewKeyPinStore() *KeyPinStore { + return &KeyPinStore{domains: make(map[string]*types.PinnedDomain)} +} + +// CheckAndPin checks j against the pinned keys for domain. On first use it +// pins j and returns ResultFirstUse. +func (s *KeyPinStore) CheckAndPin(domain string, j *types.JWK) Result { + s.mu.Lock() + defer s.mu.Unlock() + + hash := jwk.JWKThumbprint(j) + now := time.Now().UTC().Format(time.RFC3339) + + if pd, ok := s.domains[domain]; ok { + for i := range pd.PinnedKeys { + if pd.PinnedKeys[i].PublicKeyHash == hash { + pd.PinnedKeys[i].LastSeen = now + return ResultMatched + } + } + return ResultChanged + } + + s.domains[domain] = &types.PinnedDomain{ + Domain: domain, + PinnedKeys: []types.PinnedKey{ + { + Kid: j.Kid, + PublicKeyHash: hash, + FirstSeen: now, + LastSeen: now, + TrustLevel: types.TrustTOFU, + }, + }, + } + return ResultFirstUse +} + +// AddKey adds an additional pinned key to a domain (e.g., during key rotation). 
+func (s *KeyPinStore) AddKey(domain string, j *types.JWK) { + s.mu.Lock() + defer s.mu.Unlock() + + hash := jwk.JWKThumbprint(j) + now := time.Now().UTC().Format(time.RFC3339) + + pd, ok := s.domains[domain] + if !ok { + pd = &types.PinnedDomain{Domain: domain} + s.domains[domain] = pd + } + for _, k := range pd.PinnedKeys { + if k.PublicKeyHash == hash { + return + } + } + pd.PinnedKeys = append(pd.PinnedKeys, types.PinnedKey{ + Kid: j.Kid, + PublicKeyHash: hash, + FirstSeen: now, + LastSeen: now, + TrustLevel: types.TrustTOFU, + }) +} + +// GetDomain returns the pinned-domain entry for domain, or nil. +func (s *KeyPinStore) GetDomain(domain string) *types.PinnedDomain { + s.mu.Lock() + defer s.mu.Unlock() + pd, ok := s.domains[domain] + if !ok { + return nil + } + cp := *pd + cp.PinnedKeys = append([]types.PinnedKey(nil), pd.PinnedKeys...) + return &cp +} + +// MarshalJSON serializes the store as a JSON array of PinnedDomain (matching +// the Rust port's `to_json` shape). +func (s *KeyPinStore) MarshalJSON() ([]byte, error) { + s.mu.Lock() + defer s.mu.Unlock() + domains := make([]*types.PinnedDomain, 0, len(s.domains)) + for _, d := range s.domains { + domains = append(domains, d) + } + return json.MarshalIndent(domains, "", " ") +} + +// LoadFromJSON populates s from the JSON array produced by MarshalJSON. +func (s *KeyPinStore) LoadFromJSON(data []byte) error { + var domains []types.PinnedDomain + if err := json.Unmarshal(data, &domains); err != nil { + return err + } + s.mu.Lock() + defer s.mu.Unlock() + s.domains = make(map[string]*types.PinnedDomain, len(domains)) + for i := range domains { + d := domains[i] + s.domains[d.Domain] = &d + } + return nil +} + +// CheckPinning runs CheckAndPin and returns ResultChanged as a typed +// VerificationError so verifiers can fail closed cleanly. 
+func CheckPinning(store *KeyPinStore, domain string, j *types.JWK) (Result, error) { + r := store.CheckAndPin(domain, j) + if r == ResultChanged { + return r, types.NewVerificationError( + types.ErrKeyPinMismatch, + fmt.Sprintf("Key for domain '%s' has changed since last pinned (kid: '%s')", domain, j.Kid), + ) + } + return r, nil +} diff --git a/go/pkg/pinning/pinning_test.go b/go/pkg/pinning/pinning_test.go new file mode 100644 index 0000000..9f8003a --- /dev/null +++ b/go/pkg/pinning/pinning_test.go @@ -0,0 +1,77 @@ +package pinning + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func makeJWK(kid, x string) *types.JWK { + return &types.JWK{ + Kid: kid, Kty: "EC", Crv: "P-256", + X: x, Y: "test-y", Use: "sig", + } +} + +func TestFirstUsePinsKey(t *testing.T) { + store := NewKeyPinStore() + j := makeJWK("k", "x1") + if r := store.CheckAndPin("example.com", j); r != ResultFirstUse { + t.Fatalf("first call should be FirstUse, got %v", r) + } + if r := store.CheckAndPin("example.com", j); r != ResultMatched { + t.Fatalf("second call should be Matched, got %v", r) + } +} + +func TestKeyChangeDetected(t *testing.T) { + store := NewKeyPinStore() + store.CheckAndPin("example.com", makeJWK("k", "x1")) + if r := store.CheckAndPin("example.com", makeJWK("k", "x2")); r != ResultChanged { + t.Fatalf("changed key should be Changed, got %v", r) + } +} + +func TestAddKeyAllowsRotation(t *testing.T) { + store := NewKeyPinStore() + j1 := makeJWK("k1", "x1") + j2 := makeJWK("k2", "x2") + store.CheckAndPin("example.com", j1) + store.AddKey("example.com", j2) + if r := store.CheckAndPin("example.com", j2); r != ResultMatched { + t.Fatalf("rotated key should match, got %v", r) + } +} + +func TestPinJSONRoundTrip(t *testing.T) { + store := NewKeyPinStore() + store.CheckAndPin("example.com", makeJWK("k", "x")) + data, err := store.MarshalJSON() + if err != nil { + t.Fatal(err) + } + store2 := NewKeyPinStore() + if err := store2.LoadFromJSON(data); err != 
nil { + t.Fatal(err) + } + if pd := store2.GetDomain("example.com"); pd == nil || len(pd.PinnedKeys) != 1 { + t.Fatalf("after roundtrip: %+v", pd) + } +} + +func TestCheckPinningError(t *testing.T) { + store := NewKeyPinStore() + store.CheckAndPin("example.com", makeJWK("k", "x1")) + if _, err := CheckPinning(store, "example.com", makeJWK("k", "x2")); err == nil { + t.Fatal("expected pin error") + } +} + +func TestDifferentDomainsIndependent(t *testing.T) { + store := NewKeyPinStore() + store.CheckAndPin("a.com", makeJWK("k", "xa")) + store.CheckAndPin("b.com", makeJWK("k", "xb")) + if r := store.CheckAndPin("a.com", makeJWK("k", "xb")); r != ResultChanged { + t.Fatalf("cross-domain: got %v", r) + } +} diff --git a/go/pkg/resolver/a2a_card.go b/go/pkg/resolver/a2a_card.go new file mode 100644 index 0000000..c17d446 --- /dev/null +++ b/go/pkg/resolver/a2a_card.go @@ -0,0 +1,129 @@ +package resolver + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "sync" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/a2a" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// A2aAgentCardPath is the standard well-known URL suffix for an A2A +// AgentCard. +const A2aAgentCardPath = "/.well-known/agent-card.json" + +// A2aAgentCardResolver fetches A2A AgentCards over HTTPS and exposes both the +// original card and the derived DiscoveryDocument. +// +// GET https://{domain}/.well-known/agent-card.json +// -> verify AgentPin extension signature +// -> cross-check that the embedded agentpin_endpoint host == fetched domain +// -> derive a DiscoveryDocument +type A2aAgentCardResolver struct { + // Client is the HTTP client used for fetches. When nil, a client with a + // 10s timeout and no redirects is constructed on first use. + Client *http.Client + + mu sync.RWMutex + lastCard *types.A2aAgentCard + lastFor string +} + +// NewA2aAgentCardResolver returns a resolver using the default HTTP client. 
+func NewA2aAgentCardResolver() *A2aAgentCardResolver { + return &A2aAgentCardResolver{} +} + +// LastCard returns the most recently resolved AgentCard for domain, or nil. +func (r *A2aAgentCardResolver) LastCard(domain string) *types.A2aAgentCard { + r.mu.RLock() + defer r.mu.RUnlock() + if r.lastFor != domain || r.lastCard == nil { + return nil + } + cp := *r.lastCard + return &cp +} + +// ResolveDiscovery fetches and verifies the AgentCard at the standard +// .well-known endpoint for domain, then returns the derived +// DiscoveryDocument. +func (r *A2aAgentCardResolver) ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) { + client := r.Client + if client == nil { + client = &http.Client{ + Timeout: 10 * time.Second, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + }, + } + } + url := fmt.Sprintf("https://%s%s", domain, A2aAgentCardPath) + resp, err := client.Get(url) + if err != nil { + return nil, types.NewVerificationError( + types.ErrDiscoveryFetchFailed, + fmt.Sprintf("Failed to fetch %s: %s", url, err), + ) + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode >= 300 && resp.StatusCode < 400 { + return nil, types.NewVerificationError( + types.ErrDiscoveryFetchFailed, + fmt.Sprintf("Redirect detected fetching %s (status %d). 
Redirects are not allowed.", url, resp.StatusCode), + ) + } + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, types.NewVerificationError( + types.ErrDiscoveryFetchFailed, + fmt.Sprintf("Failed to fetch %s: HTTP %d", url, resp.StatusCode), + ) + } + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("Failed to read AgentCard at %s: %s", url, err), + ) + } + var card types.A2aAgentCard + if err := json.Unmarshal(body, &card); err != nil { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("Failed to parse AgentCard at %s: %s", url, err), + ) + } + if err := a2a.VerifyAgentpinExtension(&card); err != nil { + return nil, err + } + endpointHost, err := CardEndpointHost(&card) + if err != nil { + return nil, err + } + if endpointHost != domain { + return nil, types.NewVerificationError( + types.ErrDomainMismatch, + fmt.Sprintf("AgentCard at %s declares agentpin endpoint host %s (mismatch)", domain, endpointHost), + ) + } + doc, err := DeriveDiscoveryFromCard(&card) + if err != nil { + return nil, err + } + r.mu.Lock() + r.lastCard = &card + r.lastFor = domain + r.mu.Unlock() + return &doc, nil +} + +// ResolveRevocation always returns (nil, nil) — A2A AgentCards do not carry +// revocation data. Pair with a separate revocation resolver via +// ChainResolver if revocation is required. 
+func (r *A2aAgentCardResolver) ResolveRevocation(_ string, _ *types.DiscoveryDocument) (*types.RevocationDocument, error) { + return nil, nil +} diff --git a/go/pkg/resolver/a2a_card_test.go b/go/pkg/resolver/a2a_card_test.go new file mode 100644 index 0000000..6e63a3e --- /dev/null +++ b/go/pkg/resolver/a2a_card_test.go @@ -0,0 +1,148 @@ +package resolver + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/a2a" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func signedCardForDomain(t *testing.T, domain string) types.A2aAgentCard { + t.Helper() + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + decl := &types.AgentDeclaration{ + AgentID: "urn:agentpin:" + domain + ":test", + Name: "Test Agent", + Description: "desc", + Version: "1.0.0", + Capabilities: []types.Capability{"read:*"}, + Status: types.AgentActive, + } + card, err := a2a.BuildAndSignAgentCard( + "https://"+domain+"/agent", + decl, + kp.PrivateKeyPEM, "kid-1", + "https://"+domain+"/.well-known/agent-identity.json", + a2a.BuildOptions{}, + ) + if err != nil { + t.Fatal(err) + } + return card +} + +func serveCard(card types.A2aAgentCard, status int) *httptest.Server { + return httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + if status == http.StatusOK { + _ = json.NewEncoder(w).Encode(card) + } + })) +} + +// resolverFor builds an A2aAgentCardResolver whose HTTP client trusts the +// given test server's TLS cert and rewrites all requests to point at it. 
+func resolverFor(server *httptest.Server) *A2aAgentCardResolver { + client := server.Client() + transport := client.Transport.(*http.Transport).Clone() + originalDial := transport.DialContext + _ = originalDial + client.Transport = &rewriteTransport{base: transport, target: server.URL} + client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + } + return &A2aAgentCardResolver{Client: client} +} + +// rewriteTransport rewrites the host of every request to the test server's +// host so resolving "https://example.com/..." actually hits the local TLS +// server. +type rewriteTransport struct { + base http.RoundTripper + target string +} + +func (t *rewriteTransport) RoundTrip(req *http.Request) (*http.Response, error) { + // Replace scheme+host with the test server's; preserve the original path. + clone := req.Clone(req.Context()) + // httptest server URL contains scheme + host; reuse via url.Parse. + parsed, err := req.URL.Parse(t.target + req.URL.RequestURI()) + if err != nil { + return nil, err + } + clone.URL = parsed + clone.Host = parsed.Host + return t.base.RoundTrip(clone) +} + +func TestA2aResolverResolvesAndVerifies(t *testing.T) { + card := signedCardForDomain(t, "example.com") + server := serveCard(card, http.StatusOK) + defer server.Close() + resolver := resolverFor(server) + doc, err := resolver.ResolveDiscovery("example.com") + if err != nil { + t.Fatal(err) + } + if doc.Entity != "example.com" { + t.Fatalf("entity %q", doc.Entity) + } + if got := resolver.LastCard("example.com"); got == nil || got.Name != "Test Agent" { + t.Fatalf("last card unexpected: %+v", got) + } +} + +func TestA2aResolverRejectsHttpError(t *testing.T) { + server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + resolver := resolverFor(server) + if _, err := resolver.ResolveDiscovery("example.com"); err == nil { + 
t.Fatal("expected HTTP 404 error") + } +} + +func TestA2aResolverRejectsTamperedCard(t *testing.T) { + card := signedCardForDomain(t, "example.com") + card.URL = "https://attacker.example/agent" + server := serveCard(card, http.StatusOK) + defer server.Close() + resolver := resolverFor(server) + if _, err := resolver.ResolveDiscovery("example.com"); err == nil { + t.Fatal("expected tamper rejection") + } +} + +func TestA2aResolverRejectsEndpointHostMismatch(t *testing.T) { + card := signedCardForDomain(t, "other.com") + server := serveCard(card, http.StatusOK) + defer server.Close() + resolver := resolverFor(server) + if _, err := resolver.ResolveDiscovery("example.com"); err == nil { + t.Fatal("expected mismatch error") + } +} + +func TestA2aResolverRevocationReturnsNil(t *testing.T) { + card := signedCardForDomain(t, "example.com") + server := serveCard(card, http.StatusOK) + defer server.Close() + resolver := resolverFor(server) + doc, err := resolver.ResolveDiscovery("example.com") + if err != nil { + t.Fatal(err) + } + rev, err := resolver.ResolveRevocation("example.com", doc) + if err != nil || rev != nil { + t.Fatalf("expected nil rev, got rev=%v err=%v", rev, err) + } +} diff --git a/go/pkg/resolver/local_card.go b/go/pkg/resolver/local_card.go new file mode 100644 index 0000000..42f226e --- /dev/null +++ b/go/pkg/resolver/local_card.go @@ -0,0 +1,202 @@ +package resolver + +import ( + "fmt" + "net/url" + "strings" + "sync" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/a2a" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// LocalAgentCardStore is an in-memory store of pre-registered A2A AgentCards +// keyed by their AgentPin discovery domain (v0.3.0). +// +// Mirrors the Rust agentpin::resolver_local::LocalAgentCardStore. Cards are +// added via Register (after the extension signature is verified) and looked +// up via ResolveDiscovery. Backs Symbiont's push-based external-agent +// registration flow. 
+type LocalAgentCardStore struct { + mu sync.RWMutex + cards map[string]types.A2aAgentCard + docs map[string]types.DiscoveryDocument +} + +// NewLocalAgentCardStore constructs an empty store. +func NewLocalAgentCardStore() *LocalAgentCardStore { + return &LocalAgentCardStore{ + cards: make(map[string]types.A2aAgentCard), + docs: make(map[string]types.DiscoveryDocument), + } +} + +// Register verifies the extension signature on card and stores it keyed by +// the host of its agentpin_endpoint. Re-registering an existing domain +// replaces the prior entry — useful for handling key rotation. +func (s *LocalAgentCardStore) Register(card types.A2aAgentCard) error { + if err := a2a.VerifyAgentpinExtension(&card); err != nil { + return err + } + domain, err := CardEndpointHost(&card) + if err != nil { + return err + } + doc, err := DeriveDiscoveryFromCard(&card) + if err != nil { + return err + } + s.mu.Lock() + defer s.mu.Unlock() + s.cards[domain] = card + s.docs[domain] = doc + return nil +} + +// Len returns the number of registered AgentCards. +func (s *LocalAgentCardStore) Len() int { + s.mu.RLock() + defer s.mu.RUnlock() + return len(s.cards) +} + +// IsEmpty reports whether no AgentCards are registered. +func (s *LocalAgentCardStore) IsEmpty() bool { + return s.Len() == 0 +} + +// ResolveCard returns the original AgentCard for domain, or (zero, false). +func (s *LocalAgentCardStore) ResolveCard(domain string) (types.A2aAgentCard, bool) { + s.mu.RLock() + defer s.mu.RUnlock() + c, ok := s.cards[domain] + return c, ok +} + +// ResolveDiscovery returns the derived DiscoveryDocument for domain. Returns +// a typed VerificationError(ErrDiscoveryInvalid) when the domain isn't +// registered. 
+func (s *LocalAgentCardStore) ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) { + s.mu.RLock() + doc, ok := s.docs[domain] + s.mu.RUnlock() + if !ok { + return nil, types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("Domain '%s' not in LocalAgentCardStore", domain), + ) + } + cp := doc + return &cp, nil +} + +// ResolveRevocation always returns (nil, nil) — the store carries no +// revocation data. Pair with another resolver via ChainResolver if revocation +// is required. +func (s *LocalAgentCardStore) ResolveRevocation(_ string, _ *types.DiscoveryDocument) (*types.RevocationDocument, error) { + return nil, nil +} + +// Remove drops a registered AgentCard. Returns true when one was removed. +func (s *LocalAgentCardStore) Remove(domain string) bool { + s.mu.Lock() + defer s.mu.Unlock() + _, ok := s.cards[domain] + delete(s.cards, domain) + delete(s.docs, domain) + return ok +} + +// CardEndpointHost extracts the host portion of an AgentCard's +// agentpin_endpoint URL. +func CardEndpointHost(card *types.A2aAgentCard) (string, error) { + if card == nil || card.Agentpin == nil { + return "", types.NewVerificationError( + types.ErrDiscoveryInvalid, + "AgentCard has no agentpin extension", + ) + } + u, err := url.Parse(card.Agentpin.AgentpinEndpoint) + if err != nil { + return "", types.NewVerificationError( + types.ErrDiscoveryInvalid, + fmt.Sprintf("Invalid agentpin_endpoint URL: %s", err), + ) + } + if u.Hostname() == "" { + return "", types.NewVerificationError( + types.ErrDiscoveryInvalid, + "agentpin_endpoint URL has no host", + ) + } + return u.Hostname(), nil +} + +// DeriveDiscoveryFromCard turns a signed A2A AgentCard into a minimal +// DiscoveryDocument so the rest of the AgentPin verification stack runs +// against AgentCards unchanged. 
+func DeriveDiscoveryFromCard(card *types.A2aAgentCard) (types.DiscoveryDocument, error) { + if card == nil || card.Agentpin == nil { + return types.DiscoveryDocument{}, types.NewVerificationError( + types.ErrDiscoveryInvalid, + "AgentCard has no agentpin extension", + ) + } + domain, err := CardEndpointHost(card) + if err != nil { + return types.DiscoveryDocument{}, err + } + + caps := make([]types.Capability, 0, len(card.Skills)) + for _, s := range card.Skills { + caps = append(caps, types.Capability(s.ID)) + } + + var constraints *types.Constraints + if !types.AllowedDomainsHelper.IsUnrestricted(card.Capabilities.AllowedDomains) { + constraints = &types.Constraints{ + AllowedDomains: append([]string{}, card.Capabilities.AllowedDomains...), + } + } + + agentID := fmt.Sprintf("urn:agentpin:%s:%s", domain, slug(card.Name)) + agent := types.AgentDeclaration{ + AgentID: agentID, + Name: card.Name, + Description: card.Description, + Version: card.Version, + Capabilities: caps, + Constraints: constraints, + Status: types.AgentActive, + } + + return types.DiscoveryDocument{ + AgentpinVersion: "0.3", + Entity: domain, + EntityType: types.EntityBoth, + PublicKeys: []types.JWK{card.Agentpin.PublicKeyJWK}, + Agents: []types.AgentDeclaration{agent}, + A2aEndpoint: card.Agentpin.AgentpinEndpoint, + MaxDelegationDepth: 0, + UpdatedAt: time.Now().UTC().Format("2006-01-02T15:04:05Z"), + }, nil +} + +// slug lower-cases and replaces non-alphanumeric ASCII chars with '-', then +// strips leading/trailing '-'. Mirrors the Rust helper. 
+func slug(input string) string { + var b strings.Builder + b.Grow(len(input)) + for _, r := range input { + switch { + case r >= 'a' && r <= 'z', r >= '0' && r <= '9': + b.WriteRune(r) + case r >= 'A' && r <= 'Z': + b.WriteRune(r + ('a' - 'A')) + default: + b.WriteRune('-') + } + } + return strings.Trim(b.String(), "-") +} diff --git a/go/pkg/resolver/local_card_test.go b/go/pkg/resolver/local_card_test.go new file mode 100644 index 0000000..d574e61 --- /dev/null +++ b/go/pkg/resolver/local_card_test.go @@ -0,0 +1,163 @@ +package resolver + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/a2a" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func signedCard(t *testing.T) types.A2aAgentCard { + t.Helper() + kp, err := crypto.GenerateKeyPair() + if err != nil { + t.Fatal(err) + } + decl := &types.AgentDeclaration{ + AgentID: "urn:agentpin:example.com:tester", + Name: "Tester", + Description: "Test agent", + Version: "1.0.0", + Capabilities: []types.Capability{"read:*"}, + Constraints: &types.Constraints{AllowedDomains: []string{"partner.com"}}, + Status: types.AgentActive, + } + card, err := a2a.BuildAndSignAgentCard( + "https://example.com/agent", + decl, + kp.PrivateKeyPEM, "kid-1", + "https://example.com/.well-known/agent-identity.json", + a2a.BuildOptions{}, + ) + if err != nil { + t.Fatal(err) + } + return card +} + +func TestCardEndpointHost(t *testing.T) { + card := signedCard(t) + host, err := CardEndpointHost(&card) + if err != nil { + t.Fatal(err) + } + if host != "example.com" { + t.Fatalf("got %q", host) + } +} + +func TestCardEndpointHostWithoutExtension(t *testing.T) { + card := types.A2aAgentCard{Name: "x"} + if _, err := CardEndpointHost(&card); err == nil { + t.Fatal("expected error for card without extension") + } +} + +func TestDeriveDiscoveryFromCard(t *testing.T) { + card := signedCard(t) + doc, err := DeriveDiscoveryFromCard(&card) + if err != nil { + t.Fatal(err) + } + if 
doc.Entity != "example.com" { + t.Fatalf("entity %q", doc.Entity) + } + if len(doc.PublicKeys) != 1 { + t.Fatalf("expected 1 key, got %d", len(doc.PublicKeys)) + } + if len(doc.Agents) != 1 { + t.Fatalf("expected 1 agent, got %d", len(doc.Agents)) + } + if doc.Agents[0].Name != "Tester" { + t.Fatalf("name %q", doc.Agents[0].Name) + } + if doc.A2aEndpoint != "https://example.com/.well-known/agent-identity.json" { + t.Fatalf("a2a_endpoint %q", doc.A2aEndpoint) + } + if doc.Agents[0].Constraints == nil || len(doc.Agents[0].Constraints.AllowedDomains) != 1 { + t.Fatalf("missing constraints") + } +} + +func TestLocalAgentCardStoreRegisterThenResolve(t *testing.T) { + store := NewLocalAgentCardStore() + if err := store.Register(signedCard(t)); err != nil { + t.Fatal(err) + } + if store.Len() != 1 { + t.Fatalf("len=%d", store.Len()) + } + doc, err := store.ResolveDiscovery("example.com") + if err != nil { + t.Fatal(err) + } + if doc.Entity != "example.com" { + t.Fatalf("entity %q", doc.Entity) + } +} + +func TestLocalAgentCardStoreRegisterPropagatesSignatureFailure(t *testing.T) { + card := signedCard(t) + card.URL = "https://attacker.example/agent" // tamper + store := NewLocalAgentCardStore() + if err := store.Register(card); err == nil { + t.Fatal("expected signature failure") + } + if !store.IsEmpty() { + t.Fatal("store should be empty after failed register") + } +} + +func TestLocalAgentCardStoreResolveMissing(t *testing.T) { + store := NewLocalAgentCardStore() + if _, err := store.ResolveDiscovery("missing.com"); err == nil { + t.Fatal("expected error for missing domain") + } +} + +func TestLocalAgentCardStoreReRegisterReplaces(t *testing.T) { + store := NewLocalAgentCardStore() + if err := store.Register(signedCard(t)); err != nil { + t.Fatal(err) + } + if err := store.Register(signedCard(t)); err != nil { + t.Fatal(err) + } + if store.Len() != 1 { + t.Fatalf("expected re-register to replace, len=%d", store.Len()) + } +} + +func TestLocalAgentCardStoreRemove(t 
*testing.T) { + store := NewLocalAgentCardStore() + _ = store.Register(signedCard(t)) + if !store.Remove("example.com") { + t.Fatal("expected remove to return true") + } + if !store.IsEmpty() { + t.Fatal("store should be empty") + } + if store.Remove("example.com") { + t.Fatal("expected second remove to return false") + } +} + +func TestLocalAgentCardStoreResolveCard(t *testing.T) { + store := NewLocalAgentCardStore() + _ = store.Register(signedCard(t)) + card, ok := store.ResolveCard("example.com") + if !ok || card.Name != "Tester" { + t.Fatalf("unexpected: ok=%v card=%+v", ok, card) + } +} + +func TestLocalAgentCardStoreResolveRevocationReturnsNil(t *testing.T) { + store := NewLocalAgentCardStore() + _ = store.Register(signedCard(t)) + doc, _ := store.ResolveDiscovery("example.com") + rev, err := store.ResolveRevocation("example.com", doc) + if err != nil || rev != nil { + t.Fatalf("expected nil rev, got rev=%v err=%v", rev, err) + } +} diff --git a/go/pkg/resolver/resolver.go b/go/pkg/resolver/resolver.go new file mode 100644 index 0000000..cb7b517 --- /dev/null +++ b/go/pkg/resolver/resolver.go @@ -0,0 +1,185 @@ +// Package resolver provides discovery / revocation resolution strategies +// (well-known HTTPS, local file, trust bundle, chain). +package resolver + +import ( + "encoding/json" + "errors" + "fmt" + "net/http" + "os" + "path/filepath" + + "github.com/ThirdKeyAi/agentpin/go/pkg/discovery" + "github.com/ThirdKeyAi/agentpin/go/pkg/revocation" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// DiscoveryResolver resolves discovery and revocation documents for a +// domain. Implementations may fetch from HTTPS, the local filesystem, a +// pre-loaded trust bundle, or anything else. 
+type DiscoveryResolver interface { + ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) + ResolveRevocation(domain string, discovery *types.DiscoveryDocument) (*types.RevocationDocument, error) +} + +// WellKnownResolver fetches documents from the standard `.well-known` HTTPS +// endpoint. +type WellKnownResolver struct { + Client *http.Client +} + +// NewWellKnownResolver returns a resolver using the default HTTP client. +func NewWellKnownResolver() *WellKnownResolver { return &WellKnownResolver{} } + +// ResolveDiscovery implements DiscoveryResolver. +func (r *WellKnownResolver) ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) { + return discovery.FetchDiscoveryDocument(r.Client, domain) +} + +// ResolveRevocation implements DiscoveryResolver. +func (r *WellKnownResolver) ResolveRevocation(_ string, doc *types.DiscoveryDocument) (*types.RevocationDocument, error) { + if doc == nil || doc.RevocationEndpoint == "" { + return nil, nil + } + return revocation.FetchRevocationDocument(r.Client, doc.RevocationEndpoint) +} + +// LocalFileResolver reads discovery documents from a local directory in +// `{domain}.json` form, and (optionally) revocation documents from +// `{domain}.revocations.json`. +type LocalFileResolver struct { + DiscoveryDir string + RevocationDir string +} + +// NewLocalFileResolver returns a resolver rooted at discoveryDir. If +// revocationDir is empty, revocations are read from discoveryDir. +func NewLocalFileResolver(discoveryDir, revocationDir string) *LocalFileResolver { + return &LocalFileResolver{DiscoveryDir: discoveryDir, RevocationDir: revocationDir} +} + +// ResolveDiscovery implements DiscoveryResolver. 
+func (r *LocalFileResolver) ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) { + path := filepath.Join(r.DiscoveryDir, domain+".json") + data, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("cannot read %s: %w", path, err) + } + var doc types.DiscoveryDocument + if err := json.Unmarshal(data, &doc); err != nil { + return nil, err + } + return &doc, nil +} + +// ResolveRevocation implements DiscoveryResolver. +func (r *LocalFileResolver) ResolveRevocation(domain string, _ *types.DiscoveryDocument) (*types.RevocationDocument, error) { + dir := r.RevocationDir + if dir == "" { + dir = r.DiscoveryDir + } + path := filepath.Join(dir, domain+".revocations.json") + data, err := os.ReadFile(path) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil, nil + } + return nil, fmt.Errorf("cannot read %s: %w", path, err) + } + var doc types.RevocationDocument + if err := json.Unmarshal(data, &doc); err != nil { + return nil, err + } + return &doc, nil +} + +// TrustBundleResolver resolves documents from a pre-loaded TrustBundle. +type TrustBundleResolver struct { + discovery map[string]*types.DiscoveryDocument + revocations map[string]*types.RevocationDocument +} + +// NewTrustBundleResolver indexes b by entity for O(1) lookup. +func NewTrustBundleResolver(b *types.TrustBundle) *TrustBundleResolver { + r := &TrustBundleResolver{ + discovery: make(map[string]*types.DiscoveryDocument, len(b.Documents)), + revocations: make(map[string]*types.RevocationDocument, len(b.Revocations)), + } + for i := range b.Documents { + d := b.Documents[i] + r.discovery[d.Entity] = &d + } + for i := range b.Revocations { + rev := b.Revocations[i] + r.revocations[rev.Entity] = &rev + } + return r +} + +// TrustBundleResolverFromJSON builds a TrustBundleResolver from a JSON-encoded +// TrustBundle. 
+func TrustBundleResolverFromJSON(data []byte) (*TrustBundleResolver, error) { + var b types.TrustBundle + if err := json.Unmarshal(data, &b); err != nil { + return nil, err + } + return NewTrustBundleResolver(&b), nil +} + +// ResolveDiscovery implements DiscoveryResolver. +func (r *TrustBundleResolver) ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) { + d, ok := r.discovery[domain] + if !ok { + return nil, fmt.Errorf("domain '%s' not in trust bundle", domain) + } + cp := *d + return &cp, nil +} + +// ResolveRevocation implements DiscoveryResolver. +func (r *TrustBundleResolver) ResolveRevocation(domain string, _ *types.DiscoveryDocument) (*types.RevocationDocument, error) { + rev, ok := r.revocations[domain] + if !ok { + return nil, nil + } + cp := *rev + return &cp, nil +} + +// ChainResolver tries a sequence of resolvers in order until one succeeds. +type ChainResolver struct { + Resolvers []DiscoveryResolver +} + +// NewChainResolver builds a ChainResolver from a slice of resolvers. +func NewChainResolver(resolvers []DiscoveryResolver) *ChainResolver { + return &ChainResolver{Resolvers: resolvers} +} + +// ResolveDiscovery implements DiscoveryResolver. +func (r *ChainResolver) ResolveDiscovery(domain string) (*types.DiscoveryDocument, error) { + var lastErr error = errors.New("no resolvers configured") + for _, sub := range r.Resolvers { + doc, err := sub.ResolveDiscovery(domain) + if err == nil { + return doc, nil + } + lastErr = err + } + return nil, lastErr +} + +// ResolveRevocation implements DiscoveryResolver. 
+func (r *ChainResolver) ResolveRevocation(domain string, doc *types.DiscoveryDocument) (*types.RevocationDocument, error) { + for _, sub := range r.Resolvers { + rev, err := sub.ResolveRevocation(domain, doc) + if err != nil { + continue + } + if rev != nil { + return rev, nil + } + } + return nil, nil +} diff --git a/go/pkg/resolver/resolver_test.go b/go/pkg/resolver/resolver_test.go new file mode 100644 index 0000000..a942963 --- /dev/null +++ b/go/pkg/resolver/resolver_test.go @@ -0,0 +1,144 @@ +package resolver + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/discovery" + "github.com/ThirdKeyAi/agentpin/go/pkg/revocation" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func makeDiscovery(domain string) types.DiscoveryDocument { + return discovery.BuildDiscoveryDocument( + domain, + types.EntityMaker, + []types.JWK{{Kid: "test-key", Kty: "EC", Crv: "P-256", X: "x", Y: "y", Use: "sig"}}, + nil, + 2, + "2026-01-15T00:00:00Z", + ) +} + +func TestTrustBundleResolverHit(t *testing.T) { + b := types.TrustBundle{ + AgentpinBundleVersion: "0.1", + Documents: []types.DiscoveryDocument{makeDiscovery("example.com")}, + } + r := NewTrustBundleResolver(&b) + d, err := r.ResolveDiscovery("example.com") + if err != nil { + t.Fatal(err) + } + if d.Entity != "example.com" { + t.Fatal("entity wrong") + } +} + +func TestTrustBundleResolverMiss(t *testing.T) { + r := NewTrustBundleResolver(&types.TrustBundle{}) + if _, err := r.ResolveDiscovery("missing.com"); err == nil { + t.Fatal("expected error") + } +} + +func TestTrustBundleResolverRevocation(t *testing.T) { + rev := revocation.BuildRevocationDocument("example.com") + b := types.TrustBundle{ + AgentpinBundleVersion: "0.1", + Documents: []types.DiscoveryDocument{makeDiscovery("example.com")}, + Revocations: []types.RevocationDocument{rev}, + } + r := NewTrustBundleResolver(&b) + d, _ := r.ResolveDiscovery("example.com") + got, err := 
r.ResolveRevocation("example.com", d) + if err != nil { + t.Fatal(err) + } + if got == nil { + t.Fatal("expected revocation document") + } +} + +func TestTrustBundleResolverFromJSON(t *testing.T) { + b := types.TrustBundle{ + AgentpinBundleVersion: "0.1", + Documents: []types.DiscoveryDocument{makeDiscovery("example.com")}, + } + data, _ := json.Marshal(b) + r, err := TrustBundleResolverFromJSON(data) + if err != nil { + t.Fatal(err) + } + if _, err := r.ResolveDiscovery("example.com"); err != nil { + t.Fatal(err) + } +} + +func TestLocalFileResolver(t *testing.T) { + dir := t.TempDir() + d := makeDiscovery("local.example.com") + data, _ := json.MarshalIndent(d, "", " ") + if err := os.WriteFile(filepath.Join(dir, "local.example.com.json"), data, 0o644); err != nil { + t.Fatal(err) + } + r := NewLocalFileResolver(dir, "") + got, err := r.ResolveDiscovery("local.example.com") + if err != nil { + t.Fatal(err) + } + if got.Entity != "local.example.com" { + t.Fatal("entity wrong") + } +} + +func TestLocalFileResolverMissing(t *testing.T) { + r := NewLocalFileResolver(t.TempDir(), "") + if _, err := r.ResolveDiscovery("missing.com"); err == nil { + t.Fatal("expected error") + } +} + +func TestLocalFileResolverRevocation(t *testing.T) { + dir := t.TempDir() + d := makeDiscovery("local.example.com") + rev := revocation.BuildRevocationDocument("local.example.com") + dData, _ := json.Marshal(d) + rData, _ := json.Marshal(rev) + _ = os.WriteFile(filepath.Join(dir, "local.example.com.json"), dData, 0o644) + _ = os.WriteFile(filepath.Join(dir, "local.example.com.revocations.json"), rData, 0o644) + r := NewLocalFileResolver(dir, "") + got, err := r.ResolveRevocation("local.example.com", &d) + if err != nil { + t.Fatal(err) + } + if got == nil { + t.Fatal("expected revocation") + } +} + +func TestChainResolverFirstWins(t *testing.T) { + a := &TrustBundleResolver{discovery: map[string]*types.DiscoveryDocument{}} + d := makeDiscovery("a.com") + a.discovery["a.com"] = &d + b := 
&TrustBundleResolver{discovery: map[string]*types.DiscoveryDocument{}} + chain := NewChainResolver([]DiscoveryResolver{a, b}) + if _, err := chain.ResolveDiscovery("a.com"); err != nil { + t.Fatal(err) + } + if _, err := chain.ResolveDiscovery("c.com"); err == nil { + t.Fatal("expected miss") + } +} + +func TestChainResolverFallthrough(t *testing.T) { + empty := &TrustBundleResolver{discovery: map[string]*types.DiscoveryDocument{}} + d := makeDiscovery("example.com") + with := &TrustBundleResolver{discovery: map[string]*types.DiscoveryDocument{"example.com": &d}} + chain := NewChainResolver([]DiscoveryResolver{empty, with}) + if _, err := chain.ResolveDiscovery("example.com"); err != nil { + t.Fatal(err) + } +} diff --git a/go/pkg/revocation/revocation.go b/go/pkg/revocation/revocation.go new file mode 100644 index 0000000..5b7fda9 --- /dev/null +++ b/go/pkg/revocation/revocation.go @@ -0,0 +1,118 @@ +// Package revocation builds revocation documents and provides revocation +// checks against credential / agent / key identifiers. +package revocation + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/ThirdKeyAi/agentpin/go/internal/version" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// BuildRevocationDocument creates an empty revocation document for entity. +func BuildRevocationDocument(entity string) types.RevocationDocument { + return types.RevocationDocument{ + AgentpinVersion: version.ProtocolVersion, + Entity: entity, + UpdatedAt: time.Now().UTC().Format(time.RFC3339), + RevokedCredentials: []types.RevokedCredential{}, + RevokedAgents: []types.RevokedAgent{}, + RevokedKeys: []types.RevokedKey{}, + } +} + +// AddRevokedCredential records jti as revoked with the given reason. 
+func AddRevokedCredential(doc *types.RevocationDocument, jti string, reason types.RevocationReason) { + now := time.Now().UTC().Format(time.RFC3339) + doc.RevokedCredentials = append(doc.RevokedCredentials, types.RevokedCredential{ + Jti: jti, + RevokedAt: now, + Reason: reason, + }) + doc.UpdatedAt = now +} + +// AddRevokedAgent records agentID as revoked with the given reason. +func AddRevokedAgent(doc *types.RevocationDocument, agentID string, reason types.RevocationReason) { + now := time.Now().UTC().Format(time.RFC3339) + doc.RevokedAgents = append(doc.RevokedAgents, types.RevokedAgent{ + AgentID: agentID, + RevokedAt: now, + Reason: reason, + }) + doc.UpdatedAt = now +} + +// AddRevokedKey records kid as revoked with the given reason. +func AddRevokedKey(doc *types.RevocationDocument, kid string, reason types.RevocationReason) { + now := time.Now().UTC().Format(time.RFC3339) + doc.RevokedKeys = append(doc.RevokedKeys, types.RevokedKey{ + Kid: kid, + RevokedAt: now, + Reason: reason, + }) + doc.UpdatedAt = now +} + +// CheckRevocation returns a typed VerificationError if the credential, agent, +// or key is on the revocation list. +func CheckRevocation(doc *types.RevocationDocument, jti, agentID, kid string) error { + for _, rc := range doc.RevokedCredentials { + if rc.Jti == jti { + return types.NewVerificationError( + types.ErrCredentialRevoked, + fmt.Sprintf("Credential %s revoked: %s", jti, rc.Reason), + ) + } + } + for _, ra := range doc.RevokedAgents { + if ra.AgentID == agentID { + return types.NewVerificationError( + types.ErrAgentInactive, + fmt.Sprintf("Agent %s revoked: %s", agentID, ra.Reason), + ) + } + } + for _, rk := range doc.RevokedKeys { + if rk.Kid == kid { + return types.NewVerificationError( + types.ErrKeyRevoked, + fmt.Sprintf("Key %s revoked: %s", kid, rk.Reason), + ) + } + } + return nil +} + +// FetchRevocationDocument fetches a revocation document from url. 
+func FetchRevocationDocument(client *http.Client, url string) (*types.RevocationDocument, error) { + if client == nil { + client = &http.Client{ + Timeout: 10 * time.Second, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + }, + } + } + resp, err := client.Get(url) + if err != nil { + return nil, fmt.Errorf("failed to fetch %s: %w", url, err) + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, fmt.Errorf("HTTP %d fetching %s", resp.StatusCode, url) + } + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("read body: %w", err) + } + var doc types.RevocationDocument + if err := json.Unmarshal(body, &doc); err != nil { + return nil, fmt.Errorf("invalid JSON from %s: %w", url, err) + } + return &doc, nil +} diff --git a/go/pkg/revocation/revocation_test.go b/go/pkg/revocation/revocation_test.go new file mode 100644 index 0000000..84d99f1 --- /dev/null +++ b/go/pkg/revocation/revocation_test.go @@ -0,0 +1,59 @@ +package revocation + +import ( + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +func TestBuildAndAddRevocations(t *testing.T) { + d := BuildRevocationDocument("example.com") + if d.Entity != "example.com" { + t.Fatal("entity") + } + if len(d.RevokedCredentials) != 0 { + t.Fatal("should start empty") + } + AddRevokedCredential(&d, "jti-1", types.ReasonKeyCompromise) + AddRevokedAgent(&d, "agent-1", types.ReasonPolicyViolation) + AddRevokedKey(&d, "kid-1", types.ReasonSuperseded) + if len(d.RevokedCredentials) != 1 || len(d.RevokedAgents) != 1 || len(d.RevokedKeys) != 1 { + t.Fatal("expected 1 each") + } +} + +func TestCheckRevocationClean(t *testing.T) { + d := BuildRevocationDocument("example.com") + if err := CheckRevocation(&d, "j", "a", "k"); err != nil { + t.Fatalf("clean check should pass: %v", err) + } +} + +func TestCheckRevocationCredentialRevoked(t *testing.T) { + d := 
BuildRevocationDocument("example.com") + AddRevokedCredential(&d, "jti-bad", types.ReasonKeyCompromise) + err := CheckRevocation(&d, "jti-bad", "a", "k") + if err == nil { + t.Fatal("revoked jti should fail") + } + ve, ok := types.AsVerificationError(err) + if !ok || ve.Code != types.ErrCredentialRevoked { + t.Fatalf("expected CredentialRevoked, got %v", err) + } +} + +func TestCheckRevocationAgentRevoked(t *testing.T) { + d := BuildRevocationDocument("example.com") + AddRevokedAgent(&d, "bad-agent", types.ReasonPrivilegeWithdrawn) + if err := CheckRevocation(&d, "j", "bad-agent", "k"); err == nil { + t.Fatal("revoked agent should fail") + } +} + +func TestCheckRevocationKeyRevoked(t *testing.T) { + d := BuildRevocationDocument("example.com") + AddRevokedKey(&d, "bad-key", types.ReasonSuperseded) + if err := CheckRevocation(&d, "j", "a", "bad-key"); err == nil { + t.Fatal("revoked key should fail") + } +} diff --git a/go/pkg/types/a2a.go b/go/pkg/types/a2a.go new file mode 100644 index 0000000..252163d --- /dev/null +++ b/go/pkg/types/a2a.go @@ -0,0 +1,68 @@ +package types + +// A2A AgentCard extension types (v0.3.0). +// +// AgentPin extends the Google A2A AgentCard format with cryptographic +// identity verification. The AgentpinExtension payload carries the AgentPin +// endpoint URL, the entity's public key in JWK form, and a detached ECDSA +// signature over the rest of the AgentCard. +// +// Mirrors the Rust agentpin::types::a2a module — the wire format is +// byte-identical so cards signed in any of Rust/JS/Python/Go verify in the +// others. + +// A2aAgentCard is the minimal subset of the A2A AgentCard that AgentPin +// populates or reads. Additional upstream fields are not modeled here while +// the A2A spec is still draft; once it stabilises this can be re-exported +// from an upstream a2a-types package. 
type A2aAgentCard struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
	Version     string `json:"version,omitempty"`
	// URL is the agent's A2A service endpoint. It is part of the signed card
	// content, so tampering with it invalidates the extension signature.
	URL          string               `json:"url"`
	Capabilities A2aAgentCapabilities `json:"capabilities"`
	Skills       []A2aAgentSkill      `json:"skills"`
	// Agentpin is the AgentPin extension payload. Present when the card is
	// signed and resolvable via the AgentPin protocol.
	Agentpin *AgentpinExtension `json:"agentpin,omitempty"`
}

// A2aAgentCapabilities mirrors the A2A "AgentCapabilities" shape with one
// AgentPin-specific addition: AllowedDomains, propagated from the source
// Constraints so cross-protocol A2A peers can scope tool verification.
type A2aAgentCapabilities struct {
	Streaming         bool `json:"streaming"`
	PushNotifications bool `json:"pushNotifications"`
	// AllowedDomains is omitted (rather than emitted as `null` or `[]`) when
	// the agent is unrestricted, matching the Rust SDK's serde behaviour and
	// the "empty list = unrestricted" convention.
	AllowedDomains []string `json:"allowed_domains,omitempty"`
}

// A2aAgentSkill mirrors the A2A "AgentSkill" shape. AgentPin's Capability
// strings (e.g. "read:customers/*") map directly to the skill `id`.
type A2aAgentSkill struct {
	ID          string `json:"id"`
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
}

// AgentpinExtension is the AgentPin extension carried inside an A2A
// AgentCard's `agentpin` field.
//
// The signature is a detached ECDSA P-256 signature over the canonical bytes
// of the AgentCard with this extension field cleared. Verifiers reconstruct
// that canonical input and check the signature against PublicKeyJWK.
type AgentpinExtension struct {
	// AgentpinEndpoint is the URL of the entity's AgentPin discovery
	// document; resolvers also key registered cards by this URL's host.
	AgentpinEndpoint string `json:"agentpin_endpoint"`
	// PublicKeyJWK is the entity's signing key in JWK form.
	PublicKeyJWK JWK `json:"public_key_jwk"`
	// Signature is the detached base64 signature described above.
	Signature string `json:"signature"`
}

// CapabilityToSkill maps an AgentPin Capability to a minimal A2aAgentSkill.
+// The capability string itself becomes both the skill `id` and the default +// `name`. +func CapabilityToSkill(cap Capability) A2aAgentSkill { + id := string(cap) + return A2aAgentSkill{ID: id, Name: id} +} diff --git a/go/pkg/types/allowed_domains.go b/go/pkg/types/allowed_domains.go new file mode 100644 index 0000000..0e8cce0 --- /dev/null +++ b/go/pkg/types/allowed_domains.go @@ -0,0 +1,78 @@ +package types + +// AllowedDomains is a helper namespace for the "allowed_domains" constraint +// treated as a typed allow-list (v0.3.0). +// +// Convention: an empty list means *unrestricted* (all domains trusted); a +// non-empty list restricts the agent to exactly those domains. Mirrors the +// AllowedDomains type in the Rust SDK. +// +// Lists are plain []string; the package exposes only static helpers so callers +// can keep using slices directly. +type AllowedDomains struct{} + +// Unrestricted returns an empty (unrestricted) allow-list. +func (AllowedDomains) Unrestricted() []string { + return nil +} + +// FromDomains constructs an allow-list from a slice of strings. +func (AllowedDomains) FromDomains(domains []string) []string { + out := make([]string, len(domains)) + copy(out, domains) + return out +} + +// IsUnrestricted reports whether the list is empty (no restriction). +func (AllowedDomains) IsUnrestricted(list []string) bool { + return len(list) == 0 +} + +// Allows reports whether domain is permitted under the allow-list. An empty +// list allows everything. +func (a AllowedDomains) Allows(list []string, domain string) bool { + if a.IsUnrestricted(list) { + return true + } + for _, d := range list { + if d == domain { + return true + } + } + return false +} + +// Intersect returns the intersection of two allow-lists. Following the +// convention that empty = unrestricted: unrestricted ∩ X = X. +func (a AllowedDomains) Intersect(lhs, rhs []string) []string { + if a.IsUnrestricted(lhs) { + return append([]string{}, rhs...) 
+ } + if a.IsUnrestricted(rhs) { + return append([]string{}, lhs...) + } + rhsSet := make(map[string]struct{}, len(rhs)) + for _, d := range rhs { + rhsSet[d] = struct{}{} + } + out := make([]string, 0) + for _, d := range lhs { + if _, ok := rhsSet[d]; ok { + out = append(out, d) + } + } + return out +} + +// FromConstraints extracts the allow-list from a Constraints value. Returns +// Unrestricted() when constraints is nil or has no allowed_domains. +func (a AllowedDomains) FromConstraints(c *Constraints) []string { + if c == nil || len(c.AllowedDomains) == 0 { + return a.Unrestricted() + } + return a.FromDomains(c.AllowedDomains) +} + +// AllowedDomainsHelper is the singleton instance used to access the helper +// methods, e.g. types.AllowedDomainsHelper.Intersect(a, b). +var AllowedDomainsHelper AllowedDomains diff --git a/go/pkg/types/allowed_domains_test.go b/go/pkg/types/allowed_domains_test.go new file mode 100644 index 0000000..5202d24 --- /dev/null +++ b/go/pkg/types/allowed_domains_test.go @@ -0,0 +1,77 @@ +package types + +import "testing" + +func TestAllowedDomainsUnrestrictedAcceptsAnything(t *testing.T) { + h := AllowedDomainsHelper + ad := h.Unrestricted() + if !h.IsUnrestricted(ad) { + t.Fatal("expected unrestricted") + } + if !h.Allows(ad, "anything.com") { + t.Fatal("unrestricted should allow everything") + } +} + +func TestAllowedDomainsRestrictedFilters(t *testing.T) { + h := AllowedDomainsHelper + ad := h.FromDomains([]string{"a.com", "b.com"}) + if h.IsUnrestricted(ad) { + t.Fatal("expected restricted") + } + if !h.Allows(ad, "a.com") { + t.Fatal("should allow a.com") + } + if h.Allows(ad, "c.com") { + t.Fatal("should reject c.com") + } +} + +func TestAllowedDomainsIntersectWithUnrestrictedReturnsOther(t *testing.T) { + h := AllowedDomainsHelper + unrestricted := h.Unrestricted() + restricted := h.FromDomains([]string{"a.com", "b.com"}) + if got := h.Intersect(unrestricted, restricted); !sliceEqual(got, restricted) { + t.Fatalf("got %v", got) 
+ } + if got := h.Intersect(restricted, unrestricted); !sliceEqual(got, restricted) { + t.Fatalf("got %v", got) + } +} + +func TestAllowedDomainsIntersectReturnsOverlap(t *testing.T) { + h := AllowedDomainsHelper + lhs := h.FromDomains([]string{"a.com", "b.com", "c.com"}) + rhs := h.FromDomains([]string{"b.com", "c.com", "d.com"}) + got := h.Intersect(lhs, rhs) + want := []string{"b.com", "c.com"} + if !sliceEqual(got, want) { + t.Fatalf("got %v want %v", got, want) + } +} + +func TestAllowedDomainsFromConstraints(t *testing.T) { + h := AllowedDomainsHelper + c := &Constraints{AllowedDomains: []string{"a.com"}} + if got := h.FromConstraints(c); !sliceEqual(got, []string{"a.com"}) { + t.Fatalf("got %v", got) + } + if got := h.FromConstraints(nil); !h.IsUnrestricted(got) { + t.Fatalf("got %v", got) + } + if got := h.FromConstraints(&Constraints{}); !h.IsUnrestricted(got) { + t.Fatalf("got %v", got) + } +} + +func sliceEqual(a, b []string) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} diff --git a/go/pkg/types/bundle.go b/go/pkg/types/bundle.go new file mode 100644 index 0000000..e9c4bb1 --- /dev/null +++ b/go/pkg/types/bundle.go @@ -0,0 +1,32 @@ +package types + +// TrustBundle is a pre-shared collection of discovery and revocation +// documents for offline / air-gapped verification. +type TrustBundle struct { + AgentpinBundleVersion string `json:"agentpin_bundle_version"` + CreatedAt string `json:"created_at"` + Documents []DiscoveryDocument `json:"documents"` + Revocations []RevocationDocument `json:"revocations"` +} + +// FindDiscovery returns the first discovery document whose entity matches +// domain, or nil. 
+func (b *TrustBundle) FindDiscovery(domain string) *DiscoveryDocument { + for i := range b.Documents { + if b.Documents[i].Entity == domain { + return &b.Documents[i] + } + } + return nil +} + +// FindRevocation returns the first revocation document whose entity matches +// domain, or nil. +func (b *TrustBundle) FindRevocation(domain string) *RevocationDocument { + for i := range b.Revocations { + if b.Revocations[i].Entity == domain { + return &b.Revocations[i] + } + } + return nil +} diff --git a/go/pkg/types/bundle_test.go b/go/pkg/types/bundle_test.go new file mode 100644 index 0000000..5e7a755 --- /dev/null +++ b/go/pkg/types/bundle_test.go @@ -0,0 +1,54 @@ +package types + +import ( + "encoding/json" + "testing" +) + +func TestBundleFindHelpers(t *testing.T) { + b := TrustBundle{ + AgentpinBundleVersion: "0.1", + CreatedAt: "2026-02-10T00:00:00Z", + Documents: []DiscoveryDocument{ + { + Entity: "example.com", + AgentpinVersion: "0.1", + EntityType: EntityMaker, + MaxDelegationDepth: 2, + PublicKeys: []JWK{{Kid: "k", Kty: "EC", Crv: "P-256"}}, + Agents: []AgentDeclaration{}, + UpdatedAt: "x", + }, + }, + Revocations: []RevocationDocument{}, + } + if b.FindDiscovery("example.com") == nil { + t.Fatal("FindDiscovery hit expected") + } + if b.FindDiscovery("missing.com") != nil { + t.Fatal("FindDiscovery miss expected") + } + if b.FindRevocation("example.com") != nil { + t.Fatal("FindRevocation should be nil") + } +} + +func TestBundleJSONRoundTrip(t *testing.T) { + b := TrustBundle{ + AgentpinBundleVersion: "0.1", + CreatedAt: "2026-02-10T00:00:00Z", + Documents: []DiscoveryDocument{}, + Revocations: []RevocationDocument{}, + } + data, err := json.Marshal(b) + if err != nil { + t.Fatal(err) + } + var b2 TrustBundle + if err := json.Unmarshal(data, &b2); err != nil { + t.Fatal(err) + } + if b2.AgentpinBundleVersion != "0.1" { + t.Fatal("bundle version mismatch") + } +} diff --git a/go/pkg/types/capability.go b/go/pkg/types/capability.go new file mode 100644 index 
// Capability is an AgentPin capability in the canonical "action:resource"
// format. It serializes as a plain JSON string so the wire form is identical
// to the Rust, JavaScript, and Python SDKs.
type Capability string

// NewCapability builds the canonical "action:resource" capability string.
func NewCapability(action, resource string) Capability {
	return Capability(action + ":" + resource)
}

// Action returns the action component, or "" when the capability contains no
// ':' separator.
func (c Capability) Action() string {
	if action, _, ok := c.split(); ok {
		return action
	}
	return ""
}

// Resource returns the resource component, or "" when the capability
// contains no ':' separator.
func (c Capability) Resource() string {
	if _, resource, ok := c.split(); ok {
		return resource
	}
	return ""
}

// String returns the canonical "action:resource" representation.
func (c Capability) String() string { return string(c) }

// split separates the capability at the first ':'. ok is false for a
// malformed capability with no ':' at all; callers must not use the
// components in that case.
func (c Capability) split() (action, resource string, ok bool) {
	return strings.Cut(string(c), ":")
}

// Matches reports whether this *declared* capability covers a *requested*
// one.
//
// The action must match exactly. The declared resource matches when it is
// the wildcard "*", is identical to the requested resource, or is a
// dot-scoped prefix of it (e.g. "read:codebase" covers
// "read:codebase.github.com/org/repo"). Malformed capabilities on either
// side never match.
func (c Capability) Matches(requested Capability) bool {
	declAction, declRes, okDecl := c.split()
	reqAction, reqRes, okReq := requested.split()
	if !okDecl || !okReq || declAction != reqAction {
		return false
	}
	if declRes == "*" || declRes == reqRes {
		return true
	}
	// Scoped match: the requested resource must extend the declared one with
	// a '.' separator immediately after the shared prefix.
	if !strings.HasPrefix(reqRes, declRes) {
		return false
	}
	rest := reqRes[len(declRes):]
	return rest != "" && rest[0] == '.'
}

// CoreActions lists the AgentPin core action verbs.
var CoreActions = []string{"read", "write", "execute", "admin", "delegate"}

// isReverseDomain reports whether action looks like a reverse-domain custom
// action prefix (e.g. "com.example.scan"): two or more dot-separated
// segments, none of them empty.
func isReverseDomain(action string) bool {
	segments := strings.Split(action, ".")
	if len(segments) < 2 {
		return false
	}
	for _, segment := range segments {
		if segment == "" {
			return false
		}
	}
	return true
}

// ValidateCapability validates a capability against the AgentPin taxonomy.
//
//   - Must be in "action:resource" format.
//   - Core actions ("read", "write", "execute", "admin", "delegate") are
//     always valid with any resource, with one exception: "admin:*" is
//     rejected — admin must be explicitly scoped.
//   - Custom (non-core) actions MUST use a reverse-domain prefix (e.g.,
//     "com.example.scan:target").
func ValidateCapability(c Capability) error {
	action, resource, ok := c.split()
	if !ok {
		return fmt.Errorf("capability must be in 'action:resource' format")
	}
	// Unscoped admin is categorically rejected, even though "admin" is a
	// core action.
	if action == "admin" && resource == "*" {
		return fmt.Errorf("admin:* wildcard is not allowed; admin capabilities must be explicitly scoped")
	}
	for _, core := range CoreActions {
		if action == core {
			return nil
		}
	}
	if !isReverseDomain(action) {
		return fmt.Errorf("custom action '%s' must use reverse-domain prefix (e.g., com.example.%s)", action, action)
	}
	return nil
}

// CapabilitiesSubset reports whether every requested capability is covered by
// at least one declared capability.
func CapabilitiesSubset(declared, requested []Capability) bool {
outer:
	for _, want := range requested {
		for _, have := range declared {
			if have.Matches(want) {
				continue outer
			}
		}
		return false
	}
	return true
}

// CapabilitiesHash hashes a list of capabilities deterministically: sort the
// strings alphabetically, JSON-encode the sorted array, and SHA-256 the
// result (hex-encoded). Used by the delegation attestation flow; sorting
// makes the digest order-independent.
func CapabilitiesHash(caps []Capability) string {
	sorted := make([]string, 0, len(caps))
	for _, c := range caps {
		sorted = append(sorted, string(c))
	}
	sort.Strings(sorted)
	encoded, err := json.Marshal(sorted)
	if err != nil {
		// Encoding a []string cannot fail in practice; treat as programmer error.
		panic(err)
	}
	digest := sha256.Sum256(encoded)
	return hex.EncodeToString(digest[:])
}
+ panic(err) + } + sum := sha256.Sum256(b) + return hex.EncodeToString(sum[:]) +} diff --git a/go/pkg/types/capability_test.go b/go/pkg/types/capability_test.go new file mode 100644 index 0000000..f93ccd6 --- /dev/null +++ b/go/pkg/types/capability_test.go @@ -0,0 +1,99 @@ +package types + +import ( + "encoding/json" + "testing" +) + +func TestCapabilityParse(t *testing.T) { + c := Capability("read:codebase") + if a := c.Action(); a != "read" { + t.Fatalf("Action() = %q, want %q", a, "read") + } + if r := c.Resource(); r != "codebase" { + t.Fatalf("Resource() = %q, want %q", r, "codebase") + } +} + +func TestCapabilityMatchesWildcard(t *testing.T) { + wild := Capability("read:*") + if !wild.Matches("read:codebase") { + t.Fatal("read:* should match read:codebase") + } + if !wild.Matches("read:database") { + t.Fatal("read:* should match read:database") + } + if wild.Matches("write:codebase") { + t.Fatal("read:* should NOT match write:codebase") + } +} + +func TestCapabilityMatchesScoped(t *testing.T) { + cap := Capability("read:codebase") + if !cap.Matches("read:codebase.github.com/org/repo") { + t.Fatal("scoped match expected") + } + if cap.Matches("read:codebase_other") { + t.Fatal("non-dot suffix must not match") + } +} + +func TestCapabilitiesSubset(t *testing.T) { + declared := []Capability{"read:*", "write:report"} + requested := []Capability{"read:codebase", "write:report"} + if !CapabilitiesSubset(declared, requested) { + t.Fatal("requested should be a subset") + } + bad := []Capability{"delete:database"} + if CapabilitiesSubset(declared, bad) { + t.Fatal("delete:database should not be covered") + } +} + +func TestCapabilitiesHashOrderIndependent(t *testing.T) { + a := []Capability{"read:codebase", "write:report"} + b := []Capability{"write:report", "read:codebase"} + if CapabilitiesHash(a) != CapabilitiesHash(b) { + t.Fatal("CapabilitiesHash must be order-independent") + } +} + +func TestValidateCapability(t *testing.T) { + cases := []struct { + cap 
// ---- go/pkg/types/constraint.go ----

// DataClassification orders data sensitivity levels from least ("public") to
// most ("restricted") sensitive. Lower-ordinal values are less restrictive.
type DataClassification string

const (
	DataPublic       DataClassification = "public"
	DataInternal     DataClassification = "internal"
	DataConfidential DataClassification = "confidential"
	DataRestricted   DataClassification = "restricted"
)

// Order returns the comparable rank of a classification (higher = more
// sensitive), or -1 for an unknown value.
func (d DataClassification) Order() int {
	switch d {
	case DataRestricted:
		return 3
	case DataConfidential:
		return 2
	case DataInternal:
		return 1
	case DataPublic:
		return 0
	default:
		return -1
	}
}

// ValidHours describes a time-of-day validity window for a credential.
type ValidHours struct {
	Start    string `json:"start"`
	End      string `json:"end"`
	Timezone string `json:"timezone"`
}

// Constraints describes optional usage constraints attached to a credential
// or to a discovery agent declaration.
type Constraints struct {
	AllowedDomains        []string            `json:"allowed_domains,omitempty"`
	DeniedDomains         []string            `json:"denied_domains,omitempty"`
	RateLimit             string              `json:"rate_limit,omitempty"`
	DataClassificationMax *DataClassification `json:"data_classification_max,omitempty"`
	IPAllowlist           []string            `json:"ip_allowlist,omitempty"`
	ValidHours            *ValidHours         `json:"valid_hours,omitempty"`
}

// ConstraintsSubsetOf reports whether a credential's constraints are equal to
// or more restrictive than the discovery declaration's constraints.
//
// nil discovery constraints permit anything; nil credential constraints
// inherit the discovery defaults.
func ConstraintsSubsetOf(discovery, credential *Constraints) bool {
	if discovery == nil || credential == nil {
		return true
	}
	return classificationWithin(discovery, credential) &&
		rateWithin(discovery, credential) &&
		domainsWithin(discovery, credential)
}

// classificationWithin checks the credential's max data classification does
// not exceed the discovery's. Absent on either side means "nothing to
// compare" and passes.
func classificationWithin(discovery, credential *Constraints) bool {
	if discovery.DataClassificationMax == nil || credential.DataClassificationMax == nil {
		return true
	}
	return credential.DataClassificationMax.Order() <= discovery.DataClassificationMax.Order()
}

// rateWithin checks the credential's rate limit is no looser than the
// discovery's. The comparison only happens when both sides are present and
// parseable; NOTE(review): an unparseable rate string silently passes —
// confirm that lenient behaviour is intended.
func rateWithin(discovery, credential *Constraints) bool {
	if discovery.RateLimit == "" || credential.RateLimit == "" {
		return true
	}
	discRate, discOK := parseRateLimit(discovery.RateLimit)
	credRate, credOK := parseRateLimit(credential.RateLimit)
	if !discOK || !credOK {
		return true
	}
	return credRate <= discRate
}

// domainsWithin checks every credential allowed-domain is covered by at least
// one discovery allowed-domain pattern. An empty list on either side skips
// the check (empty = unrestricted).
func domainsWithin(discovery, credential *Constraints) bool {
	if len(discovery.AllowedDomains) == 0 || len(credential.AllowedDomains) == 0 {
		return true
	}
	for _, domain := range credential.AllowedDomains {
		covered := false
		for _, pattern := range discovery.AllowedDomains {
			if domainPatternMatches(pattern, domain) {
				covered = true
				break
			}
		}
		if !covered {
			return false
		}
	}
	return true
}

// parseRateLimit converts a rate-limit string like "100/hour" into requests
// per hour, normalizing so limits expressed in different units compare.
func parseRateLimit(rate string) (uint64, bool) {
	count, unit, found := strings.Cut(rate, "/")
	if !found {
		return 0, false
	}
	n, err := strconv.ParseUint(count, 10, 64)
	if err != nil {
		return 0, false
	}
	switch unit {
	case "second":
		return n * 3600, true
	case "minute":
		return n * 60, true
	case "hour":
		return n, true
	default:
		return 0, false
	}
}

// domainPatternMatches reports whether an exact-match or "*.suffix" wildcard
// pattern matches a domain. A wildcard requires at least one extra label: it
// matches "sub.suffix" but never the bare "suffix".
func domainPatternMatches(pattern, domain string) bool {
	if pattern == domain {
		return true
	}
	if !strings.HasPrefix(pattern, "*.") {
		return false
	}
	// "x.y" matches "*.y" exactly when it ends with ".y".
	return strings.HasSuffix(domain, "."+pattern[2:])
}

// ---- go/pkg/types/constraint_test.go ----

func TestDataClassificationOrdering(t *testing.T) {
	if DataPublic.Order() >= DataInternal.Order() {
		t.Fatal("public should be < internal")
	}
	if DataInternal.Order() >= DataConfidential.Order() {
		t.Fatal("internal should be < confidential")
	}
	if DataConfidential.Order() >= DataRestricted.Order() {
		t.Fatal("confidential should be < restricted")
	}
}

func TestParseRateLimit(t *testing.T) {
	cases := []struct {
		in  string
		out uint64
		ok  bool
	}{
		{"100/hour", 100, true},
		{"10/minute", 600, true},
		{"1/second", 3600, true},
		{"bad", 0, false},
		{"100/year", 0, false},
	}
	for _, tc := range cases {
		got, ok := parseRateLimit(tc.in)
		if got != tc.out || ok != tc.ok {
			t.Errorf("parseRateLimit(%q) = (%d,%v), want (%d,%v)", tc.in, got, ok, tc.out, tc.ok)
		}
	}
}

func TestDomainPatternMatches(t *testing.T) {
	if !domainPatternMatches("example.com", "example.com") {
		t.Fatal("exact match")
	}
	if !domainPatternMatches("*.example.com", "sub.example.com") {
		t.Fatal("wildcard subdomain")
	}
	if domainPatternMatches("*.example.com", "example.com") {
		t.Fatal("wildcard does not match bare")
	}
	if domainPatternMatches("other.com", "example.com") {
		t.Fatal("different domains must not match")
	}
}
!domainPatternMatches("*.example.com", "sub.example.com") { + t.Fatal("wildcard subdomain") + } + if domainPatternMatches("*.example.com", "example.com") { + t.Fatal("wildcard does not match bare") + } + if domainPatternMatches("other.com", "example.com") { + t.Fatal("different domains must not match") + } +} + +func TestConstraintsSubsetOf(t *testing.T) { + conf := DataConfidential + intl := DataInternal + rest := DataRestricted + + disc := &Constraints{ + DataClassificationMax: &conf, + RateLimit: "100/hour", + } + credOK := &Constraints{ + DataClassificationMax: &intl, + RateLimit: "50/hour", + } + if !ConstraintsSubsetOf(disc, credOK) { + t.Fatal("credOK should be a subset") + } + + credBad := &Constraints{ + DataClassificationMax: &rest, + } + if ConstraintsSubsetOf(disc, credBad) { + t.Fatal("restricted > confidential should fail") + } +} + +func TestConstraintsJSONRoundTrip(t *testing.T) { + intl := DataInternal + c := Constraints{ + AllowedDomains: []string{"*.example.com"}, + RateLimit: "50/hour", + DataClassificationMax: &intl, + } + data, err := json.Marshal(c) + if err != nil { + t.Fatal(err) + } + var c2 Constraints + if err := json.Unmarshal(data, &c2); err != nil { + t.Fatal(err) + } + if c2.RateLimit != c.RateLimit || *c2.DataClassificationMax != *c.DataClassificationMax { + t.Fatal("constraints roundtrip mismatch") + } +} diff --git a/go/pkg/types/credential.go b/go/pkg/types/credential.go new file mode 100644 index 0000000..8485939 --- /dev/null +++ b/go/pkg/types/credential.go @@ -0,0 +1,41 @@ +package types + +// DelegationRole identifies which role a delegating party plays in a chain. +type DelegationRole string + +const ( + RoleMaker DelegationRole = "maker" + RoleDeployer DelegationRole = "deployer" +) + +// DelegationAttestation is one entry in a credential's delegation chain. 
+type DelegationAttestation struct { + Domain string `json:"domain"` + Role DelegationRole `json:"role"` + AgentID string `json:"agent_id"` + Kid string `json:"kid"` + Attestation string `json:"attestation"` +} + +// JWTHeader is the AgentPin credential JWT header. +type JWTHeader struct { + Alg string `json:"alg"` + Typ string `json:"typ"` + Kid string `json:"kid"` +} + +// JWTPayload is the AgentPin credential JWT body. +type JWTPayload struct { + Iss string `json:"iss"` + Sub string `json:"sub"` + Aud string `json:"aud,omitempty"` + Iat int64 `json:"iat"` + Exp int64 `json:"exp"` + Nbf *int64 `json:"nbf,omitempty"` + Jti string `json:"jti"` + AgentpinVersion string `json:"agentpin_version"` + Capabilities []Capability `json:"capabilities"` + Constraints *Constraints `json:"constraints,omitempty"` + DelegationChain []DelegationAttestation `json:"delegation_chain,omitempty"` + Nonce string `json:"nonce,omitempty"` +} diff --git a/go/pkg/types/discovery.go b/go/pkg/types/discovery.go new file mode 100644 index 0000000..ea4c8ee --- /dev/null +++ b/go/pkg/types/discovery.go @@ -0,0 +1,54 @@ +package types + +// EntityType identifies whether a domain acts as a maker, deployer, or both. +type EntityType string + +const ( + EntityMaker EntityType = "maker" + EntityDeployer EntityType = "deployer" + EntityBoth EntityType = "both" +) + +// AgentStatus describes the operational state of an agent declaration. +type AgentStatus string + +const ( + AgentActive AgentStatus = "active" + AgentSuspended AgentStatus = "suspended" + AgentDeprecated AgentStatus = "deprecated" +) + +// AgentDeclaration is one agent entry inside a discovery document. 
+type AgentDeclaration struct { + AgentID string `json:"agent_id"` + AgentType string `json:"agent_type,omitempty"` + Name string `json:"name"` + Description string `json:"description,omitempty"` + Version string `json:"version,omitempty"` + Capabilities []Capability `json:"capabilities"` + Constraints *Constraints `json:"constraints,omitempty"` + MakerAttestation string `json:"maker_attestation,omitempty"` + CredentialTTLMax *uint64 `json:"credential_ttl_max,omitempty"` + Status AgentStatus `json:"status"` + // DirectoryListing, when set to false, signals that this agent SHOULD NOT + // be included in public agent directories. Defaults to true if omitted. + DirectoryListing *bool `json:"directory_listing,omitempty"` +} + +// DiscoveryDocument is the top-level `.well-known/agent-identity.json` +// document published by a domain. +type DiscoveryDocument struct { + AgentpinVersion string `json:"agentpin_version"` + Entity string `json:"entity"` + EntityType EntityType `json:"entity_type"` + PublicKeys []JWK `json:"public_keys"` + Agents []AgentDeclaration `json:"agents"` + RevocationEndpoint string `json:"revocation_endpoint,omitempty"` + PolicyURL string `json:"policy_url,omitempty"` + SchemapinEndpoint string `json:"schemapin_endpoint,omitempty"` + // A2aEndpoint (v0.3.0) optionally identifies the URL of the entity's A2A + // AgentCard endpoint, enabling cross-protocol discovery. 
+ A2aEndpoint string `json:"a2a_endpoint,omitempty"` + MaxDelegationDepth uint8 `json:"max_delegation_depth"` + UpdatedAt string `json:"updated_at"` +} diff --git a/go/pkg/types/discovery_test.go b/go/pkg/types/discovery_test.go new file mode 100644 index 0000000..7b1fb4e --- /dev/null +++ b/go/pkg/types/discovery_test.go @@ -0,0 +1,68 @@ +package types + +import ( + "encoding/json" + "strings" + "testing" +) + +func TestEntityTypeJSON(t *testing.T) { + cases := []struct { + v EntityType + want string + }{ + {EntityMaker, `"maker"`}, + {EntityDeployer, `"deployer"`}, + {EntityBoth, `"both"`}, + } + for _, tc := range cases { + got, _ := json.Marshal(tc.v) + if string(got) != tc.want { + t.Errorf("EntityType %q = %s, want %s", tc.v, got, tc.want) + } + } +} + +func TestAgentStatusJSON(t *testing.T) { + got, _ := json.Marshal(AgentActive) + if string(got) != `"active"` { + t.Fatalf("AgentActive = %s", got) + } +} + +func TestDiscoveryDocumentJSONRoundTrip(t *testing.T) { + ttl := uint64(3600) + doc := DiscoveryDocument{ + AgentpinVersion: "0.1", + Entity: "example.com", + EntityType: EntityMaker, + PublicKeys: []JWK{}, + Agents: []AgentDeclaration{ + { + AgentID: "urn:agentpin:example.com:test-agent", + Name: "Test Agent", + Description: "A test agent", + Capabilities: []Capability{"read:*"}, + Status: AgentActive, + CredentialTTLMax: &ttl, + }, + }, + RevocationEndpoint: "https://example.com/.well-known/agent-identity-revocations.json", + MaxDelegationDepth: 2, + UpdatedAt: "2026-01-15T00:00:00Z", + } + data, err := json.Marshal(doc) + if err != nil { + t.Fatal(err) + } + if !strings.Contains(string(data), `"entity":"example.com"`) { + t.Fatalf("missing entity field in JSON: %s", data) + } + var doc2 DiscoveryDocument + if err := json.Unmarshal(data, &doc2); err != nil { + t.Fatal(err) + } + if doc2.Entity != doc.Entity || doc2.EntityType != doc.EntityType { + t.Fatal("roundtrip mismatch") + } +} diff --git a/go/pkg/types/errors.go b/go/pkg/types/errors.go new file 
mode 100644 index 0000000..a35a3ed --- /dev/null +++ b/go/pkg/types/errors.go @@ -0,0 +1,78 @@ +// Package types contains shared AgentPin types used across the SDK packages: +// JWK, capabilities, constraints, discovery / credential / revocation / +// trust-bundle / pinning / mutual-auth structures, and the unified ErrorCode +// taxonomy from spec section 6.7. +package types + +import ( + "encoding/json" + "errors" + "fmt" +) + +// ErrorCode enumerates the verification failure codes from the AgentPin spec +// (section 6.7). They serialize as their canonical SCREAMING_SNAKE_CASE +// strings so JSON output matches the Rust/JavaScript/Python ports verbatim. +type ErrorCode string + +const ( + ErrSignatureInvalid ErrorCode = "SIGNATURE_INVALID" + ErrKeyNotFound ErrorCode = "KEY_NOT_FOUND" + ErrKeyExpired ErrorCode = "KEY_EXPIRED" + ErrKeyRevoked ErrorCode = "KEY_REVOKED" + ErrCredentialExpired ErrorCode = "CREDENTIAL_EXPIRED" + ErrCredentialRevoked ErrorCode = "CREDENTIAL_REVOKED" + ErrAgentNotFound ErrorCode = "AGENT_NOT_FOUND" + ErrAgentInactive ErrorCode = "AGENT_INACTIVE" + ErrCapabilityExceeded ErrorCode = "CAPABILITY_EXCEEDED" + ErrConstraintViolation ErrorCode = "CONSTRAINT_VIOLATION" + ErrDelegationInvalid ErrorCode = "DELEGATION_INVALID" + ErrDelegationDepthExceeded ErrorCode = "DELEGATION_DEPTH_EXCEEDED" + ErrDiscoveryFetchFailed ErrorCode = "DISCOVERY_FETCH_FAILED" + ErrDiscoveryInvalid ErrorCode = "DISCOVERY_INVALID" + ErrDomainMismatch ErrorCode = "DOMAIN_MISMATCH" + ErrAudienceMismatch ErrorCode = "AUDIENCE_MISMATCH" + ErrAlgorithmRejected ErrorCode = "ALGORITHM_REJECTED" + ErrKeyPinMismatch ErrorCode = "KEY_PIN_MISMATCH" +) + +// VerificationError is returned by verification helpers that need to surface +// a typed failure code alongside a human-readable message. +type VerificationError struct { + Code ErrorCode + Message string +} + +// Error implements the error interface. 
+func (e *VerificationError) Error() string { + return fmt.Sprintf("%s: %s", e.Code, e.Message) +} + +// NewVerificationError constructs a VerificationError. +func NewVerificationError(code ErrorCode, msg string) *VerificationError { + return &VerificationError{Code: code, Message: msg} +} + +// AsVerificationError extracts a *VerificationError from err if present. +func AsVerificationError(err error) (*VerificationError, bool) { + var ve *VerificationError + if errors.As(err, &ve) { + return ve, true + } + return nil, false +} + +// MarshalJSON renders the ErrorCode as a JSON string. +func (c ErrorCode) MarshalJSON() ([]byte, error) { + return json.Marshal(string(c)) +} + +// UnmarshalJSON reads an ErrorCode from a JSON string. +func (c *ErrorCode) UnmarshalJSON(data []byte) error { + var s string + if err := json.Unmarshal(data, &s); err != nil { + return err + } + *c = ErrorCode(s) + return nil +} diff --git a/go/pkg/types/jwk.go b/go/pkg/types/jwk.go new file mode 100644 index 0000000..7f0a9cd --- /dev/null +++ b/go/pkg/types/jwk.go @@ -0,0 +1,14 @@ +package types + +// JWK is the AgentPin JSON Web Key representation. It mirrors the Rust +// `agentpin::jwk::Jwk` struct field-for-field so wire format is identical. +type JWK struct { + Kid string `json:"kid"` + Kty string `json:"kty"` + Crv string `json:"crv"` + X string `json:"x"` + Y string `json:"y"` + Use string `json:"use"` + KeyOps []string `json:"key_ops,omitempty"` + Exp string `json:"exp,omitempty"` +} diff --git a/go/pkg/types/mutual.go b/go/pkg/types/mutual.go new file mode 100644 index 0000000..9c1565a --- /dev/null +++ b/go/pkg/types/mutual.go @@ -0,0 +1,17 @@ +package types + +// Challenge is the AgentPin mutual-auth challenge wire payload. +type Challenge struct { + Type string `json:"type"` + Nonce string `json:"nonce"` + Timestamp string `json:"timestamp"` + VerifierCredential string `json:"verifier_credential,omitempty"` +} + +// Response is the AgentPin mutual-auth response wire payload. 
+type Response struct { + Type string `json:"type"` + Nonce string `json:"nonce"` + Signature string `json:"signature"` + Kid string `json:"kid"` +} diff --git a/go/pkg/types/mutual_test.go b/go/pkg/types/mutual_test.go new file mode 100644 index 0000000..1d6dd99 --- /dev/null +++ b/go/pkg/types/mutual_test.go @@ -0,0 +1,45 @@ +package types + +import ( + "encoding/json" + "strings" + "testing" +) + +func TestChallengeJSON(t *testing.T) { + c := Challenge{ + Type: "agentpin-challenge", + Nonce: "abc", + Timestamp: "2026-01-30T00:00:00Z", + VerifierCredential: "eyJ...", + } + data, err := json.Marshal(c) + if err != nil { + t.Fatal(err) + } + if !strings.Contains(string(data), `"type":"agentpin-challenge"`) { + t.Fatalf("expected type field, got %s", data) + } + var c2 Challenge + if err := json.Unmarshal(data, &c2); err != nil { + t.Fatal(err) + } + if c2 != c { + t.Fatal("roundtrip mismatch") + } +} + +func TestResponseJSON(t *testing.T) { + r := Response{Type: "agentpin-response", Nonce: "abc", Signature: "sig", Kid: "k"} + data, err := json.Marshal(r) + if err != nil { + t.Fatal(err) + } + var r2 Response + if err := json.Unmarshal(data, &r2); err != nil { + t.Fatal(err) + } + if r2 != r { + t.Fatal("roundtrip mismatch") + } +} diff --git a/go/pkg/types/pinning.go b/go/pkg/types/pinning.go new file mode 100644 index 0000000..14fe38c --- /dev/null +++ b/go/pkg/types/pinning.go @@ -0,0 +1,25 @@ +package types + +// TrustLevel describes how a pinned key was vetted. +type TrustLevel string + +const ( + TrustTOFU TrustLevel = "tofu" + TrustVerified TrustLevel = "verified" + TrustPinned TrustLevel = "pinned" +) + +// PinnedKey represents one TOFU-pinned key for a domain. +type PinnedKey struct { + Kid string `json:"kid"` + PublicKeyHash string `json:"public_key_hash"` + FirstSeen string `json:"first_seen"` + LastSeen string `json:"last_seen"` + TrustLevel TrustLevel `json:"trust_level"` +} + +// PinnedDomain holds all pinned keys for a single domain. 
+type PinnedDomain struct { + Domain string `json:"domain"` + PinnedKeys []PinnedKey `json:"pinned_keys"` +} diff --git a/go/pkg/types/pinning_test.go b/go/pkg/types/pinning_test.go new file mode 100644 index 0000000..75d43e0 --- /dev/null +++ b/go/pkg/types/pinning_test.go @@ -0,0 +1,43 @@ +package types + +import ( + "encoding/json" + "testing" +) + +func TestTrustLevelJSON(t *testing.T) { + cases := []struct { + v TrustLevel + want string + }{ + {TrustTOFU, `"tofu"`}, + {TrustVerified, `"verified"`}, + {TrustPinned, `"pinned"`}, + } + for _, tc := range cases { + got, _ := json.Marshal(tc.v) + if string(got) != tc.want { + t.Errorf("%q = %s, want %s", tc.v, got, tc.want) + } + } +} + +func TestPinnedDomainRoundTrip(t *testing.T) { + pd := PinnedDomain{ + Domain: "example.com", + PinnedKeys: []PinnedKey{ + {Kid: "k", PublicKeyHash: "abcd", FirstSeen: "f", LastSeen: "l", TrustLevel: TrustTOFU}, + }, + } + data, err := json.Marshal(pd) + if err != nil { + t.Fatal(err) + } + var pd2 PinnedDomain + if err := json.Unmarshal(data, &pd2); err != nil { + t.Fatal(err) + } + if pd2.Domain != pd.Domain || pd2.PinnedKeys[0].Kid != pd.PinnedKeys[0].Kid { + t.Fatal("roundtrip mismatch") + } +} diff --git a/go/pkg/types/revocation.go b/go/pkg/types/revocation.go new file mode 100644 index 0000000..7fcdc99 --- /dev/null +++ b/go/pkg/types/revocation.go @@ -0,0 +1,45 @@ +package types + +// RevocationReason enumerates why a credential, agent, or key was revoked. +type RevocationReason string + +const ( + ReasonKeyCompromise RevocationReason = "key_compromise" + ReasonAffiliationChanged RevocationReason = "affiliation_changed" + ReasonSuperseded RevocationReason = "superseded" + ReasonCessationOfOperation RevocationReason = "cessation_of_operation" + ReasonPrivilegeWithdrawn RevocationReason = "privilege_withdrawn" + ReasonPolicyViolation RevocationReason = "policy_violation" +) + +// RevokedCredential identifies a single revoked credential by its JTI. 
+type RevokedCredential struct { + Jti string `json:"jti"` + RevokedAt string `json:"revoked_at"` + Reason RevocationReason `json:"reason"` +} + +// RevokedAgent identifies a single revoked agent by URN. +type RevokedAgent struct { + AgentID string `json:"agent_id"` + RevokedAt string `json:"revoked_at"` + Reason RevocationReason `json:"reason"` +} + +// RevokedKey identifies a single revoked key by KID. +type RevokedKey struct { + Kid string `json:"kid"` + RevokedAt string `json:"revoked_at"` + Reason RevocationReason `json:"reason"` +} + +// RevocationDocument is the top-level +// `.well-known/agent-identity-revocations.json` payload. +type RevocationDocument struct { + AgentpinVersion string `json:"agentpin_version"` + Entity string `json:"entity"` + UpdatedAt string `json:"updated_at"` + RevokedCredentials []RevokedCredential `json:"revoked_credentials"` + RevokedAgents []RevokedAgent `json:"revoked_agents"` + RevokedKeys []RevokedKey `json:"revoked_keys"` +} diff --git a/go/pkg/types/revocation_test.go b/go/pkg/types/revocation_test.go new file mode 100644 index 0000000..d3b613f --- /dev/null +++ b/go/pkg/types/revocation_test.go @@ -0,0 +1,43 @@ +package types + +import ( + "encoding/json" + "testing" +) + +func TestRevocationReasonJSON(t *testing.T) { + got, _ := json.Marshal(ReasonKeyCompromise) + if string(got) != `"key_compromise"` { + t.Fatalf("ReasonKeyCompromise = %s", got) + } + got, _ = json.Marshal(ReasonCessationOfOperation) + if string(got) != `"cessation_of_operation"` { + t.Fatalf("ReasonCessationOfOperation = %s", got) + } +} + +func TestRevocationDocumentRoundTrip(t *testing.T) { + doc := RevocationDocument{ + AgentpinVersion: "0.1", + Entity: "example.com", + UpdatedAt: "2026-01-30T00:00:00Z", + RevokedCredentials: []RevokedCredential{ + {Jti: "jti-1", RevokedAt: "2026-01-30T00:00:00Z", Reason: ReasonKeyCompromise}, + }, + RevokedAgents: []RevokedAgent{}, + RevokedKeys: []RevokedKey{ + {Kid: "old-key", RevokedAt: "2026-01-30T00:00:00Z", 
Reason: ReasonSuperseded}, + }, + } + data, err := json.Marshal(doc) + if err != nil { + t.Fatal(err) + } + var doc2 RevocationDocument + if err := json.Unmarshal(data, &doc2); err != nil { + t.Fatal(err) + } + if len(doc2.RevokedCredentials) != 1 || doc2.RevokedCredentials[0].Jti != "jti-1" { + t.Fatal("roundtrip mismatch") + } +} diff --git a/go/pkg/verification/cross_language_test.go b/go/pkg/verification/cross_language_test.go new file mode 100644 index 0000000..b269e58 --- /dev/null +++ b/go/pkg/verification/cross_language_test.go @@ -0,0 +1,209 @@ +package verification + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + + "github.com/ThirdKeyAi/agentpin/go/pkg/credential" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwt" + "github.com/ThirdKeyAi/agentpin/go/pkg/pinning" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// These tests use Rust-generated fixtures to prove the Go SDK's wire format +// is byte-compatible with the Rust SDK. The fixtures are generated by +// `agentpin keygen` (Rust CLI) and committed under testdata/. Regenerating +// them is a manual step documented in go/README.md. + +const fixturesDir = "testdata" + +func loadFixture(t *testing.T, name string) []byte { + t.Helper() + data, err := os.ReadFile(filepath.Join(fixturesDir, name)) + if err != nil { + t.Fatalf("read fixture %s: %v", name, err) + } + return data +} + +// TestCrossLang_PEMtoJWK_MatchesRust loads a Rust-generated PEM public key, +// converts it to a JWK using the Go SDK, and asserts the result is equal to +// the JWK file generated by the Rust CLI. Wire format is a security +// guarantee: any drift here breaks cross-language interop. 
+func TestCrossLang_PEMtoJWK_MatchesRust(t *testing.T) { + pubPEM := string(loadFixture(t, "example-2026-01.public.pem")) + jwkData := loadFixture(t, "example-2026-01.public.jwk.json") + + var rustJWK types.JWK + if err := json.Unmarshal(jwkData, &rustJWK); err != nil { + t.Fatalf("parse rust JWK: %v", err) + } + + goJWK, err := jwk.PEMToJWK(pubPEM, "example-2026-01") + if err != nil { + t.Fatalf("Go PEMToJWK: %v", err) + } + if goJWK.Kid != rustJWK.Kid { + t.Fatalf("kid mismatch: Go=%s Rust=%s", goJWK.Kid, rustJWK.Kid) + } + if goJWK.Kty != rustJWK.Kty || goJWK.Crv != rustJWK.Crv { + t.Fatalf("kty/crv mismatch") + } + if goJWK.X != rustJWK.X || goJWK.Y != rustJWK.Y { + t.Fatalf("x/y coordinate mismatch:\nGo : x=%s y=%s\nRust: x=%s y=%s", + goJWK.X, goJWK.Y, rustJWK.X, rustJWK.Y) + } + if goJWK.Use != rustJWK.Use { + t.Fatalf("use mismatch: Go=%s Rust=%s", goJWK.Use, rustJWK.Use) + } +} + +// TestCrossLang_JWKThumbprint_MatchesRustPort confirms the Go RFC 7638 +// thumbprint matches what we'd compute on the Rust side using the same JWK. +// The expected hash is computed inline from the canonical JWK form so any +// regression in canonicalization is immediately visible. +func TestCrossLang_JWKThumbprint_MatchesRustPort(t *testing.T) { + jwkData := loadFixture(t, "example-2026-01.public.jwk.json") + var j types.JWK + if err := json.Unmarshal(jwkData, &j); err != nil { + t.Fatal(err) + } + tp := jwk.JWKThumbprint(&j) + if len(tp) != 64 { + t.Fatalf("thumbprint hex length %d, want 64", len(tp)) + } + // Recompute the canonical input independently and SHA-256 it; this + // asserts the canonical form matches RFC 7638 exactly. 
+ canonical := `{"crv":"P-256","kty":"EC","x":"` + j.X + `","y":"` + j.Y + `"}` + if got := crypto.SHA256Hex([]byte(canonical)); got != tp { + t.Fatalf("thumbprint mismatch: %s vs %s", tp, got) + } +} + +// TestCrossLang_DiscoveryDocument_RustSerialization_RoundTrip parses a +// Rust-generated discovery document and validates it loads correctly with +// the same field names. This catches any JSON tag drift. +func TestCrossLang_DiscoveryDocument_RustSerialization_RoundTrip(t *testing.T) { + data := loadFixture(t, "discovery.json") + var doc types.DiscoveryDocument + if err := json.Unmarshal(data, &doc); err != nil { + t.Fatalf("parse discovery: %v", err) + } + if doc.Entity != "example.com" { + t.Fatalf("entity: %s", doc.Entity) + } + if doc.AgentpinVersion != "0.1" { + t.Fatalf("agentpin_version: %s", doc.AgentpinVersion) + } + if doc.EntityType != types.EntityMaker { + t.Fatalf("entity_type: %s", doc.EntityType) + } + if len(doc.PublicKeys) != 1 || doc.PublicKeys[0].Kid != "example-2026-01" { + t.Fatalf("public_keys: %+v", doc.PublicKeys) + } + if len(doc.Agents) != 1 { + t.Fatalf("agents: %+v", doc.Agents) + } + if doc.Agents[0].AgentID != "urn:agentpin:example.com:test-agent" { + t.Fatalf("agent_id: %s", doc.Agents[0].AgentID) + } + if doc.Agents[0].Status != types.AgentActive { + t.Fatalf("status: %s", doc.Agents[0].Status) + } + if doc.MaxDelegationDepth != 2 { + t.Fatalf("max_delegation_depth: %d", doc.MaxDelegationDepth) + } + if doc.RevocationEndpoint == "" { + t.Fatal("revocation_endpoint missing") + } +} + +// TestCrossLang_RustKey_GoSignVerify proves the Go SDK can load a +// Rust-generated PEM private key, issue a credential with it, and verify +// that credential against the same Rust-generated discovery document. This +// is the end-to-end interop guarantee for the issue/verify path. 
+func TestCrossLang_RustKey_GoSignVerify(t *testing.T) { + privPEM := string(loadFixture(t, "example-2026-01.private.pem")) + priv, err := crypto.LoadPrivateKey(privPEM) + if err != nil { + t.Fatalf("load Rust-generated private key: %v", err) + } + + discData := loadFixture(t, "discovery.json") + var disc types.DiscoveryDocument + if err := json.Unmarshal(discData, &disc); err != nil { + t.Fatal(err) + } + + // Issue a fresh credential with the Rust-generated key. + cred, err := credential.IssueCredential( + priv, + "example-2026-01", + "example.com", + "urn:agentpin:example.com:test-agent", + "verifier.com", + []types.Capability{"read:data", "write:report"}, + nil, nil, + 3600, + ) + if err != nil { + t.Fatalf("issue: %v", err) + } + + r := VerifyCredentialOffline(cred, &disc, nil, pinning.NewKeyPinStore(), "verifier.com", DefaultVerifierConfig()) + if !r.Valid { + t.Fatalf("Go-issued credential against Rust-generated discovery failed: %+v", r) + } + if r.AgentID != "urn:agentpin:example.com:test-agent" { + t.Fatalf("agent_id: %s", r.AgentID) + } +} + +// TestCrossLang_JWT_RustGenerated_GoVerify decodes a JWT generated by the +// Rust SDK against a Rust-generated public key file. This proves the JWT +// wire format (DER signature inside compact JWT) is identical across SDKs. +// +// The fixture credential lives inline because Rust-generated JWTs are short +// and embedding the bytes avoids tying CI to live Rust toolchain +// availability. The credential is signed with the testdata Rust key and +// uses iat/exp values that we override the verifier clock for. +func TestCrossLang_JWT_RustGenerated_GoDecodeAndSignatureVerify(t *testing.T) { + pubPEM := string(loadFixture(t, "example-2026-01.public.pem")) + pub, err := crypto.LoadPublicKey(pubPEM) + if err != nil { + t.Fatal(err) + } + + // Sample Rust-generated JWT (alg=ES256, typ=agentpin-credential+jwt, + // kid=example-2026-01) signed with the matching private key in + // testdata/. 
The JWT's exp may be in the past; we are testing only the + // header parsing + signature verification — not temporal validity. + jwtStr := string(loadFixture(t, "credential.jwt")) + + // DecodeJWTUnverified MUST accept the Rust-generated header. + header, payload, _, err := jwt.DecodeJWTUnverified(jwtStr) + if err != nil { + t.Fatalf("Go failed to decode Rust-generated JWT: %v", err) + } + if header.Alg != "ES256" || header.Typ != "agentpin-credential+jwt" { + t.Fatalf("header drift: alg=%s typ=%s", header.Alg, header.Typ) + } + if header.Kid != "example-2026-01" { + t.Fatalf("kid: %s", header.Kid) + } + if payload.Iss != "example.com" { + t.Fatalf("iss: %s", payload.Iss) + } + if payload.AgentpinVersion != "0.1" { + t.Fatalf("agentpin_version: %s", payload.AgentpinVersion) + } + + // Signature MUST verify under the Rust-generated public key. + if _, _, err := jwt.VerifyJWT(jwtStr, pub); err != nil { + t.Fatalf("Go failed to verify Rust-generated JWT signature: %v", err) + } +} diff --git a/go/pkg/verification/testdata/credential.jwt b/go/pkg/verification/testdata/credential.jwt new file mode 100644 index 0000000..ec0dcb2 --- /dev/null +++ b/go/pkg/verification/testdata/credential.jwt @@ -0,0 +1 @@ +eyJhbGciOiJFUzI1NiIsInR5cCI6ImFnZW50cGluLWNyZWRlbnRpYWwrand0Iiwia2lkIjoiZXhhbXBsZS0yMDI2LTAxIn0.eyJpc3MiOiJleGFtcGxlLmNvbSIsInN1YiI6InVybjphZ2VudHBpbjpleGFtcGxlLmNvbTp0ZXN0LWFnZW50IiwiYXVkIjoidmVyaWZpZXIuY29tIiwiaWF0IjoxNzc3NjYzNDE1LCJleHAiOjE3Nzc3NDk4MTUsImp0aSI6IjI4ODRhNzgxLWY4OTUtNDUwZi1hMGE1LWFlNmFiOWU1YzBhMiIsImFnZW50cGluX3ZlcnNpb24iOiIwLjEiLCJjYXBhYmlsaXRpZXMiOlsicmVhZDpkYXRhIiwid3JpdGU6cmVwb3J0Il19.MEUCIFmIiBfAKc84I4VR9HkwsKL9Gla_PabrVM0T0Caw6Z-lAiEA3mCEosp1kY6Zs-rigvJKYTeIcPQUxWbFraexNwqrjAA diff --git a/go/pkg/verification/testdata/discovery.json b/go/pkg/verification/testdata/discovery.json new file mode 100644 index 0000000..ff7110c --- /dev/null +++ b/go/pkg/verification/testdata/discovery.json @@ -0,0 +1,29 @@ +{ + "agentpin_version": "0.1", + "entity": 
"example.com", + "entity_type": "maker", + "public_keys": [ +{ + "kid": "example-2026-01", + "kty": "EC", + "crv": "P-256", + "x": "nXszbHO3-fXVL--bm8YeaeG63qOEzEjnRRHOI9ofMXY", + "y": "hvg_wGE3XQ52VLAUL_JqzhrKFqbfuPSeLVBCLPdt9IU", + "use": "sig", + "key_ops": [ + "verify" + ] +} ], + "agents": [ + { + "agent_id": "urn:agentpin:example.com:test-agent", + "name": "Cross-Language Test Agent", + "capabilities": ["read:*", "write:report"], + "credential_ttl_max": 86400, + "status": "active" + } + ], + "revocation_endpoint": "https://example.com/.well-known/agent-identity-revocations.json", + "max_delegation_depth": 2, + "updated_at": "2026-01-15T00:00:00Z" +} diff --git a/go/pkg/verification/testdata/example-2026-01.private.pem b/go/pkg/verification/testdata/example-2026-01.private.pem new file mode 100644 index 0000000..8ed83db --- /dev/null +++ b/go/pkg/verification/testdata/example-2026-01.private.pem @@ -0,0 +1,5 @@ +-----BEGIN PRIVATE KEY----- +MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg7lAe3UWURms7PAto +94hktLk1JLnr76YgDlFabDaN6v+hRANCAASdezNsc7f59dUv75ubxh5p4breo4TM +SOdFEc4j2h8xdob4P8BhN10OdlSwFC/yas4ayham37j0ni1QQiz3bfSF +-----END PRIVATE KEY----- diff --git a/go/pkg/verification/testdata/example-2026-01.public.jwk.json b/go/pkg/verification/testdata/example-2026-01.public.jwk.json new file mode 100644 index 0000000..155ce62 --- /dev/null +++ b/go/pkg/verification/testdata/example-2026-01.public.jwk.json @@ -0,0 +1,11 @@ +{ + "kid": "example-2026-01", + "kty": "EC", + "crv": "P-256", + "x": "nXszbHO3-fXVL--bm8YeaeG63qOEzEjnRRHOI9ofMXY", + "y": "hvg_wGE3XQ52VLAUL_JqzhrKFqbfuPSeLVBCLPdt9IU", + "use": "sig", + "key_ops": [ + "verify" + ] +} \ No newline at end of file diff --git a/go/pkg/verification/testdata/example-2026-01.public.pem b/go/pkg/verification/testdata/example-2026-01.public.pem new file mode 100644 index 0000000..18923ff --- /dev/null +++ b/go/pkg/verification/testdata/example-2026-01.public.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- 
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEnXszbHO3+fXVL++bm8YeaeG63qOE +zEjnRRHOI9ofMXaG+D/AYTddDnZUsBQv8mrOGsoWpt+49J4tUEIs9230hQ== +-----END PUBLIC KEY----- diff --git a/go/pkg/verification/verification.go b/go/pkg/verification/verification.go new file mode 100644 index 0000000..3f8a371 --- /dev/null +++ b/go/pkg/verification/verification.go @@ -0,0 +1,267 @@ +// Package verification implements the AgentPin 12-step credential +// verification flow defined in the spec. The flow is preserved verbatim +// from the Rust SDK: any change here should be mirrored in Rust / JS / +// Python. +// +// 12-step flow: +// +// 1. JWT structure parse +// 2. Header alg validation (ES256 only — REJECT others) +// 3. Signature verify +// 4. Issuer domain extraction +// 5. Discovery document resolution (caller-provided or via resolver) +// 6. Domain binding verify (issuer claim matches discovery entity) +// 7. Key matching (sig kid maps to discovery key) +// 8. TOFU key pinning check +// 9. Expiration validation (`exp` claim) +// 10. Revocation checking (credential id, agent id, key id) +// 11. Capability validation (credential capabilities subset of declaration) +// 12. Delegation chain verification (if `del`/`delegation_chain` present) +package verification + +import ( + "fmt" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/credential" + "github.com/ThirdKeyAi/agentpin/go/pkg/discovery" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwt" + "github.com/ThirdKeyAi/agentpin/go/pkg/pinning" + "github.com/ThirdKeyAi/agentpin/go/pkg/resolver" + "github.com/ThirdKeyAi/agentpin/go/pkg/revocation" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +// VerifierConfig tunes verification behaviour. +type VerifierConfig struct { + // ClockSkewSecs is the tolerance for iat/exp comparisons (default: 60). + ClockSkewSecs int64 + // MaxTTLSecs caps the maximum credential lifetime (default: 86400). 
+ MaxTTLSecs int64 + // StrictCapabilities, when true, validates capabilities against the + // taxonomy (currently informational; reserved for future use). + StrictCapabilities bool +} + +// DefaultVerifierConfig returns the default verifier configuration. +func DefaultVerifierConfig() VerifierConfig { + return VerifierConfig{ + ClockSkewSecs: 60, + MaxTTLSecs: 86400, + StrictCapabilities: false, + } +} + +// DelegationChainEntry summarizes one delegation attestation in the result. +type DelegationChainEntry struct { + Domain string `json:"domain"` + Role string `json:"role"` + Verified bool `json:"verified"` +} + +// KeyPinningStatus summarizes the TOFU pinning outcome. +type KeyPinningStatus struct { + Status string `json:"status"` + FirstSeen string `json:"first_seen,omitempty"` +} + +// Result is the structured verification outcome. +type Result struct { + Valid bool `json:"valid"` + AgentID string `json:"agent_id,omitempty"` + Issuer string `json:"issuer,omitempty"` + Capabilities []types.Capability `json:"capabilities,omitempty"` + Constraints *types.Constraints `json:"constraints,omitempty"` + DelegationVerified *bool `json:"delegation_verified,omitempty"` + DelegationChain []DelegationChainEntry `json:"delegation_chain,omitempty"` + KeyPinning *KeyPinningStatus `json:"key_pinning,omitempty"` + ErrorCode types.ErrorCode `json:"error_code,omitempty"` + ErrorMessage string `json:"error_message,omitempty"` + Warnings []string `json:"warnings,omitempty"` +} + +func failure(code types.ErrorCode, msg string) Result { + return Result{Valid: false, ErrorCode: code, ErrorMessage: msg, Warnings: []string{}} +} + +// VerifyCredentialOffline runs the full 12-step verification flow against a +// caller-provided discovery (and optional revocation) document. 
+func VerifyCredentialOffline( + credentialJWT string, + disc *types.DiscoveryDocument, + rev *types.RevocationDocument, + pinStore *pinning.KeyPinStore, + audience string, + config VerifierConfig, +) Result { + // Steps 1-2: Parse JWT (alg/typ enforced inside DecodeJWTUnverified). + header, payload, _, err := jwt.DecodeJWTUnverified(credentialJWT) + if err != nil { + return failure(types.ErrAlgorithmRejected, fmt.Sprintf("JWT parse failed: %v", err)) + } + + // Step 9: Temporal validity (also covered by spec step "expiration"). + now := time.Now().Unix() + skew := config.ClockSkewSecs + + if payload.Iat > now+skew { + return failure(types.ErrCredentialExpired, "Credential issued in the future") + } + if payload.Exp <= now-skew { + return failure(types.ErrCredentialExpired, "Credential has expired") + } + if payload.Nbf != nil { + if *payload.Nbf > now+skew { + return failure(types.ErrCredentialExpired, "Credential not yet valid (nbf)") + } + } + lifetime := payload.Exp - payload.Iat + if lifetime > config.MaxTTLSecs { + return failure( + types.ErrCredentialExpired, + fmt.Sprintf("Credential lifetime %d exceeds max TTL %d", lifetime, config.MaxTTLSecs), + ) + } + + // Steps 5-6: Validate discovery document (entity matches iss). + if err := discovery.ValidateDiscoveryDocument(disc, payload.Iss); err != nil { + return failure(types.ErrDiscoveryInvalid, fmt.Sprintf("Discovery validation failed: %v", err)) + } + + // Step 7: Resolve public key by kid. 
+ kk := discovery.FindKeyByKid(disc, header.Kid) + if kk == nil { + return failure(types.ErrKeyNotFound, fmt.Sprintf("Key '%s' not found in discovery document", header.Kid)) + } + if kk.Exp != "" { + if expDt, err := time.Parse(time.RFC3339, kk.Exp); err == nil { + if expDt.Unix() < now-skew { + return failure(types.ErrKeyExpired, fmt.Sprintf("Key '%s' has expired", header.Kid)) + } + } + } + pub, err := jwk.JWKToVerifyingKey(kk) + if err != nil { + return failure(types.ErrKeyNotFound, fmt.Sprintf("Invalid key format for '%s': %v", header.Kid, err)) + } + + // Step 3: Verify JWT signature. + if _, _, err := jwt.VerifyJWT(credentialJWT, pub); err != nil { + return failure(types.ErrSignatureInvalid, fmt.Sprintf("JWT signature verification failed for kid '%s'", header.Kid)) + } + + // Step 10: Revocation. + if rev != nil { + if err := revocation.CheckRevocation(rev, payload.Jti, payload.Sub, header.Kid); err != nil { + if ve, ok := types.AsVerificationError(err); ok { + return failure(ve.Code, ve.Message) + } + return failure(types.ErrCredentialRevoked, err.Error()) + } + } + + // Agent presence + status. + agent := discovery.FindAgentByID(disc, payload.Sub) + if agent == nil { + return failure(types.ErrAgentNotFound, fmt.Sprintf("Agent '%s' not found in discovery document", payload.Sub)) + } + if agent.Status != types.AgentActive { + return failure(types.ErrAgentInactive, fmt.Sprintf("Agent '%s' status is %s", payload.Sub, agent.Status)) + } + + // Step 11: Capability subset. + if err := credential.ValidateCredentialAgainstDiscovery(payload.Capabilities, agent.Capabilities); err != nil { + return failure(types.ErrCapabilityExceeded, err.Error()) + } + + // Constraints subset. + if !types.ConstraintsSubsetOf(agent.Constraints, payload.Constraints) { + return failure(types.ErrConstraintViolation, "Credential constraints are less restrictive than discovery defaults") + } + + // Build success result; pinning + delegation populated below. 
+ result := Result{ + Valid: true, + AgentID: payload.Sub, + Issuer: payload.Iss, + Capabilities: payload.Capabilities, + Constraints: payload.Constraints, + Warnings: []string{}, + } + + // Step 12: Delegation chain (offline mode cannot verify signatures). + if len(payload.DelegationChain) > 0 { + entries := make([]DelegationChainEntry, 0, len(payload.DelegationChain)) + for _, att := range payload.DelegationChain { + entries = append(entries, DelegationChainEntry{ + Domain: att.Domain, + Role: string(att.Role), + Verified: false, + }) + } + result.DelegationChain = entries + f := false + result.DelegationVerified = &f + result.Warnings = append(result.Warnings, "Delegation chain present but not verified in offline mode") + } + + // Step 8: TOFU key pinning. + pr, perr := pinning.CheckPinning(pinStore, payload.Iss, kk) + if perr != nil { + return failure(types.ErrKeyPinMismatch, fmt.Sprintf("Key for '%s' has changed since last pinned", payload.Iss)) + } + switch pr { + case pinning.ResultFirstUse: + result.KeyPinning = &KeyPinningStatus{ + Status: "first_use", + FirstSeen: time.Now().UTC().Format(time.RFC3339), + } + case pinning.ResultMatched: + var first string + if pd := pinStore.GetDomain(payload.Iss); pd != nil && len(pd.PinnedKeys) > 0 { + first = pd.PinnedKeys[0].FirstSeen + } + result.KeyPinning = &KeyPinningStatus{Status: "pinned", FirstSeen: first} + } + + // Audience binding. + if audience != "" { + if payload.Aud != "" && payload.Aud != "*" && payload.Aud != audience { + return failure( + types.ErrAudienceMismatch, + fmt.Sprintf("Credential audience '%s' does not match verifier '%s'", payload.Aud, audience), + ) + } + } + + return result +} + +// VerifyCredentialWithResolver decodes the credential to extract the issuer +// domain, uses r to resolve discovery + revocation, and runs +// VerifyCredentialOffline. 
+func VerifyCredentialWithResolver( + credentialJWT string, + r resolver.DiscoveryResolver, + pinStore *pinning.KeyPinStore, + audience string, + config VerifierConfig, +) Result { + _, payload, _, err := jwt.DecodeJWTUnverified(credentialJWT) + if err != nil { + return failure(types.ErrAlgorithmRejected, fmt.Sprintf("JWT parse failed: %v", err)) + } + + disc, err := r.ResolveDiscovery(payload.Iss) + if err != nil { + return failure(types.ErrDiscoveryFetchFailed, fmt.Sprintf("Failed to resolve discovery document: %v", err)) + } + + rev, err := r.ResolveRevocation(payload.Iss, disc) + if err != nil { + return failure(types.ErrDiscoveryFetchFailed, "Revocation document unreachable (fail-closed)") + } + + return VerifyCredentialOffline(credentialJWT, disc, rev, pinStore, audience, config) +} diff --git a/go/pkg/verification/verification_test.go b/go/pkg/verification/verification_test.go new file mode 100644 index 0000000..bb4ccea --- /dev/null +++ b/go/pkg/verification/verification_test.go @@ -0,0 +1,280 @@ +package verification + +import ( + "crypto/ecdsa" + "testing" + "time" + + "github.com/ThirdKeyAi/agentpin/go/pkg/credential" + "github.com/ThirdKeyAi/agentpin/go/pkg/crypto" + "github.com/ThirdKeyAi/agentpin/go/pkg/discovery" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwk" + "github.com/ThirdKeyAi/agentpin/go/pkg/jwt" + "github.com/ThirdKeyAi/agentpin/go/pkg/pinning" + "github.com/ThirdKeyAi/agentpin/go/pkg/resolver" + "github.com/ThirdKeyAi/agentpin/go/pkg/revocation" + "github.com/ThirdKeyAi/agentpin/go/pkg/types" +) + +type fixture struct { + jwt string + discovery *types.DiscoveryDocument + revocation *types.RevocationDocument + pinStore *pinning.KeyPinStore + cfg VerifierConfig + priv *ecdsa.PrivateKey +} + +func setup(t *testing.T) fixture { + t.Helper() + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + j := jwk.VerifyingKeyToJWK(pub, "test-2026-01") + + conf := 
types.DataConfidential + intl := types.DataInternal + disc := discovery.BuildDiscoveryDocument( + "example.com", + types.EntityMaker, + []types.JWK{j}, + []types.AgentDeclaration{ + { + AgentID: "urn:agentpin:example.com:agent", + Name: "Test Agent", + Capabilities: []types.Capability{"read:*", "write:report"}, + Constraints: &types.Constraints{ + DataClassificationMax: &conf, + RateLimit: "100/hour", + }, + Status: types.AgentActive, + }, + }, + 2, + "2026-01-15T00:00:00Z", + ) + + jwtStr, err := credential.IssueCredential( + priv, "test-2026-01", "example.com", "urn:agentpin:example.com:agent", + "verifier.com", + []types.Capability{"read:data", "write:report"}, + &types.Constraints{ + DataClassificationMax: &intl, + RateLimit: "50/hour", + }, + nil, 3600, + ) + if err != nil { + t.Fatal(err) + } + rev := revocation.BuildRevocationDocument("example.com") + return fixture{ + jwt: jwtStr, + discovery: &disc, + revocation: &rev, + pinStore: pinning.NewKeyPinStore(), + cfg: DefaultVerifierConfig(), + priv: priv, + } +} + +func TestHappyPath(t *testing.T) { + f := setup(t) + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if !r.Valid { + t.Fatalf("expected valid: %+v", r) + } + if r.AgentID != "urn:agentpin:example.com:agent" { + t.Fatalf("agent_id: %s", r.AgentID) + } + if r.Issuer != "example.com" { + t.Fatalf("issuer: %s", r.Issuer) + } +} + +func TestExpiredCredential(t *testing.T) { + f := setup(t) + header := &types.JWTHeader{Alg: jwt.RequiredAlg, Typ: jwt.RequiredTyp, Kid: "test-2026-01"} + payload := &types.JWTPayload{ + Iss: "example.com", Sub: "urn:agentpin:example.com:agent", + Iat: 1000000, Exp: 1003600, + Jti: "expired", AgentpinVersion: "0.1", + Capabilities: []types.Capability{"read:data"}, + } + expired, err := jwt.EncodeJWT(header, payload, f.priv) + if err != nil { + t.Fatal(err) + } + r := VerifyCredentialOffline(expired, f.discovery, nil, pinning.NewKeyPinStore(), "", f.cfg) + if r.Valid { + 
t.Fatal("expired credential must fail") + } + if r.ErrorCode != types.ErrCredentialExpired { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + } +} + +func TestWrongAlgorithmRejected(t *testing.T) { + f := setup(t) + r := VerifyCredentialOffline("invalid.jwt.token", f.discovery, nil, pinning.NewKeyPinStore(), "", f.cfg) + if r.Valid { + t.Fatal("malformed jwt should fail") + } + if r.ErrorCode != types.ErrAlgorithmRejected { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + } +} + +func TestCredentialRevoked(t *testing.T) { + f := setup(t) + _, payload, _, _ := jwt.DecodeJWTUnverified(f.jwt) + revocation.AddRevokedCredential(f.revocation, payload.Jti, types.ReasonKeyCompromise) + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if r.Valid { + t.Fatal("revoked should fail") + } + if r.ErrorCode != types.ErrCredentialRevoked { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + } +} + +func TestAgentRevoked(t *testing.T) { + f := setup(t) + revocation.AddRevokedAgent(f.revocation, "urn:agentpin:example.com:agent", types.ReasonPrivilegeWithdrawn) + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if r.Valid { + t.Fatal("revoked agent should fail") + } +} + +func TestInactiveAgent(t *testing.T) { + f := setup(t) + f.discovery.Agents[0].Status = types.AgentSuspended + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if r.Valid { + t.Fatal("inactive agent should fail") + } + if r.ErrorCode != types.ErrAgentInactive { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + } +} + +func TestCapabilityExceeded(t *testing.T) { + f := setup(t) + f.discovery.Agents[0].Capabilities = []types.Capability{"read:limited"} + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if r.Valid { + t.Fatal("excess caps should fail") + } + if r.ErrorCode != types.ErrCapabilityExceeded { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + 
} +} + +func TestAudienceMismatch(t *testing.T) { + f := setup(t) + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "wrong.com", f.cfg) + if r.Valid { + t.Fatal("aud mismatch should fail") + } + if r.ErrorCode != types.ErrAudienceMismatch { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + } +} + +func TestKeyPinChangeRejected(t *testing.T) { + f := setup(t) + r1 := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if !r1.Valid { + t.Fatalf("first verify failed: %+v", r1) + } + // Now rotate the key in discovery + reissue with a new key. + kp2, _ := crypto.GenerateKeyPair() + priv2, _ := crypto.LoadPrivateKey(kp2.PrivateKeyPEM) + pub2, _ := crypto.LoadPublicKey(kp2.PublicKeyPEM) + f.discovery.PublicKeys = []types.JWK{jwk.VerifyingKeyToJWK(pub2, "test-2026-01")} + jwt2, _ := credential.IssueCredential( + priv2, "test-2026-01", "example.com", "urn:agentpin:example.com:agent", + "verifier.com", []types.Capability{"read:data"}, nil, nil, 3600, + ) + r2 := VerifyCredentialOffline(jwt2, f.discovery, f.revocation, f.pinStore, "verifier.com", f.cfg) + if r2.Valid { + t.Fatal("key rotation must fail without explicit pinning trust") + } + if r2.ErrorCode != types.ErrKeyPinMismatch { + t.Fatalf("ErrorCode: %s", r2.ErrorCode) + } +} + +func TestVerifyWithTrustBundleResolver(t *testing.T) { + f := setup(t) + b := types.TrustBundle{ + AgentpinBundleVersion: "0.1", + CreatedAt: time.Now().UTC().Format(time.RFC3339), + Documents: []types.DiscoveryDocument{*f.discovery}, + Revocations: []types.RevocationDocument{*f.revocation}, + } + r := resolver.NewTrustBundleResolver(&b) + res := VerifyCredentialWithResolver(f.jwt, r, pinning.NewKeyPinStore(), "verifier.com", f.cfg) + if !res.Valid { + t.Fatalf("trust bundle verify: %+v", res) + } + if res.AgentID != "urn:agentpin:example.com:agent" { + t.Fatal("agent id") + } +} + +func TestVerifyResolverMissingDomain(t *testing.T) { + f := setup(t) + b := 
types.TrustBundle{AgentpinBundleVersion: "0.1"} + r := resolver.NewTrustBundleResolver(&b) + res := VerifyCredentialWithResolver(f.jwt, r, pinning.NewKeyPinStore(), "verifier.com", f.cfg) + if res.Valid { + t.Fatal("missing domain should fail") + } + if res.ErrorCode != types.ErrDiscoveryFetchFailed { + t.Fatalf("ErrorCode: %s", res.ErrorCode) + } +} + +func TestDomainMismatch(t *testing.T) { + f := setup(t) + f.discovery.Entity = "other.com" + r := VerifyCredentialOffline(f.jwt, f.discovery, f.revocation, f.pinStore, "", f.cfg) + if r.Valid { + t.Fatal("domain mismatch should fail") + } + if r.ErrorCode != types.ErrDiscoveryInvalid { + t.Fatalf("ErrorCode: %s", r.ErrorCode) + } +} + +func TestWildcardAudienceAccepted(t *testing.T) { + kp, _ := crypto.GenerateKeyPair() + priv, _ := crypto.LoadPrivateKey(kp.PrivateKeyPEM) + pub, _ := crypto.LoadPublicKey(kp.PublicKeyPEM) + j := jwk.VerifyingKeyToJWK(pub, "test-key") + disc := discovery.BuildDiscoveryDocument( + "example.com", types.EntityMaker, + []types.JWK{j}, + []types.AgentDeclaration{ + { + AgentID: "urn:agentpin:example.com:agent", + Name: "T", Capabilities: []types.Capability{"read:*"}, + Status: types.AgentActive, + }, + }, + 2, "2026-01-15T00:00:00Z", + ) + cred, err := credential.IssueCredential( + priv, "test-key", "example.com", "urn:agentpin:example.com:agent", + "*", []types.Capability{"read:data"}, nil, nil, 3600, + ) + if err != nil { + t.Fatal(err) + } + r := VerifyCredentialOffline(cred, &disc, nil, pinning.NewKeyPinStore(), "any.com", DefaultVerifierConfig()) + if !r.Valid { + t.Fatalf("wildcard aud should pass: %+v", r) + } +} diff --git a/javascript/package.json b/javascript/package.json index 87492ae..eee2598 100644 --- a/javascript/package.json +++ b/javascript/package.json @@ -1,6 +1,6 @@ { "name": "agentpin", - "version": "0.2.0", + "version": "0.3.0", "description": "Domain-anchored cryptographic identity protocol for AI agents", "main": "src/index.js", "type": "module", diff --git 
// --- javascript/src/a2a.js ---
/**
 * A2A AgentCard signing and verification (v0.3.0).
 *
 * Mirrors the Rust `agentpin::a2a` module. AgentPin extends the Google A2A
 * AgentCard format with cryptographic identity verification: the `agentpin`
 * extension carries the AgentPin endpoint URL, the entity's public key as a
 * JWK, and a detached ECDSA P-256 signature over the canonical bytes of the
 * rest of the card. The canonical form is the card with its `agentpin` field
 * cleared, serialised as JSON with object keys sorted alphabetically
 * (matches the Rust `serde_json::to_value` + `BTreeMap` trick).
 */

import { createPublicKey } from 'crypto';
import { signData, verifySignature } from './crypto.js';
import { pemToJwk, jwkToPem, jwkThumbprint } from './jwk.js';
import { AllowedDomains } from './discovery.js';
import { AgentPinError, ErrorCode } from './types.js';

/**
 * Map an AgentPin capability (string or `{ id }` Capability object) onto a
 * minimal A2A AgentSkill.
 * @param {string|{id:string}} cap
 * @returns {{ id: string, name: string }}
 */
export function capabilityToSkill(cap) {
  let id;
  if (typeof cap === 'string') {
    id = cap;
  } else {
    id = (cap && cap.id) || String(cap);
  }
  return { id, name: id };
}

/**
 * Build an unsigned A2A AgentCard from an AgentPin `AgentDeclaration`.
 *
 * Capabilities map 1:1 to skills via `capabilityToSkill` unless an explicit
 * skill list is supplied; the `allowed_domains` constraint is copied into
 * `capabilities.allowed_domains` (omitted entirely when unrestricted, which
 * matches the Rust serde behaviour).
 *
 * @param {string} url - Public URL where the agent receives A2A traffic.
 * @param {object} declaration - AgentPin AgentDeclaration.
 * @param {object} [opts]
 * @param {Array} [opts.skills] - Override the auto-mapped skill list.
 * @param {boolean} [opts.streaming]
 * @param {boolean} [opts.pushNotifications]
 * @returns {object} Unsigned A2A AgentCard (no `agentpin` extension).
 */
export function buildUnsignedAgentCard(url, declaration, opts = {}) {
  const hasSkillOverride = Boolean(opts.skills && opts.skills.length > 0);
  const skills = hasSkillOverride
    ? opts.skills.map((skill) => ({ ...skill }))
    : (declaration.capabilities || []).map(capabilityToSkill);

  const allowed = declaration.constraints
    ? AllowedDomains.fromConstraints(declaration.constraints)
    : AllowedDomains.unrestricted();

  const capabilities = {
    streaming: Boolean(opts.streaming),
    pushNotifications: Boolean(opts.pushNotifications),
  };
  // Omit the field entirely when unrestricted (Rust serde skip behaviour).
  if (!AllowedDomains.isUnrestricted(allowed)) {
    capabilities.allowed_domains = allowed;
  }

  const card = { name: declaration.name, url, capabilities, skills };
  // description/version are optional; only copy when actually present.
  for (const field of ['description', 'version']) {
    const value = declaration[field];
    if (value !== undefined && value !== null) {
      card[field] = value;
    }
  }
  return card;
}

/**
 * Sign an A2A AgentCard with an ECDSA P-256 private key.
 *
 * @param {object} unsignedCard - Card produced by `buildUnsignedAgentCard`.
 * @param {string} privateKeyPem
 * @param {string} kid
 * @param {string} agentpinEndpoint - URL of the entity's AgentPin discovery
 *   document (`.well-known/agent-identity.json`).
 * @returns {object} Signed card with the `agentpin` extension populated.
 */
export function signAgentCard(unsignedCard, privateKeyPem, kid, agentpinEndpoint) {
  if (!agentpinEndpoint) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'signAgentCard requires agentpinEndpoint'
    );
  }
  // Sign the canonical bytes of the card with its extension cleared.
  const payload = canonicalizeForSigning({ ...unsignedCard, agentpin: undefined });
  const signature = signData(privateKeyPem, Buffer.from(payload, 'utf8'));

  // Embed the matching public key so verifiers can run a stand-alone
  // tamper check before chaining back to discovery.
  const spkiPem = createPublicKey(privateKeyPem)
    .export({ type: 'spki', format: 'pem' });

  return {
    ...unsignedCard,
    agentpin: {
      agentpin_endpoint: agentpinEndpoint,
      public_key_jwk: pemToJwk(spkiPem, kid),
      signature,
    },
  };
}

/**
 * One-shot helper: build + sign in a single call.
 *
 * @param {string} url
 * @param {object} declaration
 * @param {string} privateKeyPem
 * @param {string} kid
 * @param {string} agentpinEndpoint
 * @param {object} [opts] - Forwarded to `buildUnsignedAgentCard`.
 * @returns {object} Signed A2A AgentCard.
 */
export function buildAndSignAgentCard(url, declaration, privateKeyPem, kid, agentpinEndpoint, opts = {}) {
  return signAgentCard(
    buildUnsignedAgentCard(url, declaration, opts),
    privateKeyPem,
    kid,
    agentpinEndpoint
  );
}

/**
 * Verify the `agentpin` extension on an A2A AgentCard.
 *
 * Returns nothing on success; throws `AgentPinError(DISCOVERY_INVALID)` on
 * any failure (extension missing, malformed JWK, signature mismatch).
 *
 * This proves only that the card is untampered relative to the key inside
 * its own extension; the caller still must verify the JWK chains back to a
 * trusted AgentPin discovery document (pair with `A2aAgentCardResolver`).
 *
 * @param {object} card - A2A AgentCard.
 */
export function verifyAgentpinExtension(card) {
  const ext = card && card.agentpin;
  if (!ext) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'AgentCard has no agentpin extension'
    );
  }
  const canonical = canonicalizeForSigning({ ...card, agentpin: undefined });
  const pem = jwkToPem(ext.public_key_jwk);
  if (!verifySignature(pem, Buffer.from(canonical, 'utf8'), ext.signature)) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'A2A AgentCard signature did not verify against extension JWK'
    );
  }
}

/**
 * JWK thumbprint of the public key carried in a card's `agentpin`
 * extension; used by resolvers to match the card's key against a discovery
 * document.
 *
 * @param {object} extension - Card's `agentpin` extension.
 * @returns {string} Hex thumbprint (no `sha256:` prefix).
 */
export function extensionKeyThumbprint(extension) {
  return jwkThumbprint(extension.public_key_jwk);
}

// ---------------------------------------------------------------------------
// Canonicalisation
// ---------------------------------------------------------------------------
+ */ +export function canonicalizeForSigning(value) { + return JSON.stringify(sortedCanonical(value)); +} + +function sortedCanonical(value) { + if (value === null || value === undefined) return null; + if (Array.isArray(value)) return value.map(sortedCanonical); + if (typeof value === 'object') { + const out = {}; + for (const key of Object.keys(value).sort()) { + const v = value[key]; + if (v === undefined) continue; + out[key] = sortedCanonical(v); + } + return out; + } + return value; +} diff --git a/javascript/src/capability.js b/javascript/src/capability.js index 8440cde..0a81437 100644 --- a/javascript/src/capability.js +++ b/javascript/src/capability.js @@ -107,3 +107,37 @@ export function capabilitiesHash(capabilities) { const json = JSON.stringify(sorted); return sha256Hex(json); } + +/** Core actions defined by the AgentPin taxonomy. */ +export const CORE_ACTIONS = ['read', 'write', 'execute', 'admin', 'delegate']; + +/** + * Check if a string uses reverse-domain notation (contains a dot). + * @param {string} s + * @returns {boolean} + */ +function isReverseDomain(s) { + return s.includes('.'); +} + +/** + * Validate a capability against the AgentPin taxonomy. 
// --- javascript/src/capability.js (continued) ---

/**
 * Validate a capability against the AgentPin taxonomy.
 *
 * Rules:
 *   - must parse as `action:resource`
 *   - `admin:*` is forbidden (admin grants must be explicitly scoped)
 *   - actions outside CORE_ACTIONS must use reverse-domain notation
 *
 * @param {Capability} cap
 * @throws {Error} when the capability violates any rule above
 */
export function validateCapability(cap) {
  const parts = Capability.parse(cap.value);
  if (!parts) {
    throw new Error(`Invalid capability format (missing ':'): ${cap.value}`);
  }
  const [action, resource] = parts;
  // Checked before the core-action allow list because 'admin' is itself a
  // core action.
  if (action === 'admin' && resource === '*') {
    throw new Error('admin:* wildcard is not allowed; admin capabilities must be explicitly scoped');
  }
  if (!CORE_ACTIONS.includes(action) && !isReverseDomain(action)) {
    throw new Error(`Custom action '${action}' must use reverse-domain prefix (e.g., com.example.${action})`);
  }
}

// --- javascript/src/discovery.js (additions; the buildDiscoveryDocument
// hunk is only partially visible in this chunk and is not reproduced) ---

/**
 * Helpers for the `allowed_domains` constraint as a typed allow-list
 * (v0.3.0). Convention: an empty list means *unrestricted* (all domains
 * trusted); a non-empty list restricts the agent to exactly those domains.
 * Mirrors `AllowedDomains` in the Rust SDK.
 */
export const AllowedDomains = Object.freeze({
  /** An empty (unrestricted) list. */
  unrestricted() {
    return [];
  },

  /** Build a list from any iterable of domain strings. */
  fromDomains(domains) {
    return Array.from(domains, (d) => String(d));
  },

  /** True when the list imposes no restriction (null/empty). */
  isUnrestricted(list) {
    return !list || list.length === 0;
  },

  /** True when `domain` is permitted. An empty list allows everything. */
  allows(list, domain) {
    if (this.isUnrestricted(list)) return true;
    return list.indexOf(domain) !== -1;
  },

  /**
   * Intersection of two allow-lists, where `unrestricted ∩ X = X`. The
   * intersection of two non-empty lists can be empty — which this
   * convention then reads as unrestricted; callers that must distinguish
   * "restricted to nothing" from "unrestricted" should not use this helper.
   */
  intersect(a, b) {
    if (this.isUnrestricted(a)) return [...(b || [])];
    if (this.isUnrestricted(b)) return [...a];
    const allowed = new Set(b);
    return a.filter((d) => allowed.has(d));
  },

  /**
   * Extract the typed list from a `Constraints` object; `unrestricted()`
   * when constraints are absent or carry no `allowed_domains`.
   */
  fromConstraints(constraints) {
    const list = constraints && constraints.allowed_domains;
    return list ? [...list] : this.unrestricted();
  },
});
// --- javascript/src/dns.js ---
/**
 * DNS TXT cross-verification at `_agentpin.{domain}` (v0.3.0).
 *
 * Mirrors the Rust `agentpin::dns` module. Wire format:
 *
 *   _agentpin.example.com. 3600 IN TXT "v=agentpin1; kid=acme-2026-04; fp=sha256:a1b2c3..."
 *
 * Semantics:
 *   - Absent record                       -> no effect (DNS TXT is purely additive)
 *   - Present and matching                -> verification succeeds
 *   - Present but mismatching / malformed -> hard failure (DISCOVERY_INVALID)
 *
 * Mismatch is fail-closed because a publisher who *intentionally* published
 * a TXT record has signaled that DNS is part of their trust chain.
 */

import { jwkThumbprint } from './jwk.js';
import { AgentPinError, ErrorCode } from './types.js';

const VERSION = 'agentpin1';
const FP_PREFIX = 'sha256:';

/**
 * Parse a raw TXT record value (e.g.
 * `"v=agentpin1; kid=acme-2026-04; fp=sha256:..."`).
 *
 * Whitespace around `;` and `=` is tolerated. Field order is not
 * significant. Throws on missing `v`/`fp`, unknown version, or malformed
 * fingerprint. Unknown fields are ignored for forward compatibility.
 *
 * @param {string} value
 * @returns {{ version: string, kid: string|null, fingerprint: string }}
 * @throws {AgentPinError} DISCOVERY_INVALID on any malformed input
 */
export function parseTxtRecord(value) {
  let version = null;
  let kid = null;
  let fp = null;

  for (const rawPart of value.split(';')) {
    const part = rawPart.trim();
    if (part.length === 0) continue;
    const eq = part.indexOf('=');
    if (eq === -1) {
      throw new AgentPinError(
        ErrorCode.DISCOVERY_INVALID,
        `DNS TXT field missing '=': ${part}`
      );
    }
    const k = part.slice(0, eq).trim().toLowerCase();
    const v = part.slice(eq + 1).trim();
    switch (k) {
      case 'v':
        version = v;
        break;
      case 'kid':
        kid = v;
        break;
      case 'fp':
        // Fingerprints compare case-insensitively; normalise once here.
        fp = v.toLowerCase();
        break;
      default:
        // Forward-compat: ignore unknown fields.
        break;
    }
  }

  if (version === null) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'DNS TXT record missing required \'v\' field'
    );
  }
  if (version !== VERSION) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      `DNS TXT unsupported version: ${version}`
    );
  }
  if (fp === null) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'DNS TXT record missing required \'fp\' field'
    );
  }
  if (!fp.startsWith(FP_PREFIX)) {
    // Fixed: the original message was garbled ("must be sha256::").
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      `DNS TXT 'fp' must start with '${FP_PREFIX}': ${fp}`
    );
  }

  return { version, kid, fingerprint: fp };
}

/**
 * Cross-check a parsed TXT record's fingerprint against a discovery
 * document.
 *
 * Returns nothing on success. Throws `AgentPinError(DISCOVERY_INVALID)`
 * when no key in `discovery.public_keys` matches `txt.fingerprint` (and
 * `txt.kid` when present).
 *
 * @param {object} discovery
 * @param {{ kid: string|null, fingerprint: string }} txt
 */
export function verifyDnsMatch(discovery, txt) {
  const target = txt.fingerprint.toLowerCase();
  for (const jwk of discovery.public_keys || []) {
    // jwkThumbprint may or may not include the prefix; normalise before
    // comparing against the (always-prefixed) TXT fingerprint.
    let computed = jwkThumbprint(jwk).toLowerCase();
    if (!computed.startsWith(FP_PREFIX)) {
      computed = `${FP_PREFIX}${computed}`;
    }
    if (computed !== target) continue;
    if (txt.kid && jwk.kid !== txt.kid) continue;
    return;
  }
  throw new AgentPinError(
    ErrorCode.DISCOVERY_INVALID,
    `DNS TXT fingerprint ${target} does not match any key in the discovery document`
  );
}

/**
 * Build the lookup name for a domain: `_agentpin.{domain}` with any
 * trailing dot(s) stripped.
 *
 * @param {string} domain
 * @returns {string}
 */
export function txtRecordName(domain) {
  return `_agentpin.${domain.replace(/\.+$/, '')}`;
}
+ * + * Returns: + * - `null` when no `_agentpin` TXT record exists (or DNS NODATA/NOTFOUND) + * - parsed record when present + * Throws `AgentPinError(DISCOVERY_INVALID)` when the record exists but is + * malformed, or `AgentPinError(DISCOVERY_FETCH_FAILED)` for other DNS errors. + * + * When multiple TXT records exist at the same name, the first whose value + * contains `v=agentpin1` is used. Multiple chunks per record are joined per + * RFC 1464. + * + * @param {string} domain + * @returns {Promise<{ version: string, kid: string|null, fingerprint: string }|null>} + */ +export async function fetchDnsTxt(domain) { + const { resolveTxt } = await import('dns/promises'); + const name = txtRecordName(domain); + + let records; + try { + records = await resolveTxt(name); + } catch (err) { + if (err && (err.code === 'ENODATA' || err.code === 'ENOTFOUND')) { + return null; + } + throw new AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + `DNS TXT lookup failed for ${name}: ${err.message || err}` + ); + } + + for (const chunks of records) { + const joined = chunks.join(''); + if (joined.includes('v=agentpin1')) { + return parseTxtRecord(joined); + } + } + return null; +} diff --git a/javascript/src/index.js b/javascript/src/index.js index 74fd78a..56bf5e8 100644 --- a/javascript/src/index.js +++ b/javascript/src/index.js @@ -41,6 +41,8 @@ export { Capability, capabilitiesSubset, capabilitiesHash, + validateCapability, + CORE_ACTIONS, } from './capability.js'; export { @@ -60,6 +62,7 @@ export { findKeyByKid, findAgentById, fetchDiscoveryDocument, + AllowedDomains, } from './discovery.js'; export { @@ -88,6 +91,7 @@ export { createChallenge, createResponse, verifyResponse, + verifyResponseWithNonceStore, } from './mutual.js'; export { @@ -103,4 +107,53 @@ export { verifyCredentialWithBundle, } from './bundle.js'; -export const version = '0.2.0'; +export { + httpExtractCredential, + httpFormatAuthorizationHeader, + mcpExtractCredential, + mcpFormatMetaField, + 
wsExtractCredential, + wsFormatAuthMessage, + GRPC_METADATA_KEY, + grpcExtractCredential, + grpcFormatMetadataValue, +} from './transport.js'; + +export { + prepareRotation, + applyRotation, + completeRotation, +} from './rotation.js'; + +export { + InMemoryNonceStore, +} from './nonce.js'; + +export { + capabilityToSkill, + buildUnsignedAgentCard, + signAgentCard, + buildAndSignAgentCard, + verifyAgentpinExtension, + extensionKeyThumbprint, + canonicalizeForSigning, +} from './a2a.js'; + +export { + LocalAgentCardStore, + cardEndpointHost, + deriveDiscoveryFromCard, +} from './resolverLocal.js'; + +export { + A2aAgentCardResolver, +} from './resolverA2a.js'; + +export { + parseTxtRecord, + verifyDnsMatch, + txtRecordName, + fetchDnsTxt, +} from './dns.js'; + +export const version = '0.3.0'; diff --git a/javascript/src/mutual.js b/javascript/src/mutual.js index 32cc37c..70dd738 100644 --- a/javascript/src/mutual.js +++ b/javascript/src/mutual.js @@ -5,7 +5,7 @@ import { randomBytes } from 'crypto'; import { signData, verifySignature } from './crypto.js'; -const NONCE_EXPIRY_SECS = 60; +export const NONCE_EXPIRY_SECS = 60; /** * Base64url encode bytes (no padding). @@ -85,3 +85,21 @@ export function verifyResponse(response, challenge, publicKeyPem) { // Verify signature over the nonce return verifySignature(publicKeyPem, Buffer.from(challenge.nonce), response.signature); } + +/** + * Verify a challenge response with optional nonce deduplication. 
+ * @param {object} response + * @param {object} challenge + * @param {string} publicKeyPem + * @param {import('./nonce.js').InMemoryNonceStore|null} nonceStore - Optional nonce store for replay prevention + * @returns {boolean} + * @throws {Error} if nonce has been replayed or expired + */ +export function verifyResponseWithNonceStore(response, challenge, publicKeyPem, nonceStore = null) { + if (nonceStore) { + if (!nonceStore.checkAndRecord(response.nonce, NONCE_EXPIRY_SECS * 1000)) { + throw new Error(`Nonce '${response.nonce}' has already been used (replay attack)`); + } + } + return verifyResponse(response, challenge, publicKeyPem); +} diff --git a/javascript/src/nonce.js b/javascript/src/nonce.js new file mode 100644 index 0000000..c339cdf --- /dev/null +++ b/javascript/src/nonce.js @@ -0,0 +1,30 @@ +/** + * Nonce deduplication for replay attack prevention. + */ + +/** + * In-memory nonce store that tracks seen nonces with TTL-based expiry. + */ +export class InMemoryNonceStore { + constructor() { + /** @type {Map} nonce -> expiry timestamp (ms) */ + this._entries = new Map(); + } + + /** + * Check if nonce is fresh. Returns true if fresh, false if replay. + * @param {string} nonce + * @param {number} ttlMs - TTL in milliseconds + * @returns {boolean} + */ + checkAndRecord(nonce, ttlMs) { + const now = Date.now(); + // Lazy cleanup + for (const [key, expiry] of this._entries) { + if (expiry <= now) this._entries.delete(key); + } + if (this._entries.has(nonce)) return false; + this._entries.set(nonce, now + ttlMs); + return true; + } +} diff --git a/javascript/src/resolverA2a.js b/javascript/src/resolverA2a.js new file mode 100644 index 0000000..d1e28fc --- /dev/null +++ b/javascript/src/resolverA2a.js @@ -0,0 +1,118 @@ +/** + * A2aAgentCardResolver (v0.3.0) — fetches A2A AgentCards over HTTPS. + * + * Mirrors the Rust `agentpin::resolver_a2a` module: + * 1. GET https://{domain}/.well-known/agent-card.json + * 2. 
// --- javascript/src/resolverA2a.js ---
/**
 * A2aAgentCardResolver (v0.3.0) — fetches A2A AgentCards over HTTPS.
 *
 * Mirrors the Rust `agentpin::resolver_a2a` module:
 *   1. GET https://{domain}/.well-known/agent-card.json
 *   2. Verify the AgentPin extension signature against its embedded JWK
 *   3. Cross-check that the agentpin endpoint inside the card matches the
 *      fetched domain (defends against a card pointing at someone else's
 *      AgentPin discovery)
 *   4. Derive a DiscoveryDocument so the rest of the AgentPin stack runs
 *      unchanged
 */

import { verifyAgentpinExtension } from './a2a.js';
import { cardEndpointHost, deriveDiscoveryFromCard } from './resolverLocal.js';
import { AgentPinError, ErrorCode } from './types.js';

const AGENT_CARD_PATH = '/.well-known/agent-card.json';
const DEFAULT_TIMEOUT_MS = 10000;

/**
 * Resolver that fetches an A2A AgentCard from a domain over HTTPS and
 * exposes both the original card and the derived discovery document.
 */
export class A2aAgentCardResolver {
  /**
   * @param {object} [options]
   * @param {number} [options.timeoutMs=10000] - Abort the card fetch after this long.
   * @param {Function} [options.fetchImpl] - fetch-compatible implementation;
   *   defaults to `globalThis.fetch` (built-in on Node >= 18).
   */
  constructor({ timeoutMs = DEFAULT_TIMEOUT_MS, fetchImpl } = {}) {
    this.timeoutMs = timeoutMs;
    this._fetch = fetchImpl || globalThis.fetch;
    this._lastCard = null;
    this._lastDomain = null;
  }

  /**
   * Last successfully resolved AgentCard for `domain`, or `null` when no
   * card has been resolved for that domain yet.
   */
  lastCard(domain) {
    return this._lastDomain === domain ? this._lastCard : null;
  }

  /**
   * Fetch + verify the AgentCard at
   * `https://{domain}/.well-known/agent-card.json` and return the derived
   * discovery document.
   *
   * @param {string} domain
   * @returns {Promise<object>}
   */
  async resolveDiscovery(domain) {
    if (!this._fetch) {
      throw new AgentPinError(
        ErrorCode.DISCOVERY_FETCH_FAILED,
        'No fetch implementation available — pass `fetchImpl` or run on Node >= 18'
      );
    }

    const url = `https://${domain}${AGENT_CARD_PATH}`;
    const response = await this._fetchWithTimeout(url);
    if (!response.ok) {
      throw new AgentPinError(
        ErrorCode.DISCOVERY_FETCH_FAILED,
        `Failed to fetch ${url}: HTTP ${response.status}`
      );
    }

    let card;
    try {
      card = await response.json();
    } catch (err) {
      throw new AgentPinError(
        ErrorCode.DISCOVERY_INVALID,
        `Failed to parse AgentCard at ${url}: ${err.message || err}`
      );
    }

    // Stand-alone tamper check against the card's own embedded JWK...
    verifyAgentpinExtension(card);

    // ...then ensure the card does not delegate to a different domain's
    // AgentPin discovery document.
    const endpointHost = cardEndpointHost(card);
    if (endpointHost !== domain) {
      throw new AgentPinError(
        ErrorCode.DOMAIN_MISMATCH,
        `AgentCard at ${domain} declares agentpin endpoint host ${endpointHost} (mismatch)`
      );
    }

    const discovery = deriveDiscoveryFromCard(card);
    this._lastCard = card;
    this._lastDomain = domain;
    return discovery;
  }

  // Private: GET with an abort timer wrapped around the request itself.
  // NOTE(review): as in the original, reading the body (response.json())
  // is not bounded by timeoutMs — confirm that is acceptable.
  async _fetchWithTimeout(url) {
    const controller = typeof AbortController === 'function' ? new AbortController() : null;
    const timer = controller
      ? setTimeout(() => controller.abort(), this.timeoutMs)
      : null;
    try {
      return await this._fetch(url, {
        redirect: 'error',
        headers: { Accept: 'application/json' },
        signal: controller ? controller.signal : undefined,
      });
    } catch (err) {
      throw new AgentPinError(
        ErrorCode.DISCOVERY_FETCH_FAILED,
        `Failed to fetch ${url}: ${err.message || err}`
      );
    } finally {
      if (timer) clearTimeout(timer);
    }
  }

  /**
   * A2A AgentCards don't carry revocation data. Pair with a separate
   * revocation resolver if revocation is required. Always returns `null`.
   */
  async resolveRevocation(_domain, _discovery) {
    return null;
  }
}
// --- javascript/src/resolverLocal.js ---
/**
 * LocalAgentCardStore (v0.3.0) — in-memory A2A AgentCard store.
 *
 * Mirrors the Rust `agentpin::resolver_local` module. For agents that don't
 * serve HTTP themselves (CLI tools, daemon processes, external agents pushed
 * into a coordinator at registration time), the coordinator can keep their
 * AgentCards in memory and look them up by domain without network calls —
 * supporting Symbiont's push-based external-agent registration flow.
 */

import { verifyAgentpinExtension } from './a2a.js';
import { AllowedDomains } from './discovery.js';
import { AgentStatus, EntityType, AgentPinError, ErrorCode } from './types.js';

/**
 * Derive the host portion of an AgentCard's agentpin endpoint URL.
 *
 * `https://example.com/.well-known/agent-identity.json` -> `example.com`.
 *
 * @param {object} card
 * @returns {string}
 * @throws {AgentPinError} DISCOVERY_INVALID when the extension or URL is bad
 */
export function cardEndpointHost(card) {
  const ext = card && card.agentpin;
  if (!ext) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'AgentCard has no agentpin extension'
    );
  }
  let parsed;
  try {
    parsed = new URL(ext.agentpin_endpoint);
  } catch (err) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      `Invalid agentpin_endpoint URL: ${err.message || err}`
    );
  }
  if (!parsed.hostname) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'agentpin_endpoint URL has no host'
    );
  }
  return parsed.hostname;
}

// Lowercase alphanumerics kept as-is; every other UTF-16 code unit becomes
// '-'; leading/trailing runs of '-' are trimmed.
function slug(input) {
  let out = '';
  for (let i = 0; i < input.length; i += 1) {
    const ch = input[i];
    out += /[A-Za-z0-9]/.test(ch) ? ch.toLowerCase() : '-';
  }
  return out.replace(/^-+|-+$/g, '');
}

/**
 * Derive a minimal `DiscoveryDocument` from a signed A2A AgentCard.
 *
 * Mirrors `derive_discovery_from_card` in the Rust SDK: the card's
 * public-key JWK becomes the sole `public_keys` entry, and the card's
 * name/description/version/skills become a single `AgentDeclaration`, so
 * the rest of the AgentPin verification stack (TOFU pinning, revocation,
 * capability validation) runs against AgentCards unchanged.
 *
 * @param {object} card
 * @returns {object}
 */
export function deriveDiscoveryFromCard(card) {
  const ext = card.agentpin;
  if (!ext) {
    throw new AgentPinError(
      ErrorCode.DISCOVERY_INVALID,
      'AgentCard has no agentpin extension'
    );
  }
  const domain = cardEndpointHost(card);
  const allowed = (card.capabilities && card.capabilities.allowed_domains) || [];

  const agent = {
    agent_id: `urn:agentpin:${domain}:${slug(card.name)}`,
    name: card.name,
    capabilities: (card.skills || []).map((skill) => skill.id),
    status: AgentStatus.ACTIVE,
  };
  if (card.description !== undefined && card.description !== null) {
    agent.description = card.description;
  }
  if (card.version !== undefined && card.version !== null) {
    agent.version = card.version;
  }
  if (!AllowedDomains.isUnrestricted(allowed)) {
    agent.constraints = { allowed_domains: [...allowed] };
  }

  return {
    // NOTE(review): buildDiscoveryDocument emits agentpin_version '0.1';
    // confirm '0.3' here matches the Rust mirror intentionally.
    agentpin_version: '0.3',
    entity: domain,
    entity_type: EntityType.BOTH,
    public_keys: [ext.public_key_jwk],
    agents: [agent],
    a2a_endpoint: ext.agentpin_endpoint,
    max_delegation_depth: 0,
    updated_at: new Date().toISOString(),
  };
}

/**
 * In-memory store of pre-registered A2A AgentCards keyed by their AgentPin
 * discovery domain.
 *
 * Cards are added via `register(card)` (after the extension signature is
 * verified) and looked up via `resolveDiscovery(domain)`. Pair with a
 * `ChainResolver` (or wrap manually) to fall back to HTTP for unregistered
 * domains.
 */
export class LocalAgentCardStore {
  constructor() {
    // domain -> raw AgentCard
    this._cards = new Map();
    // domain -> derived DiscoveryDocument (computed once at register time)
    this._docs = new Map();
  }

  /**
   * Register an AgentCard. Verifies the extension signature before storing.
   * Re-registering an existing domain replaces the prior entry — useful for
   * handling key rotations.
   *
   * @param {object} card
   */
  register(card) {
    verifyAgentpinExtension(card);
    const domain = cardEndpointHost(card);
    this._cards.set(domain, card);
    this._docs.set(domain, deriveDiscoveryFromCard(card));
  }

  /** Number of registered AgentCards. */
  get size() {
    return this._cards.size;
  }

  /** `true` when no AgentCards are registered. */
  isEmpty() {
    return this._cards.size === 0;
  }

  /**
   * Raw AgentCard for a domain, or `null` when none registered.
   * @param {string} domain
   * @returns {object|null}
   */
  resolveCard(domain) {
    return this._cards.get(domain) || null;
  }

  /**
   * Derived discovery document for a domain.
   *
   * @param {string} domain
   * @returns {object}
   * @throws {AgentPinError} DISCOVERY_INVALID when the domain isn't registered
   */
  resolveDiscovery(domain) {
    const doc = this._docs.get(domain);
    if (!doc) {
      throw new AgentPinError(
        ErrorCode.DISCOVERY_INVALID,
        `Domain '${domain}' not in LocalAgentCardStore`
      );
    }
    return doc;
  }

  /**
   * The store doesn't carry revocation data — pair with an HTTP or file
   * resolver for revocation. Always returns `null`.
   */
  resolveRevocation(_domain, _discovery) {
    return null;
  }

  /**
   * Drop a registered AgentCard. Returns `true` when one was removed.
   * @param {string} domain
   */
  remove(domain) {
    const removed = this._cards.delete(domain);
    this._docs.delete(domain);
    return removed;
  }
}

// --- javascript/src/rotation.js (header) ---
/**
 * Key rotation helpers for AgentPin.
 */

import { generateKeyPair, generateKeyId } from './crypto.js';
import { pemToJwk } from './jwk.js';
import { addRevokedKey } from './revocation.js';
+ * @param {string} oldKid - The key ID of the key being rotated out
+ * @returns {{ newKeyPair: { privateKeyPem: string, publicKeyPem: string }, newKid: string, newJwk: object, oldKid: string }}
+ */
+export function prepareRotation(oldKid) {
+  const newKeyPair = generateKeyPair();
+  const newKid = generateKeyId(newKeyPair.publicKeyPem);
+  const newJwk = pemToJwk(newKeyPair.publicKeyPem, newKid);
+  return { newKeyPair, newKid, newJwk, oldKid };
+}
+
+/**
+ * Apply a rotation plan by adding the new key to a discovery document.
+ * @param {object} doc - Discovery document
+ * @param {{ newJwk: object }} plan - Rotation plan from prepareRotation
+ */
+export function applyRotation(doc, plan) {
+  doc.public_keys.push(plan.newJwk);
+  doc.updated_at = new Date().toISOString();
+}
+
+/**
+ * Complete a rotation by removing the old key and adding it to the revocation document.
+ * @param {object} doc - Discovery document
+ * @param {object} revocationDoc - Revocation document
+ * @param {string} oldKid - The key ID being retired
+ * @param {string} reason - Revocation reason
+ */
+export function completeRotation(doc, revocationDoc, oldKid, reason) {
+  doc.public_keys = doc.public_keys.filter(k => k.kid !== oldKid);
+  doc.updated_at = new Date().toISOString();
+  addRevokedKey(revocationDoc, oldKid, reason);
+}
diff --git a/javascript/src/transport.js b/javascript/src/transport.js
new file mode 100644
index 0000000..dbd6932
--- /dev/null
+++ b/javascript/src/transport.js
@@ -0,0 +1,133 @@
+/**
+ * Transport binding helpers for AgentPin (spec Section 13).
+ */
+
+import { AgentPinError, ErrorCode } from './types.js';
+
+// --- HTTP ---
+const HTTP_PREFIX = 'AgentPin ';
+
+/**
+ * Extract a credential JWT from an HTTP Authorization header value.
+ * @param {string} headerValue
+ * @returns {string} The JWT credential
+ * @throws {AgentPinError} if the header is missing, not a string, or malformed
+ */
+export function httpExtractCredential(headerValue) {
+  // Guard first: a missing/non-string header must surface as AgentPinError,
+  // not as a TypeError from calling .startsWith on undefined.
+  if (typeof headerValue !== 'string') {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'Authorization header value is missing or not a string');
+  }
+  if (!headerValue.startsWith(HTTP_PREFIX)) {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'Missing \'AgentPin \' prefix in Authorization header');
+  }
+  const jwt = headerValue.slice(HTTP_PREFIX.length);
+  if (!jwt) {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'Empty credential in Authorization header');
+  }
+  return jwt;
+}
+
+/**
+ * Format a JWT into an HTTP Authorization header value.
+ * @param {string} jwt
+ * @returns {string}
+ */
+export function httpFormatAuthorizationHeader(jwt) {
+  return `AgentPin ${jwt}`;
+}
+
+// --- MCP ---
+const MCP_FIELD = 'agentpin_credential';
+
+/**
+ * Extract a credential JWT from MCP metadata.
+ * @param {object} meta
+ * @returns {string} The JWT credential
+ * @throws {AgentPinError} if meta is not an object, or the field is missing or not a string
+ */
+export function mcpExtractCredential(meta) {
+  // The `in` operator throws TypeError on null/primitive right-hand sides;
+  // normalize that into the documented AgentPinError instead.
+  if (meta === null || typeof meta !== 'object') {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'MCP metadata is not an object');
+  }
+  if (!(MCP_FIELD in meta)) {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, `Missing '${MCP_FIELD}' field in MCP metadata`);
+  }
+  const value = meta[MCP_FIELD];
+  if (typeof value !== 'string') {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, `'${MCP_FIELD}' field is not a string`);
+  }
+  return value;
+}
+
+/**
+ * Format a JWT into an MCP metadata field.
+ * @param {string} jwt
+ * @returns {object}
+ */
+export function mcpFormatMetaField(jwt) {
+  return { [MCP_FIELD]: jwt };
+}
+
+// --- WebSocket ---
+const WS_AUTH_TYPE = 'agentpin-auth';
+
+/**
+ * Extract a credential JWT from a WebSocket auth message (JSON string).
+ * @param {string} message - JSON-encoded message
+ * @returns {string} The JWT credential
+ * @throws {AgentPinError} if the message is malformed
+ */
+export function wsExtractCredential(message) {
+  let parsed;
+  try {
+    parsed = JSON.parse(message);
+  } catch (e) {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, `Invalid JSON: ${e.message}`);
+  }
+  // JSON.parse accepts 'null', numbers, strings, arrays, etc.; reading
+  // .type off null would raise TypeError, so reject non-objects explicitly.
+  if (parsed === null || typeof parsed !== 'object') {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'Auth message is not a JSON object');
+  }
+  if (parsed.type !== WS_AUTH_TYPE) {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, `Expected type '${WS_AUTH_TYPE}', got '${parsed.type}'`);
+  }
+  if (typeof parsed.credential !== 'string') {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'Missing or non-string \'credential\' field');
+  }
+  return parsed.credential;
+}
+
+/**
+ * Format a JWT into a WebSocket auth message (JSON string).
+ * @param {string} jwt
+ * @returns {string} JSON-encoded message
+ */
+export function wsFormatAuthMessage(jwt) {
+  return JSON.stringify({ type: WS_AUTH_TYPE, credential: jwt });
+}
+
+// --- gRPC ---
+
+/** The gRPC metadata key for AgentPin credentials. */
+export const GRPC_METADATA_KEY = 'agentpin-credential';
+
+/**
+ * Extract a credential JWT from a gRPC metadata value.
+ * @param {string} metadataValue
+ * @returns {string} The JWT credential
+ * @throws {AgentPinError} if the value is empty
+ */
+export function grpcExtractCredential(metadataValue) {
+  if (!metadataValue) {
+    throw new AgentPinError(ErrorCode.DISCOVERY_FETCH_FAILED, 'Empty gRPC metadata value');
+  }
+  return metadataValue;
+}
+
+/**
+ * Format a JWT into a gRPC metadata value.
+ * @param {string} jwt
+ * @returns {string}
+ */
+export function grpcFormatMetadataValue(jwt) {
+  return jwt;
+}
diff --git a/javascript/tests/a2a.test.js b/javascript/tests/a2a.test.js
new file mode 100644
index 0000000..184705e
--- /dev/null
+++ b/javascript/tests/a2a.test.js
@@ -0,0 +1,171 @@
+/**
+ * Tests for A2A AgentCard types, builder, and verification (v0.3.0).
+ */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +import { + generateKeyPair, + generateKeyId, + buildUnsignedAgentCard, + signAgentCard, + buildAndSignAgentCard, + verifyAgentpinExtension, + capabilityToSkill, + extensionKeyThumbprint, + canonicalizeForSigning, + jwkThumbprint, + AgentStatus, +} from '../src/index.js'; + +function declarationWith(capabilities, allowedDomains = null) { + const decl = { + agent_id: 'urn:agentpin:example.com:test', + name: 'Test Agent', + description: 'test', + version: '1.0.0', + capabilities, + status: AgentStatus.ACTIVE, + credential_ttl_max: 3600, + }; + if (allowedDomains !== null) { + decl.constraints = { allowed_domains: allowedDomains }; + } + return decl; +} + +describe('capabilityToSkill', () => { + it('maps a string capability to a skill', () => { + const skill = capabilityToSkill('read:customers/*'); + assert.equal(skill.id, 'read:customers/*'); + assert.equal(skill.name, 'read:customers/*'); + assert.equal(skill.description, undefined); + }); +}); + +describe('buildUnsignedAgentCard', () => { + it('maps capabilities to skills 1:1', () => { + const decl = declarationWith(['read:customers', 'write:invoices']); + const card = buildUnsignedAgentCard('https://example.com/agent', decl); + assert.equal(card.skills.length, 2); + assert.equal(card.skills[0].id, 'read:customers'); + assert.equal(card.skills[1].id, 'write:invoices'); + assert.equal(card.agentpin, undefined); + }); + + it('maps allowed_domains constraint into capabilities', () => { + const decl = declarationWith(['read:*'], ['a.com', 'b.com']); + const card = buildUnsignedAgentCard('https://example.com/agent', decl); + assert.deepEqual(card.capabilities.allowed_domains, ['a.com', 'b.com']); + }); + + it('omits allowed_domains when unrestricted', () => { + const decl = declarationWith(['read:*']); + const card = buildUnsignedAgentCard('https://example.com/agent', decl); + assert.equal(card.capabilities.allowed_domains, 
undefined); + }); + + it('honours skill overrides', () => { + const decl = declarationWith(['read:*']); + const card = buildUnsignedAgentCard('https://example.com/agent', decl, { + skills: [{ id: 'read:*', name: 'Read everything', description: 'desc' }], + }); + assert.equal(card.skills[0].description, 'desc'); + }); +}); + +describe('signAgentCard', () => { + it('requires agentpinEndpoint', () => { + const { privateKeyPem } = generateKeyPair(); + const decl = declarationWith(['read:*']); + const unsigned = buildUnsignedAgentCard('https://example.com/agent', decl); + assert.throws(() => signAgentCard(unsigned, privateKeyPem, 'kid-1', '')); + }); + + it('produces a card that verifies cleanly', () => { + const { privateKeyPem } = generateKeyPair(); + const decl = declarationWith(['read:customers', 'write:invoices'], ['partner.com']); + const card = buildAndSignAgentCard( + 'https://example.com/agent', + decl, + privateKeyPem, + 'kid-1', + 'https://example.com/.well-known/agent-identity.json', + { streaming: true } + ); + assert.ok(card.agentpin); + verifyAgentpinExtension(card); + }); + + it('roundtrips through JSON and re-verifies', () => { + const { privateKeyPem } = generateKeyPair(); + const decl = declarationWith(['read:*']); + const card = buildAndSignAgentCard( + 'https://example.com/agent', + decl, + privateKeyPem, + 'kid-1', + 'https://example.com/.well-known/agent-identity.json' + ); + const json = JSON.stringify(card); + const parsed = JSON.parse(json); + verifyAgentpinExtension(parsed); + }); +}); + +describe('verifyAgentpinExtension', () => { + it('throws when extension is missing', () => { + const decl = declarationWith(['read:*']); + const card = buildUnsignedAgentCard('https://example.com/agent', decl); + assert.throws(() => verifyAgentpinExtension(card), /no agentpin extension/); + }); + + it('throws when card has been tampered with', () => { + const { privateKeyPem } = generateKeyPair(); + const decl = declarationWith(['read:customers']); + const 
card = buildAndSignAgentCard( + 'https://example.com/agent', + decl, + privateKeyPem, + 'kid-1', + 'https://example.com/.well-known/agent-identity.json' + ); + card.url = 'https://attacker.example/agent'; + assert.throws(() => verifyAgentpinExtension(card), /did not verify/); + }); +}); + +describe('extensionKeyThumbprint', () => { + it('matches jwkThumbprint over the extension JWK', () => { + const { privateKeyPem, publicKeyPem } = generateKeyPair(); + const decl = declarationWith(['read:*']); + const card = buildAndSignAgentCard( + 'https://example.com/agent', + decl, + privateKeyPem, + 'kid-1', + 'https://example.com/.well-known/agent-identity.json' + ); + const fromHelper = extensionKeyThumbprint(card.agentpin); + const direct = jwkThumbprint(card.agentpin.public_key_jwk); + assert.equal(fromHelper, direct); + }); +}); + +describe('canonicalizeForSigning', () => { + it('produces sorted-key compact JSON', () => { + const out = canonicalizeForSigning({ b: 1, a: { d: 4, c: 3 } }); + assert.equal(out, '{"a":{"c":3,"d":4},"b":1}'); + }); + + it('drops undefined values', () => { + const out = canonicalizeForSigning({ a: 1, b: undefined, c: 3 }); + assert.equal(out, '{"a":1,"c":3}'); + }); + + it('recurses into arrays', () => { + const out = canonicalizeForSigning([{ b: 1, a: 2 }]); + assert.equal(out, '[{"a":2,"b":1}]'); + }); +}); diff --git a/javascript/tests/allowedDomains.test.js b/javascript/tests/allowedDomains.test.js new file mode 100644 index 0000000..f620366 --- /dev/null +++ b/javascript/tests/allowedDomains.test.js @@ -0,0 +1,79 @@ +/** + * Tests for AllowedDomains helpers + a2a_endpoint discovery field (v0.3.0). 
+ */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +import { + AllowedDomains, + EntityType, + buildDiscoveryDocument, +} from '../src/index.js'; + +describe('AllowedDomains', () => { + it('unrestricted accepts anything', () => { + const ad = AllowedDomains.unrestricted(); + assert.ok(AllowedDomains.isUnrestricted(ad)); + assert.ok(AllowedDomains.allows(ad, 'anything.com')); + }); + + it('restricted filters', () => { + const ad = AllowedDomains.fromDomains(['a.com', 'b.com']); + assert.ok(!AllowedDomains.isUnrestricted(ad)); + assert.ok(AllowedDomains.allows(ad, 'a.com')); + assert.ok(!AllowedDomains.allows(ad, 'c.com')); + }); + + it('intersects with unrestricted to return the other', () => { + const unrestricted = AllowedDomains.unrestricted(); + const restricted = AllowedDomains.fromDomains(['a.com', 'b.com']); + assert.deepEqual(AllowedDomains.intersect(unrestricted, restricted), restricted); + assert.deepEqual(AllowedDomains.intersect(restricted, unrestricted), restricted); + }); + + it('intersect returns overlap', () => { + const lhs = AllowedDomains.fromDomains(['a.com', 'b.com', 'c.com']); + const rhs = AllowedDomains.fromDomains(['b.com', 'c.com', 'd.com']); + assert.deepEqual(AllowedDomains.intersect(lhs, rhs), ['b.com', 'c.com']); + }); + + it('fromConstraints extracts existing list', () => { + assert.deepEqual( + AllowedDomains.fromConstraints({ allowed_domains: ['a.com'] }), + ['a.com'] + ); + }); + + it('fromConstraints returns unrestricted for missing fields', () => { + assert.ok(AllowedDomains.isUnrestricted(AllowedDomains.fromConstraints({}))); + assert.ok(AllowedDomains.isUnrestricted(AllowedDomains.fromConstraints(null))); + }); +}); + +describe('buildDiscoveryDocument with a2a_endpoint', () => { + it('includes a2a_endpoint when provided', () => { + const doc = buildDiscoveryDocument( + 'example.com', + EntityType.MAKER, + [{ kid: 'k', kty: 'EC', crv: 'P-256', x: 'x', y: 'y' }], + [], + 2, + 
'2026-05-01T00:00:00Z', + { a2aEndpoint: 'https://example.com/.well-known/agent-card.json' } + ); + assert.equal(doc.a2a_endpoint, 'https://example.com/.well-known/agent-card.json'); + }); + + it('omits a2a_endpoint when not provided', () => { + const doc = buildDiscoveryDocument( + 'example.com', + EntityType.MAKER, + [{ kid: 'k', kty: 'EC', crv: 'P-256', x: 'x', y: 'y' }], + [], + 2, + '2026-05-01T00:00:00Z' + ); + assert.equal(doc.a2a_endpoint, undefined); + }); +}); diff --git a/javascript/tests/capability.test.js b/javascript/tests/capability.test.js index 7b8fc65..28e99ad 100644 --- a/javascript/tests/capability.test.js +++ b/javascript/tests/capability.test.js @@ -4,7 +4,7 @@ import { test, describe } from 'node:test'; import assert from 'node:assert'; -import { Capability, capabilitiesSubset, capabilitiesHash } from '../src/capability.js'; +import { Capability, capabilitiesSubset, capabilitiesHash, validateCapability, CORE_ACTIONS } from '../src/capability.js'; describe('Capability.parse', () => { test('parses action and resource', () => { @@ -64,3 +64,51 @@ describe('capabilitiesHash', () => { assert.strictEqual(capabilitiesHash(caps1), capabilitiesHash(caps2)); }); }); + +describe('CORE_ACTIONS', () => { + test('contains expected actions', () => { + assert.deepStrictEqual(CORE_ACTIONS, ['read', 'write', 'execute', 'admin', 'delegate']); + }); +}); + +describe('validateCapability', () => { + test('accepts core actions', () => { + assert.doesNotThrow(() => validateCapability(new Capability('read:codebase'))); + assert.doesNotThrow(() => validateCapability(new Capability('write:report'))); + assert.doesNotThrow(() => validateCapability(new Capability('execute:task'))); + assert.doesNotThrow(() => validateCapability(new Capability('delegate:sub'))); + }); + + test('accepts admin with scoped resource', () => { + assert.doesNotThrow(() => validateCapability(new Capability('admin:users'))); + }); + + test('rejects admin:* wildcard', () => { + assert.throws( + () 
=> validateCapability(new Capability('admin:*')), + /admin:\* wildcard is not allowed/ + ); + }); + + test('accepts reverse-domain custom action', () => { + assert.doesNotThrow(() => validateCapability(new Capability('com.example.deploy:staging'))); + }); + + test('rejects custom action without reverse-domain', () => { + assert.throws( + () => validateCapability(new Capability('deploy:staging')), + /must use reverse-domain prefix/ + ); + }); + + test('rejects missing colon', () => { + assert.throws( + () => validateCapability(new Capability('readcodebase')), + /Invalid capability format/ + ); + }); + + test('accepts read:* wildcard', () => { + assert.doesNotThrow(() => validateCapability(new Capability('read:*'))); + }); +}); diff --git a/javascript/tests/dns.test.js b/javascript/tests/dns.test.js new file mode 100644 index 0000000..5c4b1b4 --- /dev/null +++ b/javascript/tests/dns.test.js @@ -0,0 +1,136 @@ +/** + * Tests for DNS TXT cross-verification (v0.3.0). + */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +import { + generateKeyPair, + pemToJwk, + jwkThumbprint, + parseTxtRecord, + verifyDnsMatch, + txtRecordName, + EntityType, +} from '../src/index.js'; + +function makeDiscovery(jwks) { + return { + agentpin_version: '0.3', + entity: 'example.com', + entity_type: EntityType.MAKER, + public_keys: jwks, + agents: [], + max_delegation_depth: 0, + updated_at: '2026-05-01T00:00:00Z', + }; +} + +describe('parseTxtRecord', () => { + it('parses a full record', () => { + const r = parseTxtRecord('v=agentpin1; kid=acme-2026-04; fp=sha256:abcd1234'); + assert.equal(r.version, 'agentpin1'); + assert.equal(r.kid, 'acme-2026-04'); + assert.equal(r.fingerprint, 'sha256:abcd1234'); + }); + + it('parses a minimal record', () => { + const r = parseTxtRecord('v=agentpin1;fp=sha256:abc'); + assert.equal(r.version, 'agentpin1'); + assert.equal(r.kid, null); + assert.equal(r.fingerprint, 'sha256:abc'); + }); + + it('lowercases the 
fingerprint', () => { + const r = parseTxtRecord('v=agentpin1; fp=SHA256:ABCDEF'); + assert.equal(r.fingerprint, 'sha256:abcdef'); + }); + + it('tolerates whitespace and unsorted fields', () => { + const r = parseTxtRecord(' fp = sha256:beef ; v = agentpin1 '); + assert.equal(r.version, 'agentpin1'); + assert.equal(r.fingerprint, 'sha256:beef'); + }); + + it('ignores unknown fields for forward compat', () => { + const r = parseTxtRecord('v=agentpin1; fp=sha256:abc; future=ignoreme'); + assert.equal(r.fingerprint, 'sha256:abc'); + }); + + it('rejects missing v', () => { + assert.throws(() => parseTxtRecord('fp=sha256:abc')); + }); + + it('rejects missing fp', () => { + assert.throws(() => parseTxtRecord('v=agentpin1')); + }); + + it('rejects unsupported version', () => { + assert.throws(() => parseTxtRecord('v=agentpin99; fp=sha256:abc')); + }); + + it('rejects fingerprint without sha256: prefix', () => { + assert.throws(() => parseTxtRecord('v=agentpin1; fp=abc')); + }); + + it('rejects field without =', () => { + assert.throws(() => parseTxtRecord('v=agentpin1; broken')); + }); + + it('rejects SchemaPin-format records', () => { + // Sanity check: must not accidentally accept SchemaPin's TXT format. + assert.throws(() => parseTxtRecord('v=schemapin1; fp=sha256:abc')); + }); +}); + +describe('verifyDnsMatch', () => { + function fingerprintForKey(jwk) { + const t = jwkThumbprint(jwk).toLowerCase(); + return t.startsWith('sha256:') ? 
t : `sha256:${t}`; + } + + it('matches when TXT fp equals the key thumbprint', () => { + const { publicKeyPem } = generateKeyPair(); + const jwk = pemToJwk(publicKeyPem, 'kid-1'); + const doc = makeDiscovery([jwk]); + const txt = { kid: null, fingerprint: fingerprintForKey(jwk) }; + verifyDnsMatch(doc, txt); + }); + + it('matches one of multiple discovery keys', () => { + const { publicKeyPem: pk1 } = generateKeyPair(); + const { publicKeyPem: pk2 } = generateKeyPair(); + const jwk1 = pemToJwk(pk1, 'kid-a'); + const jwk2 = pemToJwk(pk2, 'kid-b'); + const doc = makeDiscovery([jwk1, jwk2]); + const txt = { kid: 'kid-b', fingerprint: fingerprintForKey(jwk2) }; + verifyDnsMatch(doc, txt); + }); + + it('fails when kid in TXT does not match the discovery key', () => { + const { publicKeyPem } = generateKeyPair(); + const jwk = pemToJwk(publicKeyPem, 'kid-real'); + const doc = makeDiscovery([jwk]); + const txt = { kid: 'kid-different', fingerprint: fingerprintForKey(jwk) }; + assert.throws(() => verifyDnsMatch(doc, txt)); + }); + + it('fails on fingerprint mismatch', () => { + const { publicKeyPem } = generateKeyPair(); + const jwk = pemToJwk(publicKeyPem, 'kid-1'); + const doc = makeDiscovery([jwk]); + const txt = { + kid: null, + fingerprint: 'sha256:0000000000000000000000000000000000000000000000000000000000000000', + }; + assert.throws(() => verifyDnsMatch(doc, txt)); + }); +}); + +describe('txtRecordName', () => { + it('strips trailing dot', () => { + assert.equal(txtRecordName('example.com'), '_agentpin.example.com'); + assert.equal(txtRecordName('example.com.'), '_agentpin.example.com'); + }); +}); diff --git a/javascript/tests/integration.test.js b/javascript/tests/integration.test.js new file mode 100644 index 0000000..c0eae4a --- /dev/null +++ b/javascript/tests/integration.test.js @@ -0,0 +1,299 @@ +/** + * End-to-end integration tests for AgentPin. 
+ */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +import { + generateKeyPair, + generateKeyId, + pemToJwk, + issueCredential, + decodeJwtUnverified, + verifyJwt, + verifyCredentialOffline, + defaultVerifierConfig, + buildDiscoveryDocument, + validateDiscoveryDocument, + findKeyByKid, + findAgentById, + buildRevocationDocument, + addRevokedKey, + checkRevocation, + KeyPinStore, + PinningResult, + checkPinning, + createChallenge, + createResponse, + verifyResponseWithNonceStore, + InMemoryNonceStore, + httpExtractCredential, + httpFormatAuthorizationHeader, + mcpExtractCredential, + mcpFormatMetaField, + wsExtractCredential, + wsFormatAuthMessage, + grpcExtractCredential, + grpcFormatMetadataValue, + prepareRotation, + applyRotation, + completeRotation, + AgentPinError, +} from '../src/index.js'; + +function makeTestSetup() { + const { privateKeyPem, publicKeyPem } = generateKeyPair(); + const kid = generateKeyId(publicKeyPem); + const jwk = pemToJwk(publicKeyPem, kid); + const agentId = 'urn:agentpin:example.com:test-agent'; + const doc = buildDiscoveryDocument( + 'example.com', + 'maker', + [jwk], + [ + { + agent_id: agentId, + name: 'Test Agent', + capabilities: ['read:*', 'write:report'], + status: 'active', + credential_ttl_max: 3600, + }, + ], + 2, + '2026-01-01T00:00:00Z' + ); + return { privateKeyPem, publicKeyPem, kid, agentId, doc }; +} + +describe('Maker-Deployer Flow', () => { + it('should issue, decode, verify, and validate a credential end-to-end', () => { + const { privateKeyPem, publicKeyPem, kid, agentId, doc } = makeTestSetup(); + + // Issue a credential + const jwtStr = issueCredential( + privateKeyPem, + kid, + 'example.com', + agentId, + 'verifier.com', + ['read:data', 'write:report'], + null, + null, + 3600 + ); + assert.ok(jwtStr); + assert.equal(jwtStr.split('.').length, 3); + + // Decode unverified to inspect + const { header, payload } = decodeJwtUnverified(jwtStr); + assert.equal(header.alg, 
'ES256'); + assert.equal(header.typ, 'agentpin-credential+jwt'); + assert.equal(header.kid, kid); + assert.equal(payload.iss, 'example.com'); + assert.equal(payload.sub, agentId); + + // Verify signature + const verified = verifyJwt(jwtStr, publicKeyPem); + assert.equal(verified.header.kid, kid); + assert.equal(verified.payload.iss, 'example.com'); + + // Full offline verification + const pinStore = new KeyPinStore(); + const config = defaultVerifierConfig(); + const result = verifyCredentialOffline( + jwtStr, + doc, + null, + pinStore, + 'verifier.com', + config + ); + assert.ok(result.valid, `Expected valid, got: ${result.error_message}`); + assert.equal(result.agent_id, agentId); + assert.equal(result.issuer, 'example.com'); + }); +}); + +describe('Revocation Flow', () => { + it('should detect a revoked key during verification', () => { + const { privateKeyPem, publicKeyPem, kid, agentId, doc } = makeTestSetup(); + + const jwtStr = issueCredential( + privateKeyPem, + kid, + 'example.com', + agentId, + null, + ['read:data'], + null, + null, + 3600 + ); + + const { header, payload } = decodeJwtUnverified(jwtStr); + + // Clean revocation: should pass + const revDoc = buildRevocationDocument('example.com'); + checkRevocation(revDoc, payload.jti, agentId, kid); // no error + + // Add revoked key + addRevokedKey(revDoc, kid, 'key_compromise'); + + // Now checkRevocation should fail + assert.throws( + () => checkRevocation(revDoc, payload.jti, agentId, kid), + AgentPinError + ); + + // Full offline verification should also fail + const pinStore = new KeyPinStore(); + const config = defaultVerifierConfig(); + const vresult = verifyCredentialOffline( + jwtStr, + doc, + revDoc, + pinStore, + null, + config + ); + assert.equal(vresult.valid, false); + }); +}); + +describe('Mutual Verification with Nonce Store', () => { + it('should prevent nonce replay', () => { + const { privateKeyPem, publicKeyPem } = generateKeyPair(); + + const store = new InMemoryNonceStore(); + const 
challenge = createChallenge(); + const response = createResponse(challenge, privateKeyPem, 'test-key'); + + // First verification should succeed + const valid = verifyResponseWithNonceStore( + response, + challenge, + publicKeyPem, + store + ); + assert.ok(valid); + + // Second verification with same nonce should fail (replay) + assert.throws( + () => + verifyResponseWithNonceStore( + response, + challenge, + publicKeyPem, + store + ), + /already been used/ + ); + }); +}); + +describe('Transport Roundtrip', () => { + it('should format and extract credentials across all transports', () => { + const { privateKeyPem, publicKeyPem } = generateKeyPair(); + const kid = generateKeyId(publicKeyPem); + + const jwtStr = issueCredential( + privateKeyPem, + kid, + 'example.com', + 'urn:agentpin:example.com:test-agent', + null, + ['read:data'], + null, + null, + 3600 + ); + + // HTTP roundtrip + const httpHeader = httpFormatAuthorizationHeader(jwtStr); + const httpExtracted = httpExtractCredential(httpHeader); + assert.equal(httpExtracted, jwtStr); + + // MCP roundtrip + const mcpMeta = mcpFormatMetaField(jwtStr); + const mcpExtracted = mcpExtractCredential(mcpMeta); + assert.equal(mcpExtracted, jwtStr); + + // WebSocket roundtrip + const wsMsg = wsFormatAuthMessage(jwtStr); + const wsExtracted = wsExtractCredential(wsMsg); + assert.equal(wsExtracted, jwtStr); + + // gRPC roundtrip + const grpcVal = grpcFormatMetadataValue(jwtStr); + const grpcExtracted = grpcExtractCredential(grpcVal); + assert.equal(grpcExtracted, jwtStr); + }); +}); + +describe('Key Rotation Lifecycle', () => { + it('should add new key, then remove old key and record revocation', () => { + const { publicKeyPem } = generateKeyPair(); + const oldKid = generateKeyId(publicKeyPem); + const oldJwk = pemToJwk(publicKeyPem, oldKid); + + const doc = buildDiscoveryDocument( + 'example.com', + 'maker', + [oldJwk], + [], + 2, + '2026-01-01T00:00:00Z' + ); + assert.equal(doc.public_keys.length, 1); + + // Prepare 
rotation + const plan = prepareRotation(oldKid); + assert.notEqual(plan.newKid, oldKid); + + // Apply rotation: both keys should be present + applyRotation(doc, plan); + assert.equal(doc.public_keys.length, 2); + const kids = doc.public_keys.map((k) => k.kid); + assert.ok(kids.includes(oldKid)); + assert.ok(kids.includes(plan.newKid)); + + // Complete rotation: old key removed, added to revocation + const revDoc = buildRevocationDocument('example.com'); + completeRotation(doc, revDoc, oldKid, 'superseded'); + + assert.equal(doc.public_keys.length, 1); + assert.equal(doc.public_keys[0].kid, plan.newKid); + assert.equal(revDoc.revoked_keys.length, 1); + assert.equal(revDoc.revoked_keys[0].kid, oldKid); + assert.equal(revDoc.revoked_keys[0].reason, 'superseded'); + }); +}); + +describe('Pinning Flow', () => { + it('should pin on first use, match on second, error on different key', () => { + const { publicKeyPem: pub1 } = generateKeyPair(); + const kid1 = generateKeyId(pub1); + const jwk1 = pemToJwk(pub1, kid1); + + const store = new KeyPinStore(); + + // First verification pins the key + const result1 = checkPinning(store, 'example.com', jwk1); + assert.equal(result1, PinningResult.FIRST_USE); + + // Same key succeeds + const result2 = checkPinning(store, 'example.com', jwk1); + assert.equal(result2, PinningResult.MATCHED); + + // Different key triggers error + const { publicKeyPem: pub2 } = generateKeyPair(); + const kid2 = generateKeyId(pub2); + const jwk2 = pemToJwk(pub2, kid2); + + assert.throws( + () => checkPinning(store, 'example.com', jwk2), + AgentPinError + ); + }); +}); diff --git a/javascript/tests/nonce.test.js b/javascript/tests/nonce.test.js new file mode 100644 index 0000000..18fb8c2 --- /dev/null +++ b/javascript/tests/nonce.test.js @@ -0,0 +1,72 @@ +/** + * Tests for nonce deduplication and replay prevention. 
+ */ + +import { test, describe } from 'node:test'; +import assert from 'node:assert'; +import { InMemoryNonceStore } from '../src/nonce.js'; +import { generateKeyPair } from '../src/crypto.js'; +import { createChallenge, createResponse, verifyResponseWithNonceStore } from '../src/mutual.js'; + +describe('InMemoryNonceStore', () => { + test('accepts fresh nonce', () => { + const store = new InMemoryNonceStore(); + assert.strictEqual(store.checkAndRecord('nonce-1', 60000), true); + }); + + test('rejects duplicate nonce', () => { + const store = new InMemoryNonceStore(); + assert.strictEqual(store.checkAndRecord('nonce-1', 60000), true); + assert.strictEqual(store.checkAndRecord('nonce-1', 60000), false); + }); + + test('accepts different nonces', () => { + const store = new InMemoryNonceStore(); + assert.strictEqual(store.checkAndRecord('nonce-1', 60000), true); + assert.strictEqual(store.checkAndRecord('nonce-2', 60000), true); + }); + + test('expired nonces are cleaned up', () => { + const store = new InMemoryNonceStore(); + // Insert with 0ms TTL (already expired) + store._entries.set('old-nonce', Date.now() - 1); + // Triggering checkAndRecord should clean up the expired entry + assert.strictEqual(store.checkAndRecord('new-nonce', 60000), true); + assert.ok(!store._entries.has('old-nonce')); + }); +}); + +describe('verifyResponseWithNonceStore', () => { + test('passes with nonce store on first use', () => { + const kp = generateKeyPair(); + const store = new InMemoryNonceStore(); + const challenge = createChallenge(); + const response = createResponse(challenge, kp.privateKeyPem, 'test-key'); + const valid = verifyResponseWithNonceStore(response, challenge, kp.publicKeyPem, store); + assert.ok(valid); + }); + + test('rejects replay with nonce store', () => { + const kp = generateKeyPair(); + const store = new InMemoryNonceStore(); + const challenge = createChallenge(); + const response = createResponse(challenge, kp.privateKeyPem, 'test-key'); + + // First use 
succeeds + verifyResponseWithNonceStore(response, challenge, kp.publicKeyPem, store); + + // Second use (replay) throws + assert.throws( + () => verifyResponseWithNonceStore(response, challenge, kp.publicKeyPem, store), + /replay attack/ + ); + }); + + test('works without nonce store (null)', () => { + const kp = generateKeyPair(); + const challenge = createChallenge(); + const response = createResponse(challenge, kp.privateKeyPem, 'test-key'); + const valid = verifyResponseWithNonceStore(response, challenge, kp.publicKeyPem, null); + assert.ok(valid); + }); +}); diff --git a/javascript/tests/resolverA2a.test.js b/javascript/tests/resolverA2a.test.js new file mode 100644 index 0000000..c607ae8 --- /dev/null +++ b/javascript/tests/resolverA2a.test.js @@ -0,0 +1,106 @@ +/** + * Tests for A2aAgentCardResolver (v0.3.0). + * + * Uses a stub `fetchImpl` so we don't need a real HTTP server. + */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +import { + generateKeyPair, + buildAndSignAgentCard, + A2aAgentCardResolver, + AgentStatus, +} from '../src/index.js'; + +function signedCardForDomain(domain) { + const { privateKeyPem } = generateKeyPair(); + const decl = { + agent_id: `urn:agentpin:${domain}:test`, + name: 'Test Agent', + description: 'desc', + version: '1.0.0', + capabilities: ['read:*'], + credential_ttl_max: 3600, + status: AgentStatus.ACTIVE, + }; + return buildAndSignAgentCard( + `https://${domain}/agent`, + decl, + privateKeyPem, + 'kid-1', + `https://${domain}/.well-known/agent-identity.json` + ); +} + +function stubFetch(responses) { + return async (url) => { + const entry = responses[url]; + if (!entry) { + throw new Error(`unexpected fetch: ${url}`); + } + if (entry.networkError) { + throw new Error(entry.networkError); + } + return { + ok: entry.status >= 200 && entry.status < 300, + status: entry.status, + async json() { + if (entry.body === undefined) throw new Error('no body'); + return entry.body; + }, + }; + 
}; +} + +describe('A2aAgentCardResolver', () => { + it('resolves and verifies a card served over HTTPS', async () => { + const card = signedCardForDomain('example.com'); + const fetchImpl = stubFetch({ + 'https://example.com/.well-known/agent-card.json': { status: 200, body: card }, + }); + const resolver = new A2aAgentCardResolver({ fetchImpl }); + const doc = await resolver.resolveDiscovery('example.com'); + assert.equal(doc.entity, 'example.com'); + assert.equal(doc.public_keys.length, 1); + assert.deepEqual(resolver.lastCard('example.com'), card); + }); + + it('rejects an HTTP error response', async () => { + const fetchImpl = stubFetch({ + 'https://example.com/.well-known/agent-card.json': { status: 404 }, + }); + const resolver = new A2aAgentCardResolver({ fetchImpl }); + await assert.rejects(() => resolver.resolveDiscovery('example.com'), /HTTP 404/); + }); + + it('rejects a card whose extension does not verify', async () => { + const card = signedCardForDomain('example.com'); + card.url = 'https://attacker.example/agent'; // tamper + const fetchImpl = stubFetch({ + 'https://example.com/.well-known/agent-card.json': { status: 200, body: card }, + }); + const resolver = new A2aAgentCardResolver({ fetchImpl }); + await assert.rejects(() => resolver.resolveDiscovery('example.com'), /did not verify/); + }); + + it('rejects a card whose agentpin endpoint host disagrees with the fetch domain', async () => { + const card = signedCardForDomain('other.com'); // valid for other.com + const fetchImpl = stubFetch({ + 'https://example.com/.well-known/agent-card.json': { status: 200, body: card }, + }); + const resolver = new A2aAgentCardResolver({ fetchImpl }); + await assert.rejects(() => resolver.resolveDiscovery('example.com'), /mismatch/); + }); + + it('resolveRevocation returns null', async () => { + const card = signedCardForDomain('example.com'); + const fetchImpl = stubFetch({ + 'https://example.com/.well-known/agent-card.json': { status: 200, body: card }, + }); 
+ const resolver = new A2aAgentCardResolver({ fetchImpl }); + const doc = await resolver.resolveDiscovery('example.com'); + assert.equal(await resolver.resolveRevocation('example.com', doc), null); + }); +}); diff --git a/javascript/tests/resolverLocal.test.js b/javascript/tests/resolverLocal.test.js new file mode 100644 index 0000000..63bc5b9 --- /dev/null +++ b/javascript/tests/resolverLocal.test.js @@ -0,0 +1,124 @@ +/** + * Tests for LocalAgentCardStore (v0.3.0). + */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +import { + generateKeyPair, + buildAndSignAgentCard, + LocalAgentCardStore, + cardEndpointHost, + deriveDiscoveryFromCard, + AgentStatus, +} from '../src/index.js'; + +function declaration() { + return { + agent_id: 'urn:agentpin:example.com:tester', + name: 'Tester', + description: 'Test agent', + version: '1.0.0', + capabilities: ['read:*'], + constraints: { allowed_domains: ['partner.com'] }, + credential_ttl_max: 3600, + status: AgentStatus.ACTIVE, + }; +} + +function signedCard() { + const { privateKeyPem } = generateKeyPair(); + return buildAndSignAgentCard( + 'https://example.com/agent', + declaration(), + privateKeyPem, + 'kid-1', + 'https://example.com/.well-known/agent-identity.json' + ); +} + +describe('cardEndpointHost', () => { + it('returns the host of the agentpin endpoint URL', () => { + const card = signedCard(); + assert.equal(cardEndpointHost(card), 'example.com'); + }); + + it('throws when the card has no agentpin extension', () => { + assert.throws(() => cardEndpointHost({ name: 'x' })); + }); +}); + +describe('deriveDiscoveryFromCard', () => { + it('produces a discovery document with the card key and skills', () => { + const card = signedCard(); + const doc = deriveDiscoveryFromCard(card); + assert.equal(doc.entity, 'example.com'); + assert.equal(doc.public_keys.length, 1); + assert.equal(doc.agents.length, 1); + assert.equal(doc.agents[0].name, 'Tester'); + 
assert.deepEqual(doc.agents[0].capabilities, ['read:*']); + assert.deepEqual(doc.agents[0].constraints.allowed_domains, ['partner.com']); + assert.equal(doc.a2a_endpoint, 'https://example.com/.well-known/agent-identity.json'); + }); +}); + +describe('LocalAgentCardStore', () => { + it('registers then resolves', () => { + const store = new LocalAgentCardStore(); + store.register(signedCard()); + assert.equal(store.size, 1); + const doc = store.resolveDiscovery('example.com'); + assert.equal(doc.entity, 'example.com'); + assert.equal(doc.agents[0].name, 'Tester'); + }); + + it('propagates signature failures from register()', () => { + const card = signedCard(); + card.url = 'https://attacker.example/agent'; // tampered + const store = new LocalAgentCardStore(); + assert.throws(() => store.register(card)); + assert.ok(store.isEmpty()); + }); + + it('resolveDiscovery throws for unknown domain', () => { + const store = new LocalAgentCardStore(); + assert.throws(() => store.resolveDiscovery('missing.com')); + }); + + it('re-registering replaces the prior entry', () => { + const store = new LocalAgentCardStore(); + store.register(signedCard()); + store.register(signedCard()); + assert.equal(store.size, 1); + }); + + it('remove() drops the entry', () => { + const store = new LocalAgentCardStore(); + store.register(signedCard()); + assert.equal(store.remove('example.com'), true); + assert.ok(store.isEmpty()); + assert.equal(store.remove('example.com'), false); + }); + + it('resolveCard returns the original card', () => { + const store = new LocalAgentCardStore(); + store.register(signedCard()); + const card = store.resolveCard('example.com'); + assert.equal(card.name, 'Tester'); + }); + + it('resolveRevocation returns null', () => { + const store = new LocalAgentCardStore(); + store.register(signedCard()); + const doc = store.resolveDiscovery('example.com'); + assert.equal(store.resolveRevocation('example.com', doc), null); + }); + + it('preserves allowed_domains into the 
derived discovery doc', () => { + const store = new LocalAgentCardStore(); + store.register(signedCard()); + const doc = store.resolveDiscovery('example.com'); + assert.deepEqual(doc.agents[0].constraints.allowed_domains, ['partner.com']); + }); +}); diff --git a/javascript/tests/rotation.test.js b/javascript/tests/rotation.test.js new file mode 100644 index 0000000..4edc7bc --- /dev/null +++ b/javascript/tests/rotation.test.js @@ -0,0 +1,53 @@ +/** + * Tests for key rotation helpers. + */ + +import { test, describe } from 'node:test'; +import assert from 'node:assert'; +import { prepareRotation, applyRotation, completeRotation } from '../src/rotation.js'; +import { buildRevocationDocument } from '../src/revocation.js'; + +describe('key rotation', () => { + test('prepareRotation generates new key and preserves oldKid', () => { + const plan = prepareRotation('old-key-1'); + assert.strictEqual(plan.oldKid, 'old-key-1'); + assert.ok(plan.newKid); + assert.ok(plan.newKeyPair.privateKeyPem); + assert.ok(plan.newKeyPair.publicKeyPem); + assert.strictEqual(plan.newJwk.kid, plan.newKid); + assert.strictEqual(plan.newJwk.kty, 'EC'); + assert.strictEqual(plan.newJwk.crv, 'P-256'); + }); + + test('applyRotation adds new key to discovery document', () => { + const doc = { + public_keys: [{ kid: 'old-key-1', kty: 'EC' }], + updated_at: '2024-01-01T00:00:00.000Z', + }; + const plan = prepareRotation('old-key-1'); + applyRotation(doc, plan); + assert.strictEqual(doc.public_keys.length, 2); + assert.strictEqual(doc.public_keys[1].kid, plan.newKid); + assert.notStrictEqual(doc.updated_at, '2024-01-01T00:00:00.000Z'); + }); + + test('completeRotation removes old key and adds revocation', () => { + const plan = prepareRotation('old-key-1'); + const doc = { + public_keys: [ + { kid: 'old-key-1', kty: 'EC' }, + plan.newJwk, + ], + updated_at: '2024-01-01T00:00:00.000Z', + }; + const revDoc = buildRevocationDocument('example.com'); + + completeRotation(doc, revDoc, 'old-key-1', 
'superseded'); + + assert.strictEqual(doc.public_keys.length, 1); + assert.strictEqual(doc.public_keys[0].kid, plan.newKid); + assert.strictEqual(revDoc.revoked_keys.length, 1); + assert.strictEqual(revDoc.revoked_keys[0].kid, 'old-key-1'); + assert.strictEqual(revDoc.revoked_keys[0].reason, 'superseded'); + }); +}); diff --git a/javascript/tests/transport.test.js b/javascript/tests/transport.test.js new file mode 100644 index 0000000..f2ddba9 --- /dev/null +++ b/javascript/tests/transport.test.js @@ -0,0 +1,141 @@ +/** + * Tests for transport binding helpers. + */ + +import { test, describe } from 'node:test'; +import assert from 'node:assert'; +import { + httpExtractCredential, + httpFormatAuthorizationHeader, + mcpExtractCredential, + mcpFormatMetaField, + wsExtractCredential, + wsFormatAuthMessage, + grpcExtractCredential, + grpcFormatMetadataValue, + GRPC_METADATA_KEY, +} from '../src/transport.js'; + +const TEST_JWT = 'eyJhbGciOiJFUzI1NiJ9.eyJzdWIiOiJ0ZXN0In0.sig'; + +describe('HTTP transport', () => { + test('extract valid credential', () => { + const jwt = httpExtractCredential('AgentPin ' + TEST_JWT); + assert.strictEqual(jwt, TEST_JWT); + }); + + test('reject missing prefix', () => { + assert.throws( + () => httpExtractCredential('Bearer ' + TEST_JWT), + /Missing 'AgentPin ' prefix/ + ); + }); + + test('reject empty credential after prefix', () => { + assert.throws( + () => httpExtractCredential('AgentPin '), + /Empty credential/ + ); + }); + + test('format roundtrip', () => { + const header = httpFormatAuthorizationHeader(TEST_JWT); + const extracted = httpExtractCredential(header); + assert.strictEqual(extracted, TEST_JWT); + }); +}); + +describe('MCP transport', () => { + test('extract valid credential', () => { + const jwt = mcpExtractCredential({ agentpin_credential: TEST_JWT }); + assert.strictEqual(jwt, TEST_JWT); + }); + + test('reject missing field', () => { + assert.throws( + () => mcpExtractCredential({}), + /Missing 'agentpin_credential'/ + ); 
+ }); + + test('reject non-string field', () => { + assert.throws( + () => mcpExtractCredential({ agentpin_credential: 42 }), + /not a string/ + ); + }); + + test('format roundtrip', () => { + const meta = mcpFormatMetaField(TEST_JWT); + const extracted = mcpExtractCredential(meta); + assert.strictEqual(extracted, TEST_JWT); + }); +}); + +describe('WebSocket transport', () => { + test('extract valid credential', () => { + const msg = JSON.stringify({ type: 'agentpin-auth', credential: TEST_JWT }); + const jwt = wsExtractCredential(msg); + assert.strictEqual(jwt, TEST_JWT); + }); + + test('reject invalid JSON', () => { + assert.throws( + () => wsExtractCredential('not json'), + /Invalid JSON/ + ); + }); + + test('reject wrong type', () => { + const msg = JSON.stringify({ type: 'other', credential: TEST_JWT }); + assert.throws( + () => wsExtractCredential(msg), + /Expected type 'agentpin-auth'/ + ); + }); + + test('reject missing credential field', () => { + const msg = JSON.stringify({ type: 'agentpin-auth' }); + assert.throws( + () => wsExtractCredential(msg), + /Missing or non-string 'credential'/ + ); + }); + + test('format roundtrip', () => { + const msg = wsFormatAuthMessage(TEST_JWT); + const extracted = wsExtractCredential(msg); + assert.strictEqual(extracted, TEST_JWT); + }); +}); + +describe('gRPC transport', () => { + test('metadata key is correct', () => { + assert.strictEqual(GRPC_METADATA_KEY, 'agentpin-credential'); + }); + + test('extract valid credential', () => { + const jwt = grpcExtractCredential(TEST_JWT); + assert.strictEqual(jwt, TEST_JWT); + }); + + test('reject empty value', () => { + assert.throws( + () => grpcExtractCredential(''), + /Empty gRPC metadata/ + ); + }); + + test('reject null value', () => { + assert.throws( + () => grpcExtractCredential(null), + /Empty gRPC metadata/ + ); + }); + + test('format roundtrip', () => { + const val = grpcFormatMetadataValue(TEST_JWT); + const extracted = grpcExtractCredential(val); + 
assert.strictEqual(extracted, TEST_JWT); + }); +}); diff --git a/python/agentpin/__init__.py b/python/agentpin/__init__.py index 88bc081..d48eed5 100644 --- a/python/agentpin/__init__.py +++ b/python/agentpin/__init__.py @@ -1,9 +1,11 @@ """AgentPin: Domain-anchored cryptographic identity protocol for AI agents.""" from .capability import ( + CORE_ACTIONS, Capability, capabilities_hash, capabilities_subset, + validate_capability, ) from .constraint import ( constraints_subset_of, @@ -29,12 +31,36 @@ verify_chain_depth, ) from .discovery import ( + AllowedDomains, build_discovery_document, fetch_discovery_document, find_agent_by_id, find_key_by_kid, validate_discovery_document, ) +from .a2a import ( + build_and_sign_agent_card, + build_unsigned_agent_card, + canonicalize_for_signing, + capability_to_skill, + extension_key_thumbprint, + sign_agent_card, + verify_agentpin_extension, +) +from .dns import ( + fetch_dns_txt, + parse_txt_record, + txt_record_name, + verify_dns_match, +) +from .resolver_local import ( + LocalAgentCardStore, + card_endpoint_host, + derive_discovery_from_card, +) +from .resolver_a2a import ( + A2aAgentCardResolver, +) from .jwk import ( jwk_thumbprint, jwk_to_pem, @@ -51,6 +77,29 @@ create_challenge, create_response, verify_response, + verify_response_with_nonce_store, +) +from .nonce import ( + InMemoryNonceStore, + NonceStore, +) +from .rotation import ( + apply_rotation, + complete_rotation, + prepare_rotation, +) +from .transport import ( + AUTH_TYPE, + FIELD_NAME, + GRPC_METADATA_KEY, + grpc_extract_credential, + grpc_format_metadata_value, + http_extract_credential, + http_format_authorization_header, + mcp_extract_credential, + mcp_format_meta_field, + ws_extract_credential, + ws_format_auth_message, ) from .pinning import ( KeyPinStore, @@ -91,7 +140,7 @@ verify_credential_with_bundle, ) -__version__ = "0.2.0" +__version__ = "0.3.0" __all__ = [ # Types @@ -127,6 +176,8 @@ "Capability", "capabilities_subset", "capabilities_hash", + 
"validate_capability", + "CORE_ACTIONS", # Constraint "parse_rate_limit", "domain_pattern_matches", @@ -140,6 +191,25 @@ "find_key_by_kid", "find_agent_by_id", "fetch_discovery_document", + "AllowedDomains", + # A2A AgentCard (v0.3.0) + "build_and_sign_agent_card", + "build_unsigned_agent_card", + "canonicalize_for_signing", + "capability_to_skill", + "extension_key_thumbprint", + "sign_agent_card", + "verify_agentpin_extension", + # DNS TXT (v0.3.0) + "fetch_dns_txt", + "parse_txt_record", + "txt_record_name", + "verify_dns_match", + # Resolvers (v0.3.0) + "LocalAgentCardStore", + "A2aAgentCardResolver", + "card_endpoint_host", + "derive_discovery_from_card", # Revocation "build_revocation_document", "add_revoked_credential", @@ -160,6 +230,26 @@ "create_challenge", "create_response", "verify_response", + "verify_response_with_nonce_store", + # Nonce + "NonceStore", + "InMemoryNonceStore", + # Rotation + "prepare_rotation", + "apply_rotation", + "complete_rotation", + # Transport + "http_extract_credential", + "http_format_authorization_header", + "FIELD_NAME", + "mcp_extract_credential", + "mcp_format_meta_field", + "AUTH_TYPE", + "ws_extract_credential", + "ws_format_auth_message", + "GRPC_METADATA_KEY", + "grpc_extract_credential", + "grpc_format_metadata_value", # Verification "verify_credential_offline", "verify_credential", diff --git a/python/agentpin/a2a.py b/python/agentpin/a2a.py new file mode 100644 index 0000000..3651b91 --- /dev/null +++ b/python/agentpin/a2a.py @@ -0,0 +1,218 @@ +"""A2A AgentCard signing and verification (v0.3.0). + +Mirrors the Rust ``agentpin::a2a`` module. AgentPin extends the Google A2A +AgentCard format with cryptographic identity verification. The ``agentpin`` +extension carries the AgentPin endpoint URL, the entity's public key in JWK +form, and a detached ECDSA P-256 signature over the canonical bytes of the +rest of the AgentCard. 
+ +Canonicalisation: the signing input is the AgentCard with its ``agentpin`` +field omitted, JSON-serialised with sorted keys and compact separators — +matches the Rust ``serde_json::to_value`` + ``BTreeMap`` trick. +""" + +import json +from typing import Any, Dict, List, Optional, Union + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey + +from .crypto import sign_data, verify_signature +from .discovery import AllowedDomains +from .jwk import jwk_thumbprint, jwk_to_pem, pem_to_jwk +from .types import AgentPinError, ErrorCode + + +# --------------------------------------------------------------------------- +# Capability -> Skill mapping +# --------------------------------------------------------------------------- + + +def capability_to_skill(cap: Union[str, dict]) -> dict: + """Map an AgentPin capability (string or ``{"id": ...}`` dict) to a skill.""" + if isinstance(cap, dict): + id_ = cap.get("id", str(cap)) + else: + id_ = str(cap) + return {"id": id_, "name": id_} + + +# --------------------------------------------------------------------------- +# Builder +# --------------------------------------------------------------------------- + + +def build_unsigned_agent_card( + url: str, + declaration: dict, + *, + skills: Optional[List[dict]] = None, + streaming: bool = False, + push_notifications: bool = False, +) -> dict: + """Build an unsigned A2A AgentCard from an AgentPin ``AgentDeclaration``. + + Capabilities map 1:1 to skills via ``capability_to_skill``; the + ``allowed_domains`` constraint is copied into ``capabilities.allowed_domains`` + (omitted entirely when unrestricted, matching the Rust serde behaviour). 
+ """ + if skills is None or len(skills) == 0: + out_skills = [capability_to_skill(c) for c in declaration.get("capabilities", [])] + else: + out_skills = [dict(s) for s in skills] + + allowed_domains = AllowedDomains.from_constraints(declaration.get("constraints")) + + capabilities: Dict[str, Any] = { + "streaming": bool(streaming), + "pushNotifications": bool(push_notifications), + } + if not AllowedDomains.is_unrestricted(allowed_domains): + capabilities["allowed_domains"] = list(allowed_domains) + + card: Dict[str, Any] = { + "name": declaration["name"], + "url": url, + "capabilities": capabilities, + "skills": out_skills, + } + if declaration.get("description") is not None: + card["description"] = declaration["description"] + if declaration.get("version") is not None: + card["version"] = declaration["version"] + return card + + +def sign_agent_card( + unsigned_card: dict, + private_key_pem: str, + kid: str, + agentpin_endpoint: str, +) -> dict: + """Sign an A2A AgentCard with an ECDSA P-256 private key (PEM). + + Returns the input card with the ``agentpin`` extension populated. + """ + if not agentpin_endpoint: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + "sign_agent_card requires agentpin_endpoint", + ) + + # Sign over the canonical bytes with the extension cleared. + card_for_signing = {k: v for k, v in unsigned_card.items() if k != "agentpin"} + canonical = canonicalize_for_signing(card_for_signing) + signature = sign_data(private_key_pem, canonical.encode("utf-8")) + + # Derive the public-key JWK from the private key. 
+ private_key = serialization.load_pem_private_key( + private_key_pem.encode("utf-8"), password=None + ) + assert isinstance(private_key, EllipticCurvePrivateKey) + public_pem = private_key.public_key().public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ).decode("utf-8") + public_jwk = pem_to_jwk(public_pem, kid) + + signed = dict(unsigned_card) + signed["agentpin"] = { + "agentpin_endpoint": agentpin_endpoint, + "public_key_jwk": public_jwk, + "signature": signature, + } + return signed + + +def build_and_sign_agent_card( + url: str, + declaration: dict, + private_key_pem: str, + kid: str, + agentpin_endpoint: str, + *, + skills: Optional[List[dict]] = None, + streaming: bool = False, + push_notifications: bool = False, +) -> dict: + """One-shot helper: build + sign in a single call.""" + unsigned = build_unsigned_agent_card( + url, + declaration, + skills=skills, + streaming=streaming, + push_notifications=push_notifications, + ) + return sign_agent_card(unsigned, private_key_pem, kid, agentpin_endpoint) + + +# --------------------------------------------------------------------------- +# Verification +# --------------------------------------------------------------------------- + + +def verify_agentpin_extension(card: dict) -> None: + """Verify the ``agentpin`` extension on an A2A AgentCard. + + Raises ``AgentPinError(DISCOVERY_INVALID)`` on any failure (extension + missing, malformed JWK, signature mismatch). + + This proves only that the card has not been tampered with relative to the + key inside its own extension. Pair with ``A2aAgentCardResolver`` for the + full chain back to a trusted AgentPin discovery document. 
+ """ + ext = card.get("agentpin") if card else None + if not ext: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, "AgentCard has no agentpin extension" + ) + + without_ext = {k: v for k, v in card.items() if k != "agentpin"} + canonical = canonicalize_for_signing(without_ext) + public_pem = jwk_to_pem(ext["public_key_jwk"]) + ok = verify_signature(public_pem, canonical.encode("utf-8"), ext["signature"]) + if not ok: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + "A2A AgentCard signature did not verify against extension JWK", + ) + + +def extension_key_thumbprint(extension: dict) -> str: + """JWK thumbprint of the public key carried in a card's ``agentpin`` extension.""" + return jwk_thumbprint(extension["public_key_jwk"]) + + +# --------------------------------------------------------------------------- +# Canonicalisation +# --------------------------------------------------------------------------- + + +def canonicalize_for_signing(value: Any) -> str: + """Canonical JSON: sorted object keys, compact separators. + + Drops ``None`` values from objects so they round-trip identically with the + Rust SDK's ``skip_serializing_if = "Option::is_none"`` behaviour. 
+ """ + return json.dumps( + _sorted_canonical(value), + sort_keys=False, + separators=(",", ":"), + ensure_ascii=False, + ) + + +def _sorted_canonical(value: Any) -> Any: + if value is None: + return None + if isinstance(value, dict): + out: Dict[str, Any] = {} + for k in sorted(value.keys()): + v = value[k] + if v is None: + continue + out[k] = _sorted_canonical(v) + return out + if isinstance(value, list): + return [_sorted_canonical(v) for v in value] + return value diff --git a/python/agentpin/capability.py b/python/agentpin/capability.py index c882bd9..61b138e 100644 --- a/python/agentpin/capability.py +++ b/python/agentpin/capability.py @@ -89,3 +89,39 @@ def capabilities_hash(capabilities: List[Capability]) -> str: sorted_caps = sorted(c.value for c in capabilities) json_str = json.dumps(sorted_caps, separators=(",", ":")) return sha256_hex(json_str.encode("utf-8")) + + +CORE_ACTIONS = ["read", "write", "execute", "admin", "delegate"] + + +def _is_reverse_domain(s: str) -> bool: + """Check if string looks like a reverse domain prefix (contains a dot).""" + return "." in s + + +def validate_capability(cap: Capability) -> None: + """Validate a capability against the AgentPin taxonomy. + + Raises ValueError if invalid. 
+ Rules: + - Must be action:resource format + - admin:* wildcard rejected + - Custom (non-core) actions must use reverse-domain prefix + """ + parsed = Capability.parse(cap.value) + if not parsed: + raise ValueError( + f"Invalid capability format (missing ':'): {cap.value}" + ) + action, resource = parsed + if action == "admin" and resource == "*": + raise ValueError( + "admin:* wildcard is not allowed; admin capabilities must be explicitly scoped" + ) + if action in CORE_ACTIONS: + return + if not _is_reverse_domain(action): + raise ValueError( + f"Custom action '{action}' must use reverse-domain prefix " + f"(e.g., com.example.{action})" + ) diff --git a/python/agentpin/discovery.py b/python/agentpin/discovery.py index f204126..97036d0 100644 --- a/python/agentpin/discovery.py +++ b/python/agentpin/discovery.py @@ -1,6 +1,6 @@ """Discovery document handling for AgentPin.""" -from typing import List, Optional +from typing import Iterable, List, Optional from .types import AgentPinError, ErrorCode @@ -12,9 +12,14 @@ def build_discovery_document( agents: List[dict], max_delegation_depth: int, updated_at: str, + a2a_endpoint: Optional[str] = None, ) -> dict: - """Build a new discovery document.""" - return { + """Build a new discovery document. + + ``a2a_endpoint`` (v0.3.0) optionally specifies the URL of the entity's + A2A AgentCard, enabling cross-protocol discovery. + """ + doc = { "agentpin_version": "0.1", "entity": entity, "entity_type": entity_type, @@ -24,6 +29,69 @@ def build_discovery_document( "max_delegation_depth": max_delegation_depth, "updated_at": updated_at, } + if a2a_endpoint is not None: + doc["a2a_endpoint"] = a2a_endpoint + return doc + + +# --------------------------------------------------------------------------- +# v0.3.0: AllowedDomains helpers +# --------------------------------------------------------------------------- + + +class AllowedDomains: + """Helpers for the ``allowed_domains`` constraint as a typed allow-list. 
+ + Convention: an empty list means *unrestricted* (all domains trusted); a + non-empty list restricts the agent to exactly those domains. Mirrors the + ``AllowedDomains`` type in the Rust SDK. + + All methods are static — instances are plain ``list[str]``. + """ + + @staticmethod + def unrestricted() -> List[str]: + """Construct an empty (unrestricted) list.""" + return [] + + @staticmethod + def from_domains(iter_: Iterable[str]) -> List[str]: + """Construct from any iterable of strings.""" + return [str(d) for d in iter_] + + @staticmethod + def is_unrestricted(list_: Optional[List[str]]) -> bool: + """``True`` when the list is empty (no restriction).""" + return not list_ + + @staticmethod + def allows(list_: Optional[List[str]], domain: str) -> bool: + """``True`` when ``domain`` is allowed under this list.""" + return AllowedDomains.is_unrestricted(list_) or domain in list_ + + @staticmethod + def intersect(a: Optional[List[str]], b: Optional[List[str]]) -> List[str]: + """Intersection of two allow-lists. ``unrestricted ∩ X = X``.""" + if AllowedDomains.is_unrestricted(a): + return list(b or []) + if AllowedDomains.is_unrestricted(b): + return list(a or []) + b_set = set(b or []) + return [d for d in a if d in b_set] + + @staticmethod + def from_constraints(constraints: Optional[dict]) -> List[str]: + """Pull the typed list from a constraints dict. + + Returns ``unrestricted()`` when constraints are ``None`` or have no + ``allowed_domains`` field. + """ + if not constraints: + return AllowedDomains.unrestricted() + ad = constraints.get("allowed_domains") + if ad is None: + return AllowedDomains.unrestricted() + return list(ad) def validate_discovery_document(doc: dict, expected_entity: str) -> None: diff --git a/python/agentpin/dns.py b/python/agentpin/dns.py new file mode 100644 index 0000000..ba7011f --- /dev/null +++ b/python/agentpin/dns.py @@ -0,0 +1,152 @@ +"""DNS TXT cross-verification at ``_agentpin.{domain}`` (v0.3.0). 
+ +Mirrors the Rust ``agentpin::dns`` module. The wire format is + + _agentpin.example.com. 3600 IN TXT "v=agentpin1; kid=acme-2026-04; fp=sha256:a1b2c3..." + +Semantics: + - Absent record -> no effect (DNS TXT is purely additive) + - Present matching -> verification succeeds + - Present mismatching / malformed -> hard failure (DISCOVERY_INVALID) + +Mismatch is fail-closed because a publisher who *intentionally* published a +TXT record has signaled DNS is part of their trust chain. +""" + +from typing import Optional + +from .jwk import jwk_thumbprint +from .types import AgentPinError, ErrorCode + + +VERSION = "agentpin1" +FP_PREFIX = "sha256:" + + +def parse_txt_record(value: str) -> dict: + """Parse a raw ``_agentpin.{domain}`` TXT record value. + + Whitespace around ``;`` and ``=`` is tolerated. Field order is not + significant. Unknown fields are ignored for forward compatibility. + + Returns a dict ``{"version": ..., "kid": str|None, "fingerprint": ...}``. + Raises ``AgentPinError(DISCOVERY_INVALID)`` on malformed input. + """ + version: Optional[str] = None + kid: Optional[str] = None + fp: Optional[str] = None + + for raw_part in value.split(";"): + part = raw_part.strip() + if not part: + continue + if "=" not in part: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + f"DNS TXT field missing '=': {part}", + ) + k, _, v = part.partition("=") + k = k.strip().lower() + v = v.strip() + if k == "v": + version = v + elif k == "kid": + kid = v + elif k == "fp": + fp = v.lower() + else: + # Forward-compat: ignore unknown fields. 
+ continue + + if version is None: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + "DNS TXT record missing required 'v' field", + ) + if version != VERSION: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + f"DNS TXT unsupported version: {version}", + ) + if fp is None: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + "DNS TXT record missing required 'fp' field", + ) + if not fp.startswith(FP_PREFIX): + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + f"DNS TXT 'fp' must be sha256:: {fp}", + ) + + return {"version": version, "kid": kid, "fingerprint": fp} + + +def verify_dns_match(discovery: dict, txt: dict) -> None: + """Cross-check a parsed TXT record's fingerprint against a discovery doc. + + Raises ``AgentPinError(DISCOVERY_INVALID)`` when no key in + ``discovery['public_keys']`` matches ``txt['fingerprint']`` (and + ``txt['kid']`` when present). + """ + target = txt["fingerprint"].lower() + for jwk in discovery.get("public_keys", []): + computed = jwk_thumbprint(jwk).lower() + if not computed.startswith(FP_PREFIX): + computed = FP_PREFIX + computed + if computed != target: + continue + txt_kid = txt.get("kid") + if txt_kid and jwk.get("kid") != txt_kid: + continue + return + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + f"DNS TXT fingerprint {target} does not match any key in the discovery document", + ) + + +def txt_record_name(domain: str) -> str: + """Build the lookup name for a domain: ``_agentpin.{domain}`` with any + trailing dot stripped.""" + return f"_agentpin.{domain.rstrip('.')}" + + +def fetch_dns_txt(domain: str) -> Optional[dict]: + """Fetch and parse the ``_agentpin.{domain}`` TXT record. + + Uses ``dnspython`` (an optional install). Returns: + - ``None`` when no ``_agentpin`` TXT record exists for the domain + - parsed record when present + Raises ``AgentPinError(DISCOVERY_INVALID)`` when the record exists but is + malformed, or ``AgentPinError(DISCOVERY_FETCH_FAILED)`` for other DNS + errors. 
+ + When multiple TXT records exist at the same name, the first whose value + contains ``v=agentpin1`` is used. + """ + try: + import dns.resolver + import dns.exception + except ImportError as exc: # pragma: no cover - optional dep + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + "fetch_dns_txt requires the 'dnspython' package", + ) from exc + + name = txt_record_name(domain) + try: + answers = dns.resolver.resolve(name, "TXT") + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + return None + except dns.exception.DNSException as exc: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + f"DNS TXT lookup failed for {name}: {exc}", + ) from exc + + for rdata in answers: + joined = b"".join(rdata.strings).decode("utf-8", errors="replace") + if "v=agentpin1" in joined: + return parse_txt_record(joined) + return None diff --git a/python/agentpin/mutual.py b/python/agentpin/mutual.py index 143c467..7921067 100644 --- a/python/agentpin/mutual.py +++ b/python/agentpin/mutual.py @@ -64,3 +64,13 @@ def verify_response(response: dict, challenge: dict, public_key_pem: str) -> boo # Verify signature over the nonce return verify_signature(public_key_pem, challenge["nonce"].encode("utf-8"), response["signature"]) + + +def verify_response_with_nonce_store(response, challenge, public_key_pem, nonce_store=None): + """Verify with optional nonce dedup. 
Raises ValueError on nonce reuse.""" + if nonce_store is not None: + if not nonce_store.check_and_record(response["nonce"], NONCE_EXPIRY_SECS): + raise ValueError( + f"Nonce '{response['nonce']}' has already been used (replay attack)" + ) + return verify_response(response, challenge, public_key_pem) diff --git a/python/agentpin/nonce.py b/python/agentpin/nonce.py new file mode 100644 index 0000000..b2b73ae --- /dev/null +++ b/python/agentpin/nonce.py @@ -0,0 +1,32 @@ +"""Nonce deduplication for replay attack prevention.""" + +import threading +import time + + +class NonceStore: + """Abstract base for nonce deduplication.""" + + def check_and_record(self, nonce: str, ttl_seconds: float) -> bool: + """Check if nonce is fresh. Returns True if fresh, False if replay.""" + raise NotImplementedError + + +class InMemoryNonceStore(NonceStore): + """In-memory nonce store with lazy expiry cleanup.""" + + def __init__(self): + self._entries = {} # nonce -> expiry_time + self._lock = threading.Lock() + + def check_and_record(self, nonce: str, ttl_seconds: float) -> bool: + with self._lock: + now = time.monotonic() + # Lazy cleanup + self._entries = {k: v for k, v in self._entries.items() if v > now} + # Check + if nonce in self._entries: + return False + # Record + self._entries[nonce] = now + ttl_seconds + return True diff --git a/python/agentpin/resolver_a2a.py b/python/agentpin/resolver_a2a.py new file mode 100644 index 0000000..a454dcf --- /dev/null +++ b/python/agentpin/resolver_a2a.py @@ -0,0 +1,123 @@ +"""A2aAgentCardResolver (v0.3.0) — fetches A2A AgentCards over HTTPS. + +Mirrors the Rust ``agentpin::resolver_a2a`` module: + 1. GET https://{domain}/.well-known/agent-card.json + 2. Verify the AgentPin extension signature against its embedded JWK + 3. Cross-check that the agentpin endpoint inside the card matches the + fetched domain (defends against a card pointing at someone else's + AgentPin discovery) + 4. 
Derive a DiscoveryDocument so the rest of the AgentPin stack runs + unchanged +""" + +from threading import RLock +from typing import Any, Callable, Optional + +from .a2a import verify_agentpin_extension +from .resolver_local import card_endpoint_host, derive_discovery_from_card +from .types import AgentPinError, ErrorCode + + +AGENT_CARD_PATH = "/.well-known/agent-card.json" +DEFAULT_TIMEOUT_SECS = 10.0 + +_FetchFn = Callable[[str], Any] + + +class A2aAgentCardResolver: + """Resolver that fetches an A2A AgentCard from a domain over HTTPS and + exposes both the original card and the derived discovery document. + + Uses ``requests`` by default; pass ``fetch=...`` to inject a custom + callable (useful for tests). The callable receives the URL and must + return an object with ``ok``, ``status_code`` and ``.json()`` attributes + (matching ``requests.Response``). + """ + + def __init__( + self, + *, + timeout: float = DEFAULT_TIMEOUT_SECS, + fetch: Optional[_FetchFn] = None, + ) -> None: + self.timeout = timeout + self._fetch = fetch + self._last_card: Optional[dict] = None + self._last_domain: Optional[str] = None + self._lock = RLock() + + def last_card(self, domain: str) -> Optional[dict]: + """Return the last successfully resolved AgentCard for ``domain``, + or ``None``.""" + with self._lock: + if self._last_domain != domain: + return None + return self._last_card + + def resolve_discovery(self, domain: str) -> dict: + """Fetch + verify the AgentCard at + ``https://{domain}/.well-known/agent-card.json`` and return the + derived discovery document.""" + url = f"https://{domain}{AGENT_CARD_PATH}" + + try: + response = self._do_fetch(url) + except AgentPinError: + raise + except Exception as exc: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + f"Failed to fetch {url}: {exc}", + ) from exc + + # Tolerate both requests.Response (with .ok / .status_code) and + # custom test stubs that expose either property. 
+ ok = getattr(response, "ok", None) + status = getattr(response, "status_code", None) + if ok is None and status is not None: + ok = 200 <= status < 300 + if not ok: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + f"Failed to fetch {url}: HTTP {status}", + ) + + try: + card = response.json() + except Exception as exc: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + f"Failed to parse AgentCard at {url}: {exc}", + ) from exc + + verify_agentpin_extension(card) + + endpoint_host = card_endpoint_host(card) + if endpoint_host != domain: + raise AgentPinError( + ErrorCode.DOMAIN_MISMATCH, + f"AgentCard at {domain} declares agentpin endpoint host " + f"{endpoint_host} (mismatch)", + ) + + discovery = derive_discovery_from_card(card) + with self._lock: + self._last_card = card + self._last_domain = domain + return discovery + + def resolve_revocation(self, _domain: str, _discovery: dict) -> None: + """A2A AgentCards don't carry revocation data. Always returns ``None``.""" + return None + + def _do_fetch(self, url: str): + if self._fetch is not None: + return self._fetch(url) + import requests # local import keeps `requests` an optional dep + + return requests.get( + url, + headers={"Accept": "application/json"}, + allow_redirects=False, + timeout=self.timeout, + ) diff --git a/python/agentpin/resolver_local.py b/python/agentpin/resolver_local.py new file mode 100644 index 0000000..e5bf954 --- /dev/null +++ b/python/agentpin/resolver_local.py @@ -0,0 +1,159 @@ +"""LocalAgentCardStore (v0.3.0) — in-memory A2A AgentCard store. + +Mirrors the Rust ``agentpin::resolver_local`` module. For agents that don't +serve HTTP themselves (CLI tools, daemon processes, external agents pushed +into a coordinator at registration time), the coordinator can keep their +AgentCards in memory and look them up by domain without making network +calls — supporting Symbiont's push-based external-agent registration flow. 
+""" + +from datetime import datetime, timezone +from threading import RLock +from typing import Dict, Optional +from urllib.parse import urlparse + +from .a2a import verify_agentpin_extension +from .discovery import AllowedDomains +from .types import AgentPinError, AgentStatus, EntityType, ErrorCode + + +def card_endpoint_host(card: dict) -> str: + """Derive the host of the AgentCard's agentpin endpoint URL.""" + ext = card.get("agentpin") if card else None + if not ext: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, "AgentCard has no agentpin extension" + ) + parsed = urlparse(ext["agentpin_endpoint"]) + if not parsed.hostname: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + "agentpin_endpoint URL has no host", + ) + return parsed.hostname + + +def _slug(input_: str) -> str: + out = [] + for ch in input_: + if ch.isascii() and ch.isalnum(): + out.append(ch.lower()) + else: + out.append("-") + return "".join(out).strip("-") + + +def derive_discovery_from_card(card: dict) -> dict: + """Derive a minimal discovery document from a signed A2A AgentCard. + + The card's public-key JWK becomes the sole ``public_keys`` entry; the + card's name/description/version/skills become a single agent declaration + so the rest of the AgentPin verification stack (TOFU pinning, revocation, + capability validation) runs against AgentCards unchanged. 
+ """ + ext = card.get("agentpin") + if not ext: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, "AgentCard has no agentpin extension" + ) + domain = card_endpoint_host(card) + + capabilities = [s["id"] for s in card.get("skills", [])] + allowed_domains = (card.get("capabilities") or {}).get("allowed_domains", []) + constraints = ( + None + if AllowedDomains.is_unrestricted(allowed_domains) + else {"allowed_domains": list(allowed_domains)} + ) + + agent_id = f"urn:agentpin:{domain}:{_slug(card['name'])}" + agent: Dict[str, object] = { + "agent_id": agent_id, + "name": card["name"], + "capabilities": capabilities, + "status": AgentStatus.ACTIVE, + } + if card.get("description") is not None: + agent["description"] = card["description"] + if card.get("version") is not None: + agent["version"] = card["version"] + if constraints: + agent["constraints"] = constraints + + return { + "agentpin_version": "0.3", + "entity": domain, + "entity_type": EntityType.BOTH, + "public_keys": [ext["public_key_jwk"]], + "agents": [agent], + "a2a_endpoint": ext["agentpin_endpoint"], + "max_delegation_depth": 0, + "updated_at": datetime.now(timezone.utc) + .replace(microsecond=0) + .isoformat() + .replace("+00:00", "Z"), + } + + +class LocalAgentCardStore: + """In-memory store of pre-registered A2A AgentCards keyed by their + AgentPin discovery domain. + + Cards are added via :meth:`register` (after the extension signature is + verified) and looked up via :meth:`resolve_discovery`. Pair with a chain + resolver to fall back to HTTP for unregistered domains. + """ + + def __init__(self) -> None: + self._cards: Dict[str, dict] = {} + self._docs: Dict[str, dict] = {} + self._lock = RLock() + + def register(self, card: dict) -> None: + """Register an AgentCard. Verifies the extension signature before + storing. 
Re-registering an existing domain replaces the prior entry.""" + verify_agentpin_extension(card) + domain = card_endpoint_host(card) + doc = derive_discovery_from_card(card) + with self._lock: + self._cards[domain] = card + self._docs[domain] = doc + + def __len__(self) -> int: + with self._lock: + return len(self._cards) + + def is_empty(self) -> bool: + return len(self) == 0 + + def resolve_card(self, domain: str) -> Optional[dict]: + """Return the raw AgentCard for a domain, or ``None``.""" + with self._lock: + return self._cards.get(domain) + + def resolve_discovery(self, domain: str) -> dict: + """Resolve the derived discovery document for a domain. + + Raises ``AgentPinError(DISCOVERY_INVALID)`` when the domain isn't + registered. + """ + with self._lock: + doc = self._docs.get(domain) + if doc is None: + raise AgentPinError( + ErrorCode.DISCOVERY_INVALID, + f"Domain '{domain}' not in LocalAgentCardStore", + ) + return doc + + def resolve_revocation(self, _domain: str, _discovery: dict) -> None: + """The store doesn't carry revocation data — pair with an HTTP/file + revocation resolver. Always returns ``None``.""" + return None + + def remove(self, domain: str) -> bool: + """Drop a registered AgentCard. Returns ``True`` when one was removed.""" + with self._lock: + had = self._cards.pop(domain, None) is not None + self._docs.pop(domain, None) + return had diff --git a/python/agentpin/rotation.py b/python/agentpin/rotation.py new file mode 100644 index 0000000..bed39cb --- /dev/null +++ b/python/agentpin/rotation.py @@ -0,0 +1,40 @@ +"""Key rotation helpers for AgentPin.""" + +from datetime import datetime, timezone + +from .crypto import generate_key_id, generate_key_pair +from .jwk import pem_to_jwk +from .revocation import add_revoked_key + + +def prepare_rotation(old_kid: str) -> dict: + """Prepare a key rotation: generate new keypair, compute kid and JWK. 
+ + Returns dict with keys: new_key_pair, new_kid, new_jwk, old_kid + """ + private_pem, public_pem = generate_key_pair() + new_kid = generate_key_id(public_pem) + new_jwk = pem_to_jwk(public_pem, new_kid) + return { + "new_key_pair": (private_pem, public_pem), + "new_kid": new_kid, + "new_jwk": new_jwk, + "old_kid": old_kid, + } + + +def apply_rotation(doc: dict, plan: dict) -> None: + """Apply rotation plan: add new key to discovery document.""" + doc["public_keys"].append(plan["new_jwk"]) + doc["updated_at"] = datetime.now(timezone.utc).isoformat() + + +def complete_rotation( + doc: dict, revocation_doc: dict, old_kid: str, reason: str +) -> None: + """Complete rotation: remove old key from discovery, add to revocation.""" + doc["public_keys"] = [ + k for k in doc["public_keys"] if k.get("kid") != old_kid + ] + doc["updated_at"] = datetime.now(timezone.utc).isoformat() + add_revoked_key(revocation_doc, old_kid, reason) diff --git a/python/agentpin/transport.py b/python/agentpin/transport.py new file mode 100644 index 0000000..bc6c3ae --- /dev/null +++ b/python/agentpin/transport.py @@ -0,0 +1,107 @@ +"""Transport binding helpers for AgentPin (spec Section 13).""" + +import json + +from .types import AgentPinError, ErrorCode + +# --- HTTP --- + + +def http_extract_credential(header_value: str) -> str: + """Extract JWT from 'Authorization: AgentPin ' header value.""" + prefix = "AgentPin " + if not header_value.startswith(prefix): + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + "Missing 'AgentPin ' prefix in Authorization header", + ) + jwt = header_value[len(prefix):] + if not jwt: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + "Empty credential in Authorization header", + ) + return jwt + + +def http_format_authorization_header(jwt: str) -> str: + """Format JWT for Authorization header: 'AgentPin '.""" + return f"AgentPin {jwt}" + + +# --- MCP --- + +FIELD_NAME = "agentpin_credential" + + +def mcp_extract_credential(meta: dict) -> str: 
+ """Extract JWT from MCP metadata dict's 'agentpin_credential' field.""" + if FIELD_NAME not in meta: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + f"Missing '{FIELD_NAME}' field in MCP metadata", + ) + value = meta[FIELD_NAME] + if not isinstance(value, str): + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + f"'{FIELD_NAME}' field is not a string", + ) + return value + + +def mcp_format_meta_field(jwt: str) -> dict: + """Format JWT as MCP metadata dict.""" + return {FIELD_NAME: jwt} + + +# --- WebSocket --- + +AUTH_TYPE = "agentpin-auth" + + +def ws_extract_credential(message: str) -> str: + """Extract JWT from WebSocket JSON auth message.""" + try: + parsed = json.loads(message) + except json.JSONDecodeError as e: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, f"Invalid JSON: {e}" + ) + msg_type = parsed.get("type") + if msg_type != AUTH_TYPE: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + f"Expected type '{AUTH_TYPE}', got '{msg_type}'", + ) + credential = parsed.get("credential") + if not isinstance(credential, str): + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, + "Missing or non-string 'credential' field", + ) + return credential + + +def ws_format_auth_message(jwt: str) -> str: + """Format JWT as WebSocket auth message JSON string.""" + return json.dumps({"type": AUTH_TYPE, "credential": jwt}) + + +# --- gRPC --- + +GRPC_METADATA_KEY = "agentpin-credential" + + +def grpc_extract_credential(metadata_value: str) -> str: + """Extract JWT from gRPC metadata value.""" + if not metadata_value: + raise AgentPinError( + ErrorCode.DISCOVERY_FETCH_FAILED, "Empty gRPC metadata value" + ) + return metadata_value + + +def grpc_format_metadata_value(jwt: str) -> str: + """Format JWT for gRPC metadata (identity function, documents key name).""" + return jwt diff --git a/python/pyproject.toml b/python/pyproject.toml index e0c4730..65ee0d2 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -4,7 
+4,7 @@ build-backend = "setuptools.build_meta" [project] name = "agentpin" -version = "0.2.0" +version = "0.3.0" description = "Domain-anchored cryptographic identity protocol for AI agents" readme = "README.md" license = {text = "MIT"} diff --git a/python/setup.cfg b/python/setup.cfg index 8da2af2..5037d60 100644 --- a/python/setup.cfg +++ b/python/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = agentpin -version = 0.1.0 +version = 0.3.0 [options] packages = find: diff --git a/python/tests/test_a2a.py b/python/tests/test_a2a.py new file mode 100644 index 0000000..3b787e1 --- /dev/null +++ b/python/tests/test_a2a.py @@ -0,0 +1,134 @@ +"""Tests for A2A AgentCard types, builder, and verification (v0.3.0).""" + +import json + +import pytest + +from agentpin import ( + AgentStatus, + build_and_sign_agent_card, + build_unsigned_agent_card, + canonicalize_for_signing, + capability_to_skill, + extension_key_thumbprint, + generate_key_pair, + jwk_thumbprint, + sign_agent_card, + verify_agentpin_extension, +) + + +def _declaration(capabilities, allowed_domains=None): + decl = { + "agent_id": "urn:agentpin:example.com:test", + "name": "Test Agent", + "description": "test", + "version": "1.0.0", + "capabilities": capabilities, + "credential_ttl_max": 3600, + "status": AgentStatus.ACTIVE, + } + if allowed_domains is not None: + decl["constraints"] = {"allowed_domains": allowed_domains} + return decl + + +def test_capability_to_skill_maps_string(): + skill = capability_to_skill("read:customers/*") + assert skill["id"] == "read:customers/*" + assert skill["name"] == "read:customers/*" + + +def test_build_unsigned_card_maps_capabilities_to_skills(): + card = build_unsigned_agent_card( + "https://example.com/agent", + _declaration(["read:customers", "write:invoices"]), + ) + assert len(card["skills"]) == 2 + assert card["skills"][0]["id"] == "read:customers" + assert "agentpin" not in card + + +def test_build_unsigned_card_maps_allowed_domains(): + card = build_unsigned_agent_card( + 
"https://example.com/agent", + _declaration(["read:*"], ["a.com", "b.com"]), + ) + assert card["capabilities"]["allowed_domains"] == ["a.com", "b.com"] + + +def test_build_unsigned_card_omits_allowed_domains_when_unrestricted(): + card = build_unsigned_agent_card( + "https://example.com/agent", _declaration(["read:*"]) + ) + assert "allowed_domains" not in card["capabilities"] + + +def test_sign_requires_agentpin_endpoint(): + private_pem, _ = generate_key_pair() + unsigned = build_unsigned_agent_card( + "https://example.com/agent", _declaration(["read:*"]) + ) + with pytest.raises(Exception): + sign_agent_card(unsigned, private_pem, "kid-1", "") + + +def test_signed_card_round_trips_and_verifies(): + private_pem, _ = generate_key_pair() + card = build_and_sign_agent_card( + "https://example.com/agent", + _declaration(["read:customers", "write:invoices"], ["partner.com"]), + private_pem, + "kid-1", + "https://example.com/.well-known/agent-identity.json", + streaming=True, + ) + assert "agentpin" in card + verify_agentpin_extension(card) + parsed = json.loads(json.dumps(card)) + verify_agentpin_extension(parsed) + + +def test_verify_fails_when_extension_missing(): + card = build_unsigned_agent_card( + "https://example.com/agent", _declaration(["read:*"]) + ) + with pytest.raises(Exception, match="no agentpin extension"): + verify_agentpin_extension(card) + + +def test_verify_fails_when_card_tampered(): + private_pem, _ = generate_key_pair() + card = build_and_sign_agent_card( + "https://example.com/agent", + _declaration(["read:customers"]), + private_pem, + "kid-1", + "https://example.com/.well-known/agent-identity.json", + ) + card["url"] = "https://attacker.example/agent" + with pytest.raises(Exception, match="did not verify"): + verify_agentpin_extension(card) + + +def test_extension_key_thumbprint_matches_jwk_thumbprint(): + private_pem, _ = generate_key_pair() + card = build_and_sign_agent_card( + "https://example.com/agent", + _declaration(["read:*"]), + 
private_pem, + "kid-1", + "https://example.com/.well-known/agent-identity.json", + ) + ext = card["agentpin"] + assert extension_key_thumbprint(ext) == jwk_thumbprint(ext["public_key_jwk"]) + + +def test_canonicalize_sorts_keys_and_drops_none(): + out = canonicalize_for_signing({"b": 1, "a": {"d": 4, "c": 3}, "z": None}) + assert out == '{"a":{"c":3,"d":4},"b":1}' + + +def test_canonicalize_recurses_into_arrays(): + out = canonicalize_for_signing([{"b": 1, "a": 2}]) + assert out == '[{"a":2,"b":1}]' diff --git a/python/tests/test_allowed_domains.py b/python/tests/test_allowed_domains.py new file mode 100644 index 0000000..f2bc603 --- /dev/null +++ b/python/tests/test_allowed_domains.py @@ -0,0 +1,70 @@ +"""Tests for AllowedDomains helpers + a2a_endpoint discovery field (v0.3.0).""" + +import pytest + +from agentpin import ( + AllowedDomains, + EntityType, + build_discovery_document, +) + + +def test_unrestricted_accepts_anything(): + ad = AllowedDomains.unrestricted() + assert AllowedDomains.is_unrestricted(ad) + assert AllowedDomains.allows(ad, "anything.com") + + +def test_restricted_filters(): + ad = AllowedDomains.from_domains(["a.com", "b.com"]) + assert not AllowedDomains.is_unrestricted(ad) + assert AllowedDomains.allows(ad, "a.com") + assert not AllowedDomains.allows(ad, "c.com") + + +def test_intersect_with_unrestricted_returns_other(): + unrestricted = AllowedDomains.unrestricted() + restricted = AllowedDomains.from_domains(["a.com", "b.com"]) + assert AllowedDomains.intersect(unrestricted, restricted) == restricted + assert AllowedDomains.intersect(restricted, unrestricted) == restricted + + +def test_intersect_returns_overlap(): + lhs = AllowedDomains.from_domains(["a.com", "b.com", "c.com"]) + rhs = AllowedDomains.from_domains(["b.com", "c.com", "d.com"]) + assert AllowedDomains.intersect(lhs, rhs) == ["b.com", "c.com"] + + +def test_from_constraints_with_list(): + out = AllowedDomains.from_constraints({"allowed_domains": ["a.com"]}) + assert out == 
["a.com"] + + +def test_from_constraints_without_field(): + assert AllowedDomains.is_unrestricted(AllowedDomains.from_constraints({})) + assert AllowedDomains.is_unrestricted(AllowedDomains.from_constraints(None)) + + +def test_build_discovery_document_with_a2a_endpoint(): + doc = build_discovery_document( + "example.com", + EntityType.MAKER, + [{"kid": "k", "kty": "EC", "crv": "P-256", "x": "x", "y": "y"}], + [], + 2, + "2026-05-01T00:00:00Z", + a2a_endpoint="https://example.com/.well-known/agent-card.json", + ) + assert doc["a2a_endpoint"] == "https://example.com/.well-known/agent-card.json" + + +def test_build_discovery_document_without_a2a_endpoint_omits_field(): + doc = build_discovery_document( + "example.com", + EntityType.MAKER, + [{"kid": "k", "kty": "EC", "crv": "P-256", "x": "x", "y": "y"}], + [], + 2, + "2026-05-01T00:00:00Z", + ) + assert "a2a_endpoint" not in doc diff --git a/python/tests/test_capability.py b/python/tests/test_capability.py index b1dd6a2..bfe80ac 100644 --- a/python/tests/test_capability.py +++ b/python/tests/test_capability.py @@ -1,6 +1,14 @@ """Tests for capability parsing and matching.""" -from agentpin.capability import Capability, capabilities_hash, capabilities_subset +import pytest + +from agentpin.capability import ( + CORE_ACTIONS, + Capability, + capabilities_hash, + capabilities_subset, + validate_capability, +) class TestCapabilityParse: @@ -52,3 +60,34 @@ def test_order_independent(self): caps1 = [Capability("read:codebase"), Capability("write:report")] caps2 = [Capability("write:report"), Capability("read:codebase")] assert capabilities_hash(caps1) == capabilities_hash(caps2) + + +class TestValidateCapability: + def test_validate_core_action(self): + validate_capability(Capability("read:codebase")) + validate_capability(Capability("write:report")) + validate_capability(Capability("execute:task")) + + def test_validate_wildcard(self): + validate_capability(Capability("read:*")) + 
validate_capability(Capability("write:*")) + + def test_validate_admin_wildcard_rejected(self): + with pytest.raises(ValueError, match="admin:\\* wildcard is not allowed"): + validate_capability(Capability("admin:*")) + + def test_validate_admin_scoped_ok(self): + validate_capability(Capability("admin:users")) + validate_capability(Capability("admin:config")) + + def test_validate_custom_action_with_domain(self): + validate_capability(Capability("com.example.audit:logs")) + validate_capability(Capability("org.acme.deploy:staging")) + + def test_validate_custom_action_without_domain(self): + with pytest.raises(ValueError, match="reverse-domain prefix"): + validate_capability(Capability("audit:logs")) + + def test_validate_missing_colon(self): + with pytest.raises(ValueError, match="missing ':'"): + validate_capability(Capability("readcodebase")) diff --git a/python/tests/test_dns.py b/python/tests/test_dns.py new file mode 100644 index 0000000..86102b4 --- /dev/null +++ b/python/tests/test_dns.py @@ -0,0 +1,135 @@ +"""Tests for DNS TXT cross-verification (v0.3.0).""" + +import pytest + +from agentpin import ( + EntityType, + generate_key_pair, + jwk_thumbprint, + parse_txt_record, + pem_to_jwk, + txt_record_name, + verify_dns_match, +) + + +def _discovery(jwks): + return { + "agentpin_version": "0.3", + "entity": "example.com", + "entity_type": EntityType.MAKER, + "public_keys": jwks, + "agents": [], + "max_delegation_depth": 0, + "updated_at": "2026-05-01T00:00:00Z", + } + + +def test_parse_full_record(): + r = parse_txt_record("v=agentpin1; kid=acme-2026-04; fp=sha256:abcd1234") + assert r["version"] == "agentpin1" + assert r["kid"] == "acme-2026-04" + assert r["fingerprint"] == "sha256:abcd1234" + + +def test_parse_minimal_record(): + r = parse_txt_record("v=agentpin1;fp=sha256:abc") + assert r["version"] == "agentpin1" + assert r["kid"] is None + assert r["fingerprint"] == "sha256:abc" + + +def test_parse_lowercases_fingerprint(): + r = 
parse_txt_record("v=agentpin1; fp=SHA256:ABCDEF") + assert r["fingerprint"] == "sha256:abcdef" + + +def test_parse_tolerates_whitespace_and_order(): + r = parse_txt_record(" fp = sha256:beef ; v = agentpin1 ") + assert r["version"] == "agentpin1" + assert r["fingerprint"] == "sha256:beef" + + +def test_parse_ignores_unknown_fields(): + r = parse_txt_record("v=agentpin1; fp=sha256:abc; future=ignoreme") + assert r["fingerprint"] == "sha256:abc" + + +def test_parse_missing_v_fails(): + with pytest.raises(Exception): + parse_txt_record("fp=sha256:abc") + + +def test_parse_missing_fp_fails(): + with pytest.raises(Exception): + parse_txt_record("v=agentpin1") + + +def test_parse_unsupported_version_fails(): + with pytest.raises(Exception): + parse_txt_record("v=agentpin99; fp=sha256:abc") + + +def test_parse_fp_without_sha256_prefix_fails(): + with pytest.raises(Exception): + parse_txt_record("v=agentpin1; fp=abc") + + +def test_parse_field_without_equals_fails(): + with pytest.raises(Exception): + parse_txt_record("v=agentpin1; broken") + + +def test_schemapin_record_rejected(): + """Sanity: must reject SchemaPin's TXT format.""" + with pytest.raises(Exception): + parse_txt_record("v=schemapin1; fp=sha256:abc") + + +def _fp_for(jwk): + t = jwk_thumbprint(jwk).lower() + return t if t.startswith("sha256:") else f"sha256:{t}" + + +def test_verify_match_against_single_key(): + _, public_pem = generate_key_pair() + jwk = pem_to_jwk(public_pem, "kid-1") + doc = _discovery([jwk]) + txt = {"kid": None, "fingerprint": _fp_for(jwk)} + verify_dns_match(doc, txt) + + +def test_verify_match_against_one_of_multiple_keys(): + _, pk1 = generate_key_pair() + _, pk2 = generate_key_pair() + jwk1 = pem_to_jwk(pk1, "kid-a") + jwk2 = pem_to_jwk(pk2, "kid-b") + doc = _discovery([jwk1, jwk2]) + txt = {"kid": "kid-b", "fingerprint": _fp_for(jwk2)} + verify_dns_match(doc, txt) + + +def test_verify_kid_mismatch_fails_even_when_fp_matches(): + _, pk = generate_key_pair() + jwk = pem_to_jwk(pk, 
"kid-real") + doc = _discovery([jwk]) + txt = {"kid": "kid-different", "fingerprint": _fp_for(jwk)} + with pytest.raises(Exception): + verify_dns_match(doc, txt) + + +def test_verify_mismatch_raises(): + _, pk = generate_key_pair() + jwk = pem_to_jwk(pk, "kid-1") + doc = _discovery([jwk]) + txt = { + "kid": None, + "fingerprint": "sha256:0000000000000000000000000000000000000000000000000000000000000000", + } + with pytest.raises(Exception): + verify_dns_match(doc, txt) + + +def test_txt_record_name_strips_trailing_dot(): + assert txt_record_name("example.com") == "_agentpin.example.com" + assert txt_record_name("example.com.") == "_agentpin.example.com" diff --git a/python/tests/test_integration.py b/python/tests/test_integration.py new file mode 100644 index 0000000..ead0786 --- /dev/null +++ b/python/tests/test_integration.py @@ -0,0 +1,267 @@ +"""End-to-end integration tests for AgentPin.""" + +import json + +import pytest + +from agentpin import ( + KeyPinStore, + PinningResult, + VerifierConfig, + build_discovery_document, + build_revocation_document, + add_revoked_key, + check_pinning, + check_revocation, + create_challenge, + create_response, + generate_key_id, + generate_key_pair, + issue_credential, + pem_to_jwk, + decode_jwt_unverified, + verify_jwt, + verify_credential_offline, + verify_response_with_nonce_store, + http_extract_credential, + http_format_authorization_header, + mcp_extract_credential, + mcp_format_meta_field, + ws_extract_credential, + ws_format_auth_message, + grpc_extract_credential, + grpc_format_metadata_value, + apply_rotation, + complete_rotation, + prepare_rotation, + AgentPinError, +) +from agentpin.nonce import InMemoryNonceStore + + +def make_test_setup(): + """Create a keypair, kid, JWK, and discovery document for testing.""" + private_pem, public_pem = generate_key_pair() + kid = generate_key_id(public_pem) + jwk = pem_to_jwk(public_pem, kid) + agent_id = "urn:agentpin:example.com:test-agent" + doc = build_discovery_document( + 
entity="example.com", + entity_type="maker", + public_keys=[jwk], + agents=[ + { + "agent_id": agent_id, + "name": "Test Agent", + "capabilities": ["read:*", "write:report"], + "status": "active", + "credential_ttl_max": 3600, + } + ], + max_delegation_depth=2, + updated_at="2026-01-01T00:00:00Z", + ) + return private_pem, public_pem, kid, agent_id, doc + + +class TestMakerDeployerFlow: + def test_full_credential_lifecycle(self): + private_pem, public_pem, kid, agent_id, doc = make_test_setup() + + # Issue a credential + jwt_str = issue_credential( + private_key_pem=private_pem, + kid=kid, + issuer="example.com", + agent_id=agent_id, + audience="verifier.com", + capabilities=["read:data", "write:report"], + constraints=None, + delegation_chain=None, + ttl_secs=3600, + ) + assert jwt_str + assert jwt_str.count(".") == 2 + + # Decode unverified to inspect + header, payload, _sig = decode_jwt_unverified(jwt_str) + assert header["alg"] == "ES256" + assert header["typ"] == "agentpin-credential+jwt" + assert header["kid"] == kid + assert payload["iss"] == "example.com" + assert payload["sub"] == agent_id + + # Verify signature + verified_header, verified_payload = verify_jwt(jwt_str, public_pem) + assert verified_header["kid"] == kid + assert verified_payload["iss"] == "example.com" + + # Full offline verification + pin_store = KeyPinStore() + config = VerifierConfig() + result = verify_credential_offline( + jwt_str, doc, None, pin_store, "verifier.com", config + ) + assert result.valid, f"Expected valid, got: {result.error_message}" + assert result.agent_id == agent_id + assert result.issuer == "example.com" + + +class TestRevocationFlow: + def test_revoked_key_detected(self): + private_pem, _public_pem, kid, agent_id, doc = make_test_setup() + + jwt_str = issue_credential( + private_key_pem=private_pem, + kid=kid, + issuer="example.com", + agent_id=agent_id, + audience=None, + capabilities=["read:data"], + constraints=None, + delegation_chain=None, + ttl_secs=3600, + ) 
+ + header, payload, _sig = decode_jwt_unverified(jwt_str) + + # Clean revocation: should pass + rev_doc = build_revocation_document("example.com") + check_revocation(rev_doc, payload["jti"], agent_id, kid) # no error + + # Add revoked key + add_revoked_key(rev_doc, kid, "key_compromise") + + # Now check_revocation should fail + with pytest.raises(AgentPinError): + check_revocation(rev_doc, payload["jti"], agent_id, kid) + + # Full offline verification should also fail + pin_store = KeyPinStore() + config = VerifierConfig() + vresult = verify_credential_offline( + jwt_str, doc, rev_doc, pin_store, None, config + ) + assert not vresult.valid + + +class TestMutualVerificationWithNonceStore: + def test_nonce_replay_prevention(self): + private_pem, public_pem = generate_key_pair() + + store = InMemoryNonceStore() + challenge = create_challenge() + response = create_response(challenge, private_pem, "test-key") + + # First verification should succeed + valid = verify_response_with_nonce_store( + response, challenge, public_pem, store + ) + assert valid + + # Second verification with same nonce should fail (replay) + with pytest.raises(ValueError, match="already been used"): + verify_response_with_nonce_store( + response, challenge, public_pem, store + ) + + +class TestTransportRoundtrip: + def test_all_transports(self): + private_pem, _public_pem = generate_key_pair() + kid = generate_key_id(_public_pem) + + jwt_str = issue_credential( + private_key_pem=private_pem, + kid=kid, + issuer="example.com", + agent_id="urn:agentpin:example.com:test-agent", + audience=None, + capabilities=["read:data"], + constraints=None, + delegation_chain=None, + ttl_secs=3600, + ) + + # HTTP roundtrip + http_header = http_format_authorization_header(jwt_str) + http_extracted = http_extract_credential(http_header) + assert http_extracted == jwt_str + + # MCP roundtrip + mcp_meta = mcp_format_meta_field(jwt_str) + mcp_extracted = mcp_extract_credential(mcp_meta) + assert mcp_extracted == 
jwt_str + + # WebSocket roundtrip + ws_msg = ws_format_auth_message(jwt_str) + ws_extracted = ws_extract_credential(ws_msg) + assert ws_extracted == jwt_str + + # gRPC roundtrip + grpc_val = grpc_format_metadata_value(jwt_str) + grpc_extracted = grpc_extract_credential(grpc_val) + assert grpc_extracted == jwt_str + + +class TestKeyRotationLifecycle: + def test_rotation_add_and_remove(self): + private_pem, public_pem = generate_key_pair() + old_kid = generate_key_id(public_pem) + old_jwk = pem_to_jwk(public_pem, old_kid) + + doc = build_discovery_document( + entity="example.com", + entity_type="maker", + public_keys=[old_jwk], + agents=[], + max_delegation_depth=2, + updated_at="2026-01-01T00:00:00Z", + ) + assert len(doc["public_keys"]) == 1 + + # Prepare rotation + plan = prepare_rotation(old_kid) + assert plan["new_kid"] != old_kid + + # Apply rotation: both keys should be present + apply_rotation(doc, plan) + assert len(doc["public_keys"]) == 2 + kids = [k["kid"] for k in doc["public_keys"]] + assert old_kid in kids + assert plan["new_kid"] in kids + + # Complete rotation: old key removed, added to revocation + rev_doc = build_revocation_document("example.com") + complete_rotation(doc, rev_doc, old_kid, "superseded") + + assert len(doc["public_keys"]) == 1 + assert doc["public_keys"][0]["kid"] == plan["new_kid"] + assert len(rev_doc["revoked_keys"]) == 1 + assert rev_doc["revoked_keys"][0]["kid"] == old_kid + assert rev_doc["revoked_keys"][0]["reason"] == "superseded" + + +class TestPinningFlow: + def test_tofu_pinning(self): + _priv1, pub1 = generate_key_pair() + kid1 = generate_key_id(pub1) + jwk1 = pem_to_jwk(pub1, kid1) + + store = KeyPinStore() + + # First verification pins the key + result1 = check_pinning(store, "example.com", jwk1) + assert result1 == PinningResult.FIRST_USE + + # Same key succeeds + result2 = check_pinning(store, "example.com", jwk1) + assert result2 == PinningResult.MATCHED + + # Different key triggers error + _priv2, pub2 = 
generate_key_pair() + kid2 = generate_key_id(pub2) + jwk2 = pem_to_jwk(pub2, kid2) + + with pytest.raises(AgentPinError): + check_pinning(store, "example.com", jwk2) diff --git a/python/tests/test_nonce.py b/python/tests/test_nonce.py new file mode 100644 index 0000000..2b055fe --- /dev/null +++ b/python/tests/test_nonce.py @@ -0,0 +1,45 @@ +"""Tests for nonce deduplication.""" + +import threading +import time + +from agentpin.nonce import InMemoryNonceStore + + +class TestInMemoryNonceStore: + def test_fresh_nonce(self): + store = InMemoryNonceStore() + assert store.check_and_record("nonce-1", 60.0) is True + + def test_duplicate_nonce(self): + store = InMemoryNonceStore() + assert store.check_and_record("nonce-1", 60.0) is True + assert store.check_and_record("nonce-1", 60.0) is False + + def test_expired_nonce(self): + store = InMemoryNonceStore() + # Record with a very short TTL + assert store.check_and_record("nonce-1", 0.05) is True + time.sleep(0.1) + # Should be fresh again after expiry + assert store.check_and_record("nonce-1", 60.0) is True + + def test_concurrent_safety(self): + store = InMemoryNonceStore() + results = [] + barrier = threading.Barrier(10) + + def try_record(): + barrier.wait() + result = store.check_and_record("shared-nonce", 60.0) + results.append(result) + + threads = [threading.Thread(target=try_record) for _ in range(10)] + for t in threads: + t.start() + for t in threads: + t.join() + + # Exactly one thread should succeed + assert results.count(True) == 1 + assert results.count(False) == 9 diff --git a/python/tests/test_resolver_a2a.py b/python/tests/test_resolver_a2a.py new file mode 100644 index 0000000..92948ab --- /dev/null +++ b/python/tests/test_resolver_a2a.py @@ -0,0 +1,106 @@ +"""Tests for A2aAgentCardResolver (v0.3.0). + +Uses a stub fetch so we don't need a real HTTP server. 
+""" + +import pytest + +from agentpin import ( + A2aAgentCardResolver, + AgentStatus, + build_and_sign_agent_card, + generate_key_pair, +) + + +def _signed_card_for(domain): + private_pem, _ = generate_key_pair() + decl = { + "agent_id": f"urn:agentpin:{domain}:test", + "name": "Test Agent", + "description": "desc", + "version": "1.0.0", + "capabilities": ["read:*"], + "credential_ttl_max": 3600, + "status": AgentStatus.ACTIVE, + } + return build_and_sign_agent_card( + f"https://{domain}/agent", + decl, + private_pem, + "kid-1", + f"https://{domain}/.well-known/agent-identity.json", + ) + + +class _StubResponse: + def __init__(self, status_code, body=None): + self.status_code = status_code + self.ok = 200 <= status_code < 300 + self._body = body + + def json(self): + if self._body is None: + raise ValueError("no body") + return self._body + + +def _stub_fetch(routes): + def fetch(url): + entry = routes.get(url) + if entry is None: + raise AssertionError(f"unexpected fetch: {url}") + return entry + return fetch + + +def test_resolves_and_verifies_a_card(): + card = _signed_card_for("example.com") + fetch = _stub_fetch({ + "https://example.com/.well-known/agent-card.json": _StubResponse(200, card), + }) + resolver = A2aAgentCardResolver(fetch=fetch) + doc = resolver.resolve_discovery("example.com") + assert doc["entity"] == "example.com" + assert len(doc["public_keys"]) == 1 + assert resolver.last_card("example.com") == card + + +def test_rejects_http_error_response(): + fetch = _stub_fetch({ + "https://example.com/.well-known/agent-card.json": _StubResponse(404), + }) + resolver = A2aAgentCardResolver(fetch=fetch) + with pytest.raises(Exception, match="HTTP 404"): + resolver.resolve_discovery("example.com") + + +def test_rejects_card_whose_extension_does_not_verify(): + card = _signed_card_for("example.com") + card["url"] = "https://attacker.example/agent" # tamper + fetch = _stub_fetch({ + "https://example.com/.well-known/agent-card.json": _StubResponse(200, 
card), + }) + resolver = A2aAgentCardResolver(fetch=fetch) + with pytest.raises(Exception, match="did not verify"): + resolver.resolve_discovery("example.com") + + +def test_rejects_endpoint_host_mismatch(): + card = _signed_card_for("other.com") # valid for other.com + fetch = _stub_fetch({ + "https://example.com/.well-known/agent-card.json": _StubResponse(200, card), + }) + resolver = A2aAgentCardResolver(fetch=fetch) + with pytest.raises(Exception, match="mismatch"): + resolver.resolve_discovery("example.com") + + +def test_resolve_revocation_returns_none(): + card = _signed_card_for("example.com") + fetch = _stub_fetch({ + "https://example.com/.well-known/agent-card.json": _StubResponse(200, card), + }) + resolver = A2aAgentCardResolver(fetch=fetch) + doc = resolver.resolve_discovery("example.com") + assert resolver.resolve_revocation("example.com", doc) is None diff --git a/python/tests/test_resolver_local.py b/python/tests/test_resolver_local.py new file mode 100644 index 0000000..c757447 --- /dev/null +++ b/python/tests/test_resolver_local.py @@ -0,0 +1,118 @@ +"""Tests for LocalAgentCardStore (v0.3.0).""" + +import pytest + +from agentpin import ( + AgentStatus, + LocalAgentCardStore, + build_and_sign_agent_card, + card_endpoint_host, + derive_discovery_from_card, + generate_key_pair, +) + + +def _declaration(): + return { + "agent_id": "urn:agentpin:example.com:tester", + "name": "Tester", + "description": "Test agent", + "version": "1.0.0", + "capabilities": ["read:*"], + "constraints": {"allowed_domains": ["partner.com"]}, + "credential_ttl_max": 3600, + "status": AgentStatus.ACTIVE, + } + + +def _signed_card(): + private_pem, _ = generate_key_pair() + return build_and_sign_agent_card( + "https://example.com/agent", + _declaration(), + private_pem, + "kid-1", + "https://example.com/.well-known/agent-identity.json", + ) + + +def test_card_endpoint_host(): + card = _signed_card() + assert card_endpoint_host(card) == "example.com" + + +def 
test_card_endpoint_host_without_extension_raises(): + with pytest.raises(Exception): + card_endpoint_host({"name": "x"}) + + +def test_derive_discovery_from_card(): + card = _signed_card() + doc = derive_discovery_from_card(card) + assert doc["entity"] == "example.com" + assert len(doc["public_keys"]) == 1 + assert len(doc["agents"]) == 1 + assert doc["agents"][0]["name"] == "Tester" + assert doc["agents"][0]["capabilities"] == ["read:*"] + assert doc["agents"][0]["constraints"]["allowed_domains"] == ["partner.com"] + assert doc["a2a_endpoint"] == "https://example.com/.well-known/agent-identity.json" + + +def test_register_then_resolve(): + store = LocalAgentCardStore() + store.register(_signed_card()) + assert len(store) == 1 + doc = store.resolve_discovery("example.com") + assert doc["entity"] == "example.com" + assert doc["agents"][0]["name"] == "Tester" + + +def test_register_propagates_signature_failure(): + card = _signed_card() + card["url"] = "https://attacker.example/agent" # tampered + store = LocalAgentCardStore() + with pytest.raises(Exception): + store.register(card) + assert store.is_empty() + + +def test_resolve_discovery_missing_raises(): + store = LocalAgentCardStore() + with pytest.raises(Exception): + store.resolve_discovery("missing.com") + + +def test_re_register_replaces_prior_entry(): + store = LocalAgentCardStore() + store.register(_signed_card()) + store.register(_signed_card()) + assert len(store) == 1 + + +def test_remove_drops_entry(): + store = LocalAgentCardStore() + store.register(_signed_card()) + assert store.remove("example.com") is True + assert store.is_empty() + assert store.remove("example.com") is False + + +def test_resolve_card_returns_original(): + store = LocalAgentCardStore() + store.register(_signed_card()) + card = store.resolve_card("example.com") + assert card["name"] == "Tester" + + +def test_resolve_revocation_returns_none(): + store = LocalAgentCardStore() + store.register(_signed_card()) + doc = 
store.resolve_discovery("example.com") + assert store.resolve_revocation("example.com", doc) is None + + +def test_allowed_domains_propagate_into_derived_doc(): + store = LocalAgentCardStore() + store.register(_signed_card()) + doc = store.resolve_discovery("example.com") + assert doc["agents"][0]["constraints"]["allowed_domains"] == ["partner.com"] diff --git a/python/tests/test_rotation.py b/python/tests/test_rotation.py new file mode 100644 index 0000000..fa221b0 --- /dev/null +++ b/python/tests/test_rotation.py @@ -0,0 +1,57 @@ +"""Tests for key rotation helpers.""" + +from agentpin.crypto import generate_key_id, generate_key_pair +from agentpin.jwk import pem_to_jwk +from agentpin.revocation import build_revocation_document +from agentpin.rotation import apply_rotation, complete_rotation, prepare_rotation + + +class TestRotation: + def test_prepare_rotation(self): + _, pub = generate_key_pair() + old_kid = generate_key_id(pub) + plan = prepare_rotation(old_kid) + + assert plan["old_kid"] == old_kid + assert plan["new_kid"] != old_kid + assert plan["new_jwk"]["kid"] == plan["new_kid"] + assert len(plan["new_key_pair"]) == 2 # (private, public) + + def test_apply_rotation(self): + # Build a minimal discovery doc + _, pub = generate_key_pair() + old_kid = generate_key_id(pub) + old_jwk = pem_to_jwk(pub, old_kid) + doc = {"public_keys": [old_jwk], "updated_at": "old"} + + plan = prepare_rotation(old_kid) + apply_rotation(doc, plan) + + assert len(doc["public_keys"]) == 2 + kids = [k["kid"] for k in doc["public_keys"]] + assert old_kid in kids + assert plan["new_kid"] in kids + assert doc["updated_at"] != "old" + + def test_complete_rotation(self): + # Build docs + _, pub = generate_key_pair() + old_kid = generate_key_id(pub) + old_jwk = pem_to_jwk(pub, old_kid) + + plan = prepare_rotation(old_kid) + doc = {"public_keys": [old_jwk, plan["new_jwk"]], "updated_at": "old"} + rev_doc = build_revocation_document("example.com") + + complete_rotation(doc, rev_doc, 
old_kid, "key_compromise") + + # Old key removed from discovery + kids = [k["kid"] for k in doc["public_keys"]] + assert old_kid not in kids + assert plan["new_kid"] in kids + assert len(doc["public_keys"]) == 1 + + # Old key added to revocation + assert len(rev_doc["revoked_keys"]) == 1 + assert rev_doc["revoked_keys"][0]["kid"] == old_kid + assert rev_doc["revoked_keys"][0]["reason"] == "key_compromise" diff --git a/python/tests/test_transport.py b/python/tests/test_transport.py new file mode 100644 index 0000000..c68270d --- /dev/null +++ b/python/tests/test_transport.py @@ -0,0 +1,100 @@ +"""Tests for transport binding helpers.""" + +import json + +import pytest + +from agentpin.transport import ( + AUTH_TYPE, + FIELD_NAME, + GRPC_METADATA_KEY, + grpc_extract_credential, + grpc_format_metadata_value, + http_extract_credential, + http_format_authorization_header, + mcp_extract_credential, + mcp_format_meta_field, + ws_extract_credential, + ws_format_auth_message, +) +from agentpin.types import AgentPinError + + +class TestHttpTransport: + def test_extract_valid(self): + assert http_extract_credential("AgentPin eyJ.test.jwt") == "eyJ.test.jwt" + + def test_extract_missing_prefix(self): + with pytest.raises(AgentPinError, match="Missing 'AgentPin ' prefix"): + http_extract_credential("Bearer eyJ.test.jwt") + + def test_extract_empty_credential(self): + with pytest.raises(AgentPinError, match="Empty credential"): + http_extract_credential("AgentPin ") + + def test_format_roundtrip(self): + jwt = "eyJ.test.jwt" + header = http_format_authorization_header(jwt) + assert header == "AgentPin eyJ.test.jwt" + assert http_extract_credential(header) == jwt + + +class TestMcpTransport: + def test_extract_valid(self): + meta = {FIELD_NAME: "eyJ.test.jwt"} + assert mcp_extract_credential(meta) == "eyJ.test.jwt" + + def test_extract_missing_field(self): + with pytest.raises(AgentPinError, match="Missing"): + mcp_extract_credential({}) + + def test_extract_non_string(self): + 
with pytest.raises(AgentPinError, match="not a string"): + mcp_extract_credential({FIELD_NAME: 42}) + + def test_format_roundtrip(self): + jwt = "eyJ.test.jwt" + meta = mcp_format_meta_field(jwt) + assert mcp_extract_credential(meta) == jwt + + +class TestWsTransport: + def test_extract_valid(self): + msg = json.dumps({"type": AUTH_TYPE, "credential": "eyJ.test.jwt"}) + assert ws_extract_credential(msg) == "eyJ.test.jwt" + + def test_extract_invalid_json(self): + with pytest.raises(AgentPinError, match="Invalid JSON"): + ws_extract_credential("not json") + + def test_extract_wrong_type(self): + msg = json.dumps({"type": "other", "credential": "eyJ.test.jwt"}) + with pytest.raises(AgentPinError, match="Expected type"): + ws_extract_credential(msg) + + def test_extract_missing_credential(self): + msg = json.dumps({"type": AUTH_TYPE}) + with pytest.raises(AgentPinError, match="Missing or non-string"): + ws_extract_credential(msg) + + def test_format_roundtrip(self): + jwt = "eyJ.test.jwt" + msg = ws_format_auth_message(jwt) + assert ws_extract_credential(msg) == jwt + + +class TestGrpcTransport: + def test_extract_valid(self): + assert grpc_extract_credential("eyJ.test.jwt") == "eyJ.test.jwt" + + def test_extract_empty(self): + with pytest.raises(AgentPinError, match="Empty gRPC metadata"): + grpc_extract_credential("") + + def test_format_roundtrip(self): + jwt = "eyJ.test.jwt" + value = grpc_format_metadata_value(jwt) + assert grpc_extract_credential(value) == jwt + + def test_metadata_key_name(self): + assert GRPC_METADATA_KEY == "agentpin-credential"