6 changes: 6 additions & 0 deletions .conductor.json
@@ -8,6 +8,12 @@
"description": "Copy .env configuration from repository root",
"command": "python3 scripts/setup_conductor_env.py",
"runOnCreate": true
},
{
"name": "install-pre-commit",
"description": "Install pre-commit hooks for code quality checks",
"command": "pre-commit install",
"runOnCreate": true
}
]
},
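
For a concrete picture of what `runOnCreate` gives you, here is a minimal sketch of the behavior in plain Python — the runner loop is hypothetical (Conductor's execution model is not part of this diff); the two command strings are the ones defined in the setup tasks above.

```python
# Hypothetical stand-in for Conductor's runOnCreate handling; the commands
# are the two setup tasks configured in .conductor.json above.
import shlex
import subprocess

SETUP_COMMANDS = [
    "python3 scripts/setup_conductor_env.py",  # copy .env from repo root
    "pre-commit install",  # new: install hooks into .git/hooks on create
]

for command in SETUP_COMMANDS:
    # Each runOnCreate task runs once when the workspace is created.
    subprocess.run(shlex.split(command), check=True)
```
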
9 changes: 9 additions & 0 deletions .pre-commit-config.yaml
@@ -30,6 +30,15 @@ repos:
pass_filenames: false
args: [src/adcp]

# Security scanning with bandit
- repo: https://github.com/PyCQA/bandit
rev: 1.7.10
hooks:
- id: bandit
args: ["-c", "pyproject.toml"]
additional_dependencies: ["bandit[toml]"]
exclude: ^(tests/|scripts/)

# Basic file checks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
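
To illustrate what the new hook reports, here is a hypothetical snippet using two of bandit's standard checks: B602 (subprocess call with `shell=True`) fires, while B101 (bare `assert`) is suppressed by the `[tool.bandit]` table added to pyproject.toml below. Note the hook's `exclude` pattern means files under `tests/` and `scripts/` are never scanned.

```python
# Hypothetical module under src/ — bandit scans it via the hook above.
import subprocess

def run_user_command(cmd: str) -> None:
    # B602: subprocess call with shell=True — bandit flags this line.
    subprocess.run(cmd, shell=True)

def check_invariant(n: int) -> None:
    # B101 (assert used) is skipped via pyproject's `skips = ["B101"]`,
    # so plain asserts pass the hook.
    assert n >= 0
```
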
2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -126,7 +126,7 @@

### ⚠ BREAKING CHANGES

*
*

### Features

29 changes: 6 additions & 23 deletions docs/examples/testing_patterns.py
@@ -11,9 +11,6 @@

from __future__ import annotations

import json
from pathlib import Path

import pytest

# ✅ CORRECT: Import from public API
@@ -185,16 +182,12 @@ async def test_buyer_discovers_products_for_coffee_campaign(self, mocker):
"property_tags": ["morning", "lifestyle"],
}
],
"pricing_options": [
{"model": "cpm_fixed_rate", "is_fixed": True, "cpm": 4.50}
],
"pricing_options": [{"model": "cpm_fixed_rate", "is_fixed": True, "cpm": 4.50}],
}
]
}

mock_result = TaskResult(
status=TaskStatus.COMPLETED, data=mock_response_data, success=True
)
mock_result = TaskResult(status=TaskStatus.COMPLETED, data=mock_response_data, success=True)

mocker.patch.object(client.adapter, "get_products", return_value=mock_result)

@@ -227,9 +220,7 @@ async def test_buyer_handles_no_products_available(self, mocker):
client = ADCPClient(config)

# Mock empty response
mock_result = TaskResult(
status=TaskStatus.COMPLETED, data={"products": []}, success=True
)
mock_result = TaskResult(status=TaskStatus.COMPLETED, data={"products": []}, success=True)

mocker.patch.object(client.adapter, "get_products", return_value=mock_result)

@@ -309,11 +300,7 @@ async def test_create_media_buy_handles_error_response(self, mocker):
# Mock error response
mock_result = TaskResult(
status=TaskStatus.COMPLETED,
data={
"errors": [
{"code": "budget_exceeded", "message": "Budget exceeds limit"}
]
},
data={"errors": [{"code": "budget_exceeded", "message": "Budget exceeds limit"}]},
success=True, # Note: Protocol success, but logical error
)

@@ -425,9 +412,7 @@ def test_anti_pattern_importing_generated_poc(self):
"property_ids": ["site1"],
}
],
"pricing_options": [
{"model": "cpm_fixed_rate", "is_fixed": True, "cpm": 5.0}
],
"pricing_options": [{"model": "cpm_fixed_rate", "is_fixed": True, "cpm": 5.0}],
}

product = Product.model_validate(product_json)
@@ -525,9 +510,7 @@ def sample_product_json():
"property_ids": ["homepage", "mobile_app"],
}
],
"pricing_options": [
{"model": "cpm_fixed_rate", "is_fixed": True, "cpm": 5.50}
],
"pricing_options": [{"model": "cpm_fixed_rate", "is_fixed": True, "cpm": 5.50}],
}


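The hunks above are pure black reformatting, but they show the pattern the example file teaches: stub the transport adapter, return a typed `TaskResult`, and assert on the data. A self-contained approximation with a stand-in client follows — the real `AgentConfig`/`ADCPClient` setup is elided in this diff, so `SimpleNamespace` objects stand in for both (requires pytest-mock and pytest-asyncio):

```python
# Stand-in objects only — the real client construction isn't shown here.
from types import SimpleNamespace

import pytest

@pytest.mark.asyncio
async def test_empty_product_list(mocker):
    async def unpatched(request):  # always replaced by the mock below
        raise NotImplementedError

    client = SimpleNamespace(adapter=SimpleNamespace(get_products=unpatched))
    mock_result = SimpleNamespace(status="completed", data={"products": []}, success=True)
    # patch.object sees the async original and installs an AsyncMock,
    # mirroring the mocker.patch.object calls in the example above.
    mocker.patch.object(client.adapter, "get_products", return_value=mock_result)

    result = await client.adapter.get_products({})
    assert result.success and result.data["products"] == []
```
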
46 changes: 44 additions & 2 deletions pyproject.toml
@@ -66,7 +66,7 @@ adcp = ["py.typed", "ADCP_VERSION"]
[tool.black]
line-length = 100
target-version = ["py310", "py311", "py312"]
extend-exclude = "/(_generated|tasks)\\.py$"
extend-exclude = "/(tasks)\\.py$"

[tool.ruff]
line-length = 100
@@ -79,7 +79,10 @@ extend-exclude = [

[tool.ruff.lint]
select = ["E", "F", "I", "N", "W", "UP"]
ignore = ["E402"] # Allow imports after module docstrings
ignore = [
"E402", # Allow imports after module docstrings
"UP038", # isinstance() doesn't support X | Y syntax, only type hints do
]

[tool.mypy]
python_version = "3.10"
@@ -98,6 +101,45 @@ ignore_errors = true
[tool.pytest.ini_options]
testpaths = ["tests"]
asyncio_mode = "auto"
markers = [
"unit: Unit tests that don't require external services",
"integration: Integration tests that hit real endpoints (may be slow/flaky)",
"slow: Tests that take significant time to run",
]
# By default, skip integration tests for fast local development
addopts = "-m 'not integration'"
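
A quick sketch of how the markers interact with the default `addopts`: `-m 'not integration'` deselects integration-marked tests on a plain `pytest` run, so they execute only when selected explicitly (e.g. `pytest -m integration`).

```python
# With addopts = "-m 'not integration'", the second test is deselected on a
# plain `pytest` run; `pytest -m integration` runs it (and only it).
import pytest

@pytest.mark.unit
def test_parses_locally():
    assert 1 + 1 == 2

@pytest.mark.integration
def test_hits_real_endpoint():
    ...  # may be slow/flaky; exercised only when explicitly selected
```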

[tool.coverage.run]
source = ["src/adcp"]
omit = [
"*/tests/*",
"*/test_*.py",
]
branch = true

[tool.coverage.report]
precision = 2
show_missing = true
skip_covered = false
exclude_lines = [
"pragma: no cover",
"def __repr__",
"raise AssertionError",
"raise NotImplementedError",
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod",
]
# Maintain current 86% coverage, fail if it drops below 80%
fail_under = 80
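
The `exclude_lines` patterns keep defensive and type-checking-only code out of the coverage denominator. A hypothetical module showing lines that would be ignored when computing the `fail_under` threshold:

```python
# Hypothetical module — the marked lines match exclude_lines above and do
# not count against coverage.
from typing import TYPE_CHECKING

if TYPE_CHECKING:  # matches "if TYPE_CHECKING:" — import is type-only
    from adcp import ADCPClient

class BaseAdapter:
    def __repr__(self) -> str:  # matches "def __repr__"
        return "BaseAdapter()"

    def fetch(self):
        raise NotImplementedError  # matches "raise NotImplementedError"
```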

[tool.coverage.html]
directory = "htmlcov"

[tool.bandit]
exclude_dirs = ["tests", "scripts"]
skips = ["B101"] # Allow assert in code (we're not using -O optimization)

[dependency-groups]
dev = [
45 changes: 36 additions & 9 deletions scripts/consolidate_exports.py
@@ -9,6 +9,8 @@
from __future__ import annotations

import ast
import subprocess
import sys
from datetime import datetime, timezone
from pathlib import Path

@@ -60,12 +62,12 @@ def generate_consolidated_exports() -> str:

# Special handling for known collisions
# We need BOTH versions of these types available, so import them with qualified names
KNOWN_COLLISIONS = {
known_collisions = {
"Package": {"package", "create_media_buy_response"},
}

special_imports = []
collision_modules_seen: dict[str, set[str]] = {name: set() for name in KNOWN_COLLISIONS}
collision_modules_seen: dict[str, set[str]] = {name: set() for name in known_collisions}

for module_path in modules:
# Get relative path from generated_poc directory
@@ -85,7 +87,7 @@ def generate_consolidated_exports() -> str:
unique_exports = set()
for export_name in exports:
# Special case: Known collisions - track all modules that define them
if export_name in KNOWN_COLLISIONS and display_name in KNOWN_COLLISIONS[export_name]:
if export_name in known_collisions and display_name in known_collisions[export_name]:
collision_modules_seen[export_name].add(module_name)
export_to_module[export_name] = module_name # Track that we've seen it
continue # Don't add to unique_exports, we'll handle specially
@@ -123,7 +125,9 @@
for module_name in sorted(modules_seen):
# Create qualified name from module path (e.g., "core.package" -> "Package")
parts = module_name.split(".")
qualified_name = f"_{type_name}From{parts[-1].replace('_', ' ').title().replace(' ', '')}"
qualified_name = (
f"_{type_name}From{parts[-1].replace('_', ' ').title().replace(' ', '')}"
)
special_imports.append(
f"from adcp.types.generated_poc.{module_name} import {type_name} as {qualified_name}"
)
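
To make the renaming scheme concrete, here is the transformation applied to the `Package` collision. The `core.package` path comes from the comment above; treating `create_media_buy_response` (the other entry in `known_collisions`) as a top-level module path is an assumption for illustration.

```python
# Worked example of the qualified-name scheme for the Package collision.
for module_name in ["core.package", "create_media_buy_response"]:
    last = module_name.split(".")[-1]
    qualified = f"_PackageFrom{last.replace('_', ' ').title().replace(' ', '')}"
    print(f"from adcp.types.generated_poc.{module_name} import Package as {qualified}")

# Output:
# from adcp.types.generated_poc.core.package import Package as _PackageFromPackage
# from adcp.types.generated_poc.create_media_buy_response import Package as _PackageFromCreateMediaBuyResponse
```
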
@@ -161,7 +165,12 @@

# Add special imports for name collisions
if special_imports:
lines.extend(["", "# Special imports for name collisions (qualified names for types defined in multiple modules)"])
lines.extend(
[
"",
"# Special imports for name collisions (qualified names for types defined in multiple modules)",
]
)
lines.extend(special_imports)

# Add backward compatibility aliases (only if source exists)
@@ -173,10 +182,12 @@

alias_lines = []
if aliases:
alias_lines.extend([
"",
"# Backward compatibility aliases for renamed types",
])
alias_lines.extend(
[
"",
"# Backward compatibility aliases for renamed types",
]
)
for alias, target in aliases.items():
alias_lines.append(f"{alias} = {target}")

@@ -248,6 +259,22 @@ def main():
print(f"\nWriting {OUTPUT_FILE}...")
OUTPUT_FILE.write_text(content)

# Run black to format the generated file
print("Formatting with black...")
try:
result = subprocess.run(
[sys.executable, "-m", "black", str(OUTPUT_FILE), "--quiet"],
capture_output=True,
text=True,
check=False,
)
if result.returncode == 0:
print("✓ Formatted with black")
else:
print(f"⚠ Black formatting had issues: {result.stderr}")
except Exception as e:
print(f"⚠ Could not run black (not critical): {e}")

print("✓ Successfully generated consolidated exports")
export_count = len(
[
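
A note on the design: running black on the freshly written file presumably keeps regenerated output consistent with the black pre-commit hook this PR installs, so rerunning the generator does not immediately fail the hooks; the `check=False` call plus the broad `except` keep generation usable even where black is unavailable.
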
6 changes: 1 addition & 5 deletions scripts/generate_types.py
@@ -68,11 +68,7 @@ def flatten_schemas():
# Recursively find all JSON schemas (including subdirectories)
schema_files = list(SCHEMAS_DIR.rglob("*.json"))
# Filter out .hashes.json and index.json
schema_files = [
f
for f in schema_files
if f.name not in (".hashes.json", "index.json")
]
schema_files = [f for f in schema_files if f.name not in (".hashes.json", "index.json")]

for schema_file in schema_files:
# Preserve directory structure relative to SCHEMAS_DIR
5 changes: 1 addition & 4 deletions scripts/post_generate_fixes.py
@@ -12,7 +12,6 @@

from __future__ import annotations

import re
from pathlib import Path

REPO_ROOT = Path(__file__).parent.parent
@@ -133,8 +132,6 @@ def fix_enum_defaults():
print(" brand_manifest.py enum defaults fixed")




def fix_preview_creative_request_discriminator():
"""Add discriminator to PreviewCreativeRequest union.

@@ -160,7 +157,7 @@
# Add discriminator to the Field
content = content.replace(
"Field(\n description='Request to generate previews",
"Field(\n discriminator='request_type',\n description='Request to generate previews"
"Field(\n discriminator='request_type',\n description='Request to generate previews",
)

with open(preview_request_file, "w") as f:
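
For background on why the discriminator matters, a minimal Pydantic sketch — the variant models and the `"inline"`/`"batch"` tag values are hypothetical; only the `request_type` field name comes from the fix above. With a discriminator, validation dispatches on the tag instead of trying every union member, and errors point at the correct variant.

```python
# Hypothetical discriminated union keyed on request_type (Pydantic v2).
from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field

class InlinePreview(BaseModel):
    request_type: Literal["inline"]

class BatchPreview(BaseModel):
    request_type: Literal["batch"]

class PreviewEnvelope(BaseModel):
    request: Annotated[
        Union[InlinePreview, BatchPreview],
        Field(discriminator="request_type"),
    ]

PreviewEnvelope.model_validate({"request": {"request_type": "batch"}})
```
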
8 changes: 4 additions & 4 deletions scripts/sync_schemas.py
@@ -266,18 +266,18 @@ def main():
index_hash = compute_hash(index_content)
updated_hashes[SCHEMA_INDEX_URL] = index_hash

print(f"Schema index retrieved\n")
print("Schema index retrieved\n")
except Exception as e:
print(f"Error: Could not fetch index.json from {SCHEMA_INDEX_URL}")
print(f"Details: {e}\n")
sys.exit(1)

# Discover all schemas from index
print(f"Discovering schemas from index...")
print("Discovering schemas from index...")
schema_urls = set(discover_schemas_from_index(index_schema))

print(f"Found {len(schema_urls)} schemas in index")
print(f"Checking for transitive dependencies...\n")
print("Checking for transitive dependencies...\n")

# Follow transitive dependencies
# Download schemas and check for additional refs
@@ -299,7 +299,7 @@ def main():
if ref_url not in processed and ref_url not in to_process:
to_process.append(ref_url)
schema_urls.add(ref_url)
except Exception as e:
except Exception:
# If we can't download, we'll catch it in the main download loop
pass

1 change: 1 addition & 0 deletions src/adcp/__init__.py
@@ -202,6 +202,7 @@ def get_adcp_version() -> str:
version_file = files("adcp") / "ADCP_VERSION"
return version_file.read_text().strip()


__all__ = [
# Version functions
"get_adcp_version",