Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 0 additions & 35 deletions .claude/settings.local.json

This file was deleted.

5 changes: 3 additions & 2 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,9 @@ LITELLM_FALLBACK_MODEL= # Optional fallback model (empty = no fall
MAX_CONCURRENT_REVIEWS=5
RATE_LIMIT_RPM=60

# Storage (SQLite by default; replace with PostgreSQL URL for production)
DATABASE_URL=sqlite+aiosqlite:////data/d1ff.db
# PostgreSQL (default connects to docker-compose postgres service)
# For external DB: DATABASE_URL=postgresql+asyncpg://user:pass@host:5432/dbname
DATABASE_URL=postgresql+asyncpg://d1ff:d1ff_local@postgres:5432/d1ff

# Observability
LOG_LEVEL=INFO # Options: DEBUG, INFO, WARNING, ERROR
Expand Down
1 change: 0 additions & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,5 @@ jobs:
with:
python-version: "3.12"
- run: uv sync
- run: uv run ruff check .
- run: uv run mypy src/
- run: uv run pytest
7 changes: 1 addition & 6 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,10 +1,4 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.4
hooks:
- id: ruff
args: [--fix]
- id: ruff-format
- repo: local
hooks:
- id: mypy
Expand All @@ -14,6 +8,7 @@ repos:
types: [python]
pass_filenames: false
args: [src/]
stages: [pre-push]
- id: pytest
name: pytest
entry: uv run pytest
Expand Down
5 changes: 4 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,10 @@ COPY --from=backend-builder /app/prompts /app/prompts
# Copy compiled React SPA from frontend-builder
COPY --from=frontend-builder /frontend/dist /app/static

# Create volume mount point for SQLite
# Copy Alembic config and migrations
COPY alembic.ini ./
COPY alembic/ ./alembic/

# NOTE(review): /data was the SQLite volume mount point; with the move to
# PostgreSQL in this change, confirm this directory is still needed.
RUN mkdir -p /data

ENV PORT=8000
Expand Down
35 changes: 35 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
[alembic]
script_location = alembic
# NOTE(review): no sqlalchemy.url is defined here — confirm env.py resolves the
# database URL from the DATABASE_URL environment variable at runtime.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
45 changes: 45 additions & 0 deletions alembic/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""Alembic environment configuration for async PostgreSQL."""

import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.ext.asyncio import create_async_engine

from alembic import context
from d1ff.storage.schema import metadata

config = context.config
if config.config_file_name is not None:
fileConfig(config.config_file_name)

target_metadata = metadata


def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode — generates SQL script."""
url = config.get_main_option("sqlalchemy.url")
context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()


def do_run_migrations(connection) -> None:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()


async def run_migrations_online() -> None:
"""Run migrations in 'online' mode — connects to database via asyncpg."""
url = config.get_main_option("sqlalchemy.url")
connectable = create_async_engine(url, poolclass=pool.NullPool)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()


if context.is_offline_mode():
run_migrations_offline()
else:
asyncio.run(run_migrations_online())
25 changes: 25 additions & 0 deletions alembic/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
${upgrades if upgrades else "pass"}


def downgrade() -> None:
${downgrades if downgrades else "pass"}
138 changes: 138 additions & 0 deletions alembic/versions/001_initial_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
"""Initial schema with all tables and pgvector extension.

Revision ID: 001
Revises:
Create Date: 2026-03-29
"""

from collections.abc import Sequence

import sqlalchemy as sa

from alembic import op

revision: str = "001"
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
    """Create the pgvector extension and all initial application tables.

    Statement order matters: the extension is created first, and tables
    referenced by foreign keys (installations, users) are created before
    their dependents.
    """
    # pgvector must exist before any vector-typed columns can be defined.
    op.execute("CREATE EXTENSION IF NOT EXISTS vector")

    # GitHub App installations; PK is GitHub's installation id, so no autoincrement.
    op.create_table(
        "installations",
        sa.Column("installation_id", sa.Integer, primary_key=True, autoincrement=False),
        sa.Column("account_login", sa.Text, nullable=False),
        sa.Column("account_type", sa.Text, nullable=False),
        sa.Column("suspended", sa.Boolean, nullable=False, server_default="false"),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
    )

    # Per-installation LLM credentials; one row per (installation, provider).
    op.create_table(
        "api_keys",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column(
            "installation_id",
            sa.Integer,
            sa.ForeignKey("installations.installation_id"),
            nullable=False,
        ),
        sa.Column("provider", sa.Text, nullable=False),
        sa.Column("model", sa.Text, nullable=False),
        sa.Column("encrypted_key", sa.Text, nullable=False),
        sa.Column("custom_endpoint", sa.Text),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.UniqueConstraint("installation_id", "provider"),
    )

    # Repositories known to an installation; PK is GitHub's repo id.
    # ondelete=CASCADE: removing an installation removes its repositories.
    op.create_table(
        "repositories",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=False),
        sa.Column(
            "installation_id",
            sa.Integer,
            sa.ForeignKey("installations.installation_id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("repo_name", sa.Text, nullable=False),
        sa.Column("full_name", sa.Text, nullable=False),
        sa.Column("private", sa.Boolean, nullable=False, server_default="false"),
        sa.Column(
            "created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()
        ),
    )

    # Per-PR review state, keyed by (installation, repo, PR number).
    op.create_table(
        "pr_states",
        sa.Column("installation_id", sa.Integer, nullable=False),
        sa.Column("repo_full_name", sa.Text, nullable=False),
        sa.Column("pr_number", sa.Integer, nullable=False),
        sa.Column("state", sa.Text, nullable=False, server_default="active"),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("installation_id", "repo_full_name", "pr_number"),
    )

    # Reactions on review comments; comment_id/installation_id are GitHub-side
    # identifiers, intentionally without FK constraints here.
    op.create_table(
        "feedback_reactions",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("comment_id", sa.Integer, nullable=False),
        sa.Column("reaction_type", sa.Text, nullable=False),
        sa.Column("installation_id", sa.Integer, nullable=False),
        sa.Column("pr_number", sa.Integer, nullable=False),
        sa.Column("repo_full_name", sa.Text, nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
    )

    # Dashboard users (GitHub OAuth); github_id is the external identity key.
    op.create_table(
        "users",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("github_id", sa.Integer, unique=True, nullable=False),
        sa.Column("login", sa.Text, nullable=False),
        sa.Column("email", sa.Text),
        sa.Column("avatar_url", sa.Text),
        sa.Column("encrypted_token", sa.Text, nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
    )

    # Many-to-many link between users and installations; composite PK.
    op.create_table(
        "user_installations",
        sa.Column(
            "user_id", sa.Integer, sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False
        ),
        sa.Column(
            "installation_id",
            sa.Integer,
            sa.ForeignKey("installations.installation_id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("user_id", "installation_id"),
    )

    # One optional global LLM config per user (user_id is the PK).
    op.create_table(
        "user_global_settings",
        sa.Column(
            "user_id", sa.Integer, sa.ForeignKey("users.id", ondelete="CASCADE"), primary_key=True
        ),
        sa.Column("provider", sa.Text, nullable=False),
        sa.Column("model", sa.Text, nullable=False),
        sa.Column("encrypted_api_key", sa.Text, nullable=False),
        sa.Column("custom_endpoint", sa.Text),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
    )


def downgrade() -> None:
    """Drop every table created by this revision, then the pgvector extension.

    Tables are dropped in reverse dependency order so foreign-key
    constraints never block a drop.
    """
    tables_newest_first = (
        "user_global_settings",
        "user_installations",
        "users",
        "feedback_reactions",
        "pr_states",
        "repositories",
        "api_keys",
        "installations",
    )
    for table_name in tables_newest_first:
        op.drop_table(table_name)
    # Extension last: no vector columns may remain when it is removed.
    op.execute("DROP EXTENSION IF EXISTS vector")
Empty file added alembic/versions/__init__.py
Empty file.
39 changes: 20 additions & 19 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,34 +3,35 @@
# 1. Build the image:
# docker build -t d1ff .
#
# 2. Run the container (requires a valid .env file with required vars set):
# docker run -v d1ff-data:/data -p 8000:8000 --env-file .env d1ff
#
# For Docker-infra-only smoke testing (stub values to bypass fail-fast):
# docker run -v d1ff-data:/data -p 8000:8000 \
# -e GITHUB_APP_ID=1 \
# -e GITHUB_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA0Z3VS5JJcds3xHn/ygWep4PAtEsHAcMCRCTxHFWkWgvMbOar\n-----END RSA PRIVATE KEY-----" \
# -e GITHUB_WEBHOOK_SECRET=test \
# -e ENCRYPTION_KEY=$(python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())") \
# d1ff
# 2. Run the stack:
# docker compose up -d
#
# 3. Verify health endpoint responds:
# curl http://localhost:8000/health
# Expected: JSON with subsystem statuses, HTTP 200
#
# 4. Verify Docker healthcheck status (after start_period of 15s):
# docker inspect <container_id> --format='{{.State.Health.Status}}'
# Expected: "healthy"

services:
postgres:
image: pgvector/pgvector:pg17
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
POSTGRES_DB: d1ff
POSTGRES_USER: d1ff
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-d1ff_local}
healthcheck:
test: ["CMD-SHELL", "pg_isready -U d1ff"]
interval: 5s
timeout: 3s
retries: 5

d1ff:
build: .
# Or use a pre-built image:
# image: ghcr.io/d1ff-dev/d1ff:latest
ports:
- "8000:8000"
volumes:
- d1ff-data:/data
depends_on:
postgres:
condition: service_healthy
env_file:
- .env
restart: unless-stopped
Expand All @@ -42,4 +43,4 @@ services:
start_period: 15s

volumes:
d1ff-data:
postgres-data:
Loading
Loading