1 change: 0 additions & 1 deletion Dockerfile
@@ -20,7 +20,6 @@ COPY api/ ./

ENV PATH="/api/.venv/bin:$PATH"

# Create the data directory (ensures the sqlite file directory exists); inside the container, data is stored under /api/data
# Install backend dependencies
RUN mkdir -p data && pip install uv && uv sync

1 change: 0 additions & 1 deletion api/.env.example
@@ -5,5 +5,4 @@ SECRET_KEY=change-me-generate-a-secure-random-string
ACCESS_TOKEN_EXPIRE_MINUTES=60
REFRESH_TOKEN_EXPIRE_DAYS=7
JWT_ALGORITHM=HS256
# DATABASE_URL is optional; if DATABASE_URL is not set, sqlite is used by default
# DATABASE_URL=postgres://postgres:password@localhost:5432/postgres
@@ -71,7 +71,6 @@ def upgrade() -> None:
)

# Drop the role column from the users table
# PostgreSQL can drop a column directly; SQLite requires batch mode
with op.batch_alter_table("users", schema=None) as batch_op:
batch_op.drop_column("role")
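For illustration only (not part of this migration): a hedged sketch of how the corresponding downgrade could re-add the column via the same batch-mode pattern. The column type, length, and nullability below are assumptions, not values taken from this project.

import sqlalchemy as sa
from alembic import op

def downgrade() -> None:
    # Re-add the role column; batch mode keeps the operation working on SQLite as well
    with op.batch_alter_table("users", schema=None) as batch_op:
        batch_op.add_column(sa.Column("role", sa.String(length=50), nullable=True))  # assumed type/length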

@@ -61,7 +61,6 @@ def upgrade() -> None:

def downgrade() -> None:
"""删除存储后端配置表"""
# 使用 batch mode 来支持 SQLite
with op.batch_alter_table("files", schema=None) as batch_op:
batch_op.drop_constraint("fk_files_storage_backend_id", type_="foreignkey")
batch_op.drop_column("storage_backend_id")
83 changes: 35 additions & 48 deletions api/app/database.py
@@ -15,62 +15,49 @@

load_dotenv()

# Default to the async sqlite driver, aiosqlite
_DEFAULT_SQLITE = "sqlite+aiosqlite:///./data/app.sqlite"

# The `DATABASE_URL` environment variable switches the backend to Postgres (the asyncpg driver is recommended).
# If a common postgres URI is provided (postgres:// or postgresql://),
# the scheme is rewritten to `postgresql+asyncpg://` so that asyncpg is used.
raw_db_url = os.getenv("DATABASE_URL", "").strip()
connect_args = {}

if raw_db_url:
# If it is sqlite, use it as-is and skip the URL rebuilding below (avoids urlunparse dropping the /// prefix)
if raw_db_url.startswith("sqlite"):
DATABASE_URL = raw_db_url
else:
if raw_db_url.startswith("postgres://"):
raw_db_url = raw_db_url.replace("postgres://", "postgresql+asyncpg://", 1)
elif raw_db_url.startswith("postgresql://"):
raw_db_url = raw_db_url.replace("postgresql://", "postgresql+asyncpg://", 1)

# Parse the URL to handle parameters asyncpg does not support
parsed = urlparse(raw_db_url)
query_params = parse_qs(parsed.query)

# Handle sslmode
if "sslmode" in query_params:
ssl_mode = query_params.pop("sslmode")[0]
if ssl_mode == "require":
connect_args["ssl"] = "require"
elif ssl_mode == "disable":
connect_args["ssl"] = False

# Handle channel_binding (asyncpg does not accept it as a kwarg; remove it to avoid errors)
if "channel_binding" in query_params:
query_params.pop("channel_binding")

# Rebuild the URL
new_query = urlencode(query_params, doseq=True)
parsed = parsed._replace(query=new_query)
DATABASE_URL = urlunparse(parsed)
else:
DATABASE_URL = _DEFAULT_SQLITE

# Use sqlite-specific connect_args (check_same_thread for aiosqlite)
if DATABASE_URL.startswith("sqlite"):
engine = create_async_engine(
DATABASE_URL, connect_args={"check_same_thread": False}
)
if raw_db_url.startswith("postgres://"):
raw_db_url = raw_db_url.replace("postgres://", "postgresql+asyncpg://", 1)
elif raw_db_url.startswith("postgresql://"):
raw_db_url = raw_db_url.replace("postgresql://", "postgresql+asyncpg://", 1)

# Parse the URL to handle parameters asyncpg does not support
parsed = urlparse(raw_db_url)
query_params = parse_qs(parsed.query)

# Handle sslmode
if "sslmode" in query_params:
ssl_mode = query_params.pop("sslmode")[0]
if ssl_mode == "require":
connect_args["ssl"] = "require"
elif ssl_mode == "disable":
connect_args["ssl"] = False

# Handle channel_binding (asyncpg does not accept it as a kwarg; remove it to avoid errors)
if "channel_binding" in query_params:
query_params.pop("channel_binding")

# Rebuild the URL
new_query = urlencode(query_params, doseq=True)
parsed = parsed._replace(query=new_query)
DATABASE_URL = urlunparse(parsed)
else:
engine = create_async_engine(
DATABASE_URL,
connect_args=connect_args,
pool_size=20, # increase the connection pool size
max_overflow=40, # allow extra connections beyond the pool size
pool_pre_ping=True, # ping before use to ensure the connection is still valid
pool_recycle=3600, # recycle connections after 1 hour
)
raise ValueError("DATABASE_URL environment variable is required")

engine = create_async_engine(
DATABASE_URL,
connect_args=connect_args,
pool_size=20, # increase the connection pool size
max_overflow=40, # allow extra connections beyond the pool size
pool_pre_ping=True, # ping before use to ensure the connection is still valid
pool_recycle=3600, # recycle connections after 1 hour
)

async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
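For reference (not part of the diff): a minimal standalone sketch of what the URL normalization above does to a Postgres connection string carrying parameters that asyncpg rejects. The example URL and expected results are illustrative assumptions, not values from this project.

from urllib.parse import parse_qs, urlencode, urlparse, urlunparse

def normalize_db_url(raw: str) -> tuple[str, dict]:
    """Illustrative re-implementation of the rewrite performed in api/app/database.py."""
    connect_args: dict = {}
    if raw.startswith("postgres://"):
        raw = raw.replace("postgres://", "postgresql+asyncpg://", 1)
    elif raw.startswith("postgresql://"):
        raw = raw.replace("postgresql://", "postgresql+asyncpg://", 1)
    parsed = urlparse(raw)
    params = parse_qs(parsed.query)
    if "sslmode" in params:
        mode = params.pop("sslmode")[0]
        if mode == "require":
            connect_args["ssl"] = "require"
        elif mode == "disable":
            connect_args["ssl"] = False
    params.pop("channel_binding", None)  # asyncpg does not accept this as a kwarg
    url = urlunparse(parsed._replace(query=urlencode(params, doseq=True)))
    return url, connect_args

# Hypothetical managed-Postgres style connection string:
url, args = normalize_db_url(
    "postgresql://user:pw@db.example.com/app?sslmode=require&channel_binding=require"
)
# url  == "postgresql+asyncpg://user:pw@db.example.com/app"
# args == {"ssl": "require"}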

1 change: 0 additions & 1 deletion api/pyproject.toml
@@ -5,7 +5,6 @@ description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"aiosqlite>=0.21.0",
"fastapi>=0.123.4",
"pydantic>=2.12.5",
"python-multipart>=0.0.20",
16 changes: 0 additions & 16 deletions api/uv.lock


1 change: 0 additions & 1 deletion docker-compose.dev.yml
@@ -10,7 +10,6 @@ services:
- ./api:/app
environment:
- SECRET_KEY=dev-secret-key-change-in-production
# - DATABASE_URL=sqlite+aiosqlite:///./data/app.db
- DATABASE_URL=postgresql+asyncpg://postgres:postgres@postgres:5432/archivenote
command: >
bash -c "pip install uv &&