diff --git a/README.md b/README.md
index 52c6662..512c483 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,3 @@
-请在此处填写插件使用说明和您的联系方式
+## S3
 
-如果插件需要付费,请提供付费相关说明
-
-如有配套前端插件,请添加前端插件仓库链接说明
-
-插件开发文档:[fba plugin dev](https://fastapi-practices.github.io/fastapi_best_architecture_docs/plugin/dev.html)
+此插件提供了兼容 S3 协议的存储能力
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/api/router.py b/api/router.py
new file mode 100644
index 0000000..75b2000
--- /dev/null
+++ b/api/router.py
@@ -0,0 +1,10 @@
+from fastapi import APIRouter
+
+from backend.core.conf import settings
+from backend.plugin.s3.api.v1.file import router as file_router
+from backend.plugin.s3.api.v1.storage import router as business_router
+
+v1 = APIRouter(prefix=f'{settings.FASTAPI_API_V1_PATH}/s3', tags=['S3'])
+
+v1.include_router(business_router, prefix='/storages')
+v1.include_router(file_router, prefix='/files')
diff --git a/api/v1/__init__.py b/api/v1/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/api/v1/file.py b/api/v1/file.py
new file mode 100644
index 0000000..a9a1b80
--- /dev/null
+++ b/api/v1/file.py
@@ -0,0 +1,43 @@
+from typing import Annotated
+
+from fastapi import APIRouter, Depends, File, Query, UploadFile
+
+from backend.common.dataclasses import UploadUrl
+from backend.common.exception import errors
+from backend.common.response.response_schema import ResponseSchemaModel, response_base
+from backend.common.security.permission import RequestPermission
+from backend.common.security.rbac import DependsRBAC
+from backend.database.db import CurrentSession
+from backend.plugin.s3.crud.storage import s3_storage_dao
+from backend.plugin.s3.utils.file_ops import write_file
+from backend.utils.file_ops import upload_file_verify
+
+router = APIRouter()
+
+
+@router.post(
+    '/upload',
+    summary='S3 文件上传',
+    dependencies=[
+        Depends(RequestPermission('s3:file:upload')),
+        DependsRBAC,
+    ],
+)
+async def upload_s3_files(
+    db: CurrentSession, file: Annotated[UploadFile, File()], storage: Annotated[int, Query(description='S3 存储 ID')]
+) -> ResponseSchemaModel[UploadUrl]:
+    s3_storage = await s3_storage_dao.get(db, storage)
+    if not s3_storage:
+        raise errors.NotFoundError(msg='S3 存储不存在')
+    upload_file_verify(file)
+    await write_file(s3_storage, file)
+
+    bucket_path = f'/{s3_storage.bucket}'
+    if s3_storage.prefix:
+        prefix = s3_storage.prefix if s3_storage.prefix.startswith('/') else f'/{s3_storage.prefix}'
+        root = f'{bucket_path}{prefix}'
+    else:
+        root = bucket_path
+
+    url = f'{root.rstrip("/")}/{file.filename}'
+    return response_base.success(data={'url': url})
diff --git a/api/v1/storage.py b/api/v1/storage.py
new file mode 100644
index 0000000..a760904
--- /dev/null
+++ b/api/v1/storage.py
@@ -0,0 +1,96 @@
+from typing import Annotated
+
+from fastapi import APIRouter, Depends, Path
+from fastapi.params import Query
+
+from backend.common.pagination import DependsPagination, PageData
+from backend.common.response.response_schema import ResponseModel, ResponseSchemaModel, response_base
+from backend.common.security.jwt import DependsJwtAuth
+from backend.common.security.permission import RequestPermission
+from backend.common.security.rbac import DependsRBAC
+from backend.database.db import CurrentSession, CurrentSessionTransaction
+from backend.plugin.s3.schema.storage import (
+    CreateS3StorageParam,
+    DeleteS3StorageParam,
+    GetS3StorageDetail,
+    UpdateS3StorageParam,
+)
+from backend.plugin.s3.service.storage import s3_storage_service
+
+router = APIRouter()
+
+
+@router.get('/all', summary='获取所有 S3 存储详情', dependencies=[DependsJwtAuth])
+async def get_all_s3_storages(db: CurrentSession) -> ResponseSchemaModel[list[GetS3StorageDetail]]:
+    s3_storage = await s3_storage_service.get_all(db=db)
+    return response_base.success(data=s3_storage)
+
+
+@router.get('/{pk}', summary='获取 S3 存储详情', dependencies=[DependsJwtAuth])
+async def get_s3_storage(
+    db: CurrentSession, pk: Annotated[int, Path(description='S3 存储 ID')]
+) -> ResponseSchemaModel[GetS3StorageDetail]:
+    s3_storage = await s3_storage_service.get(db=db, pk=pk)
+    return response_base.success(data=s3_storage)
+
+
+@router.get(
+    '',
+    summary='分页获取所有 S3 存储',
+    dependencies=[
+        DependsJwtAuth,
+        DependsPagination,
+    ],
+)
+async def get_s3_storages_paginated(
+    db: CurrentSession,
+    name: Annotated[str | None, Query(description='存储名称')] = None,
+    region: Annotated[str | None, Query(description='区域')] = None,
+) -> ResponseSchemaModel[PageData[GetS3StorageDetail]]:
+    page_data = await s3_storage_service.get_list(db=db, name=name, region=region)
+    return response_base.success(data=page_data)
+
+
+@router.post(
+    '',
+    summary='创建 S3 存储',
+    dependencies=[
+        Depends(RequestPermission('s3:storage:add')),
+        DependsRBAC,
+    ],
+)
+async def create_s3_storage(db: CurrentSessionTransaction, obj: CreateS3StorageParam) -> ResponseModel:
+    await s3_storage_service.create(db=db, obj=obj)
+    return response_base.success()
+
+
+@router.put(
+    '/{pk}',
+    summary='更新 S3 存储',
+    dependencies=[
+        Depends(RequestPermission('s3:storage:edit')),
+        DependsRBAC,
+    ],
+)
+async def update_s3_storage(
+    db: CurrentSessionTransaction, pk: Annotated[int, Path(description='S3 存储 ID')], obj: UpdateS3StorageParam
+) -> ResponseModel:
+    count = await s3_storage_service.update(db=db, pk=pk, obj=obj)
+    if count > 0:
+        return response_base.success()
+    return response_base.fail()
+
+
+@router.delete(
+    '',
+    summary='批量删除 S3 存储',
+    dependencies=[
+        Depends(RequestPermission('s3:storage:del')),
+        DependsRBAC,
+    ],
+)
+async def delete_s3_storages(db: CurrentSessionTransaction, obj: DeleteS3StorageParam) -> ResponseModel:
+    count = await s3_storage_service.delete(db=db, obj=obj)
+    if count > 0:
+        return response_base.success()
+    return response_base.fail()
diff --git a/crud/__init__.py b/crud/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/crud/storage.py b/crud/storage.py
new file mode 100644
index 0000000..da7ca74
--- /dev/null
+++ b/crud/storage.py
@@ -0,0 +1,80 @@
+from collections.abc import Sequence
+
+from sqlalchemy import Select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy_crud_plus import CRUDPlus
+
+from backend.plugin.s3.model import S3Storage
+from backend.plugin.s3.schema.storage import CreateS3StorageParam, UpdateS3StorageParam
+
+
+class CRUDS3Storage(CRUDPlus[S3Storage]):
+    async def get(self, db: AsyncSession, pk: int) -> S3Storage | None:
+        """
+        获取 S3 存储
+
+        :param db: 数据库会话
+        :param pk: S3 存储 ID
+        :return:
+        """
+        return await self.select_model(db, pk)
+
+    async def get_select(self, name: str | None, region: str | None) -> Select:
+        """
+        获取 S3 存储列表查询表达式
+
+        :param name: 存储名称
+        :param region: 区域
+        :return:
+        """
+        filters = {}
+
+        if name is not None:
+            filters['name__like'] = f'%{name}%'
+        if region is not None:
+            filters['region'] = region
+
+        return await self.select_order('id', 'desc', **filters)
+
+    async def get_all(self, db: AsyncSession) -> Sequence[S3Storage]:
+        """
+        获取所有 S3 存储
+
+        :param db: 数据库会话
+        :return:
+        """
+        return await self.select_models(db)
+
+    async def create(self, db: AsyncSession, obj: CreateS3StorageParam) -> None:
+        """
+        创建 S3 存储
+
+        :param db: 数据库会话
+        :param obj: 创建 S3 存储参数
+        :return:
+        """
+        await self.create_model(db, obj)
+
+    async def update(self, db: AsyncSession, pk: int, obj: UpdateS3StorageParam) -> int:
+        """
+        更新 S3 存储
+
+        :param db: 数据库会话
+        :param pk: S3 存储 ID
+        :param obj: 更新 S3 存储参数
+        :return:
+        """
+        return await self.update_model(db, pk, obj)
+
+    async def delete(self, db: AsyncSession, pks: list[int]) -> int:
+        """
+        批量删除 S3 存储
+
+        :param db: 数据库会话
+        :param pks: S3 存储 ID 列表
+        :return:
+        """
+        return await self.delete_model_by_column(db, allow_multiple=True, id__in=pks)
+
+
+s3_storage_dao: CRUDS3Storage = CRUDS3Storage(S3Storage)
diff --git a/model/__init__.py b/model/__init__.py
new file mode 100644
index 0000000..878aecb
--- /dev/null
+++ b/model/__init__.py
@@ -0,0 +1 @@
+from backend.plugin.s3.model.storage import S3Storage as S3Storage
diff --git a/model/storage.py b/model/storage.py
new file mode 100644
index 0000000..bae2599
--- /dev/null
+++ b/model/storage.py
@@ -0,0 +1,21 @@
+import sqlalchemy as sa
+
+from sqlalchemy.orm import Mapped, mapped_column
+
+from backend.common.model import Base, UniversalText, id_key
+
+
+class S3Storage(Base):
+    """S3 存储"""
+
+    __tablename__ = 's3_storage'
+
+    id: Mapped[id_key] = mapped_column(init=False)
+    name: Mapped[str] = mapped_column(sa.String(64), default='', comment='存储名称')
+    endpoint: Mapped[str] = mapped_column(sa.String(512), default='', comment='终端节点')
+    access_key: Mapped[str] = mapped_column(sa.String(512), default='', comment='访问密钥')
+    secret_key: Mapped[str] = mapped_column(sa.String(512), default='', comment='密钥')
+    bucket: Mapped[str] = mapped_column(sa.String(64), default='', comment='存储桶')
+    prefix: Mapped[str | None] = mapped_column(sa.String(256), default=None, comment='前缀')
+    region: Mapped[str | None] = mapped_column(sa.String(64), default=None, comment='区域')
+    remark: Mapped[str | None] = mapped_column(UniversalText, default=None, comment='备注')
diff --git a/plugin.toml b/plugin.toml
index e69de29..84265eb 100644
--- a/plugin.toml
+++ b/plugin.toml
@@ -0,0 +1,8 @@
+[plugin]
+summary = 'S3'
+version = '0.0.1'
+description = '提供兼容 S3 协议的对象存储能力'
+author = 'wu-clan'
+
+[app]
+router = ['v1']
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..f932435
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+opendal>=0.46.0
diff --git a/schema/__init__.py b/schema/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/schema/storage.py b/schema/storage.py
new file mode 100644
index 0000000..579d410
--- /dev/null
+++ b/schema/storage.py
@@ -0,0 +1,42 @@
+from datetime import datetime
+
+from pydantic import ConfigDict, Field
+
+from backend.common.schema import SchemaBase
+
+
+class S3StorageSchemaBase(SchemaBase):
+    """S3 存储基础模型"""
+
+    name: str = Field(description='存储名称')
+    endpoint: str = Field(description='终端节点')
+    access_key: str = Field(description='访问密钥')
+    secret_key: str = Field(description='密钥')
+    bucket: str = Field(description='存储桶')
+    prefix: str | None = Field(None, description='前缀')
+    region: str | None = Field(None, description='区域')
+    remark: str | None = Field(None, description='备注')
+
+
+class CreateS3StorageParam(S3StorageSchemaBase):
+    """创建 S3 存储参数"""
+
+
+class UpdateS3StorageParam(S3StorageSchemaBase):
+    """更新 S3 存储参数"""
+
+
+class DeleteS3StorageParam(SchemaBase):
+    """删除 S3 存储参数"""
+
+    pks: list[int] = Field(description='S3 存储 ID 列表')
+
+
+class GetS3StorageDetail(S3StorageSchemaBase):
+    """S3 存储详情"""
+
+    model_config = ConfigDict(from_attributes=True)
+
+    id: int = Field(description='S3 存储 ID')
+    created_time: datetime = Field(description='创建时间')
+    updated_time: datetime | None = Field(None, description='更新时间')
diff --git a/service/__init__.py b/service/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/service/storage.py b/service/storage.py
new file mode 100644
index 0000000..cb79a99
--- /dev/null
+++ b/service/storage.py
@@ -0,0 +1,89 @@
+from collections.abc import Sequence
+from typing import Any
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from backend.common.exception import errors
+from backend.common.pagination import paging_data
+from backend.plugin.s3.crud.storage import s3_storage_dao
+from backend.plugin.s3.model import S3Storage
+from backend.plugin.s3.schema.storage import CreateS3StorageParam, DeleteS3StorageParam, UpdateS3StorageParam
+
+
+class S3StorageService:
+    @staticmethod
+    async def get(*, db: AsyncSession, pk: int) -> S3Storage:
+        """
+        获取 S3 存储
+
+        :param db: 数据库会话
+        :param pk: S3 存储 ID
+        :return:
+        """
+        s3_storage = await s3_storage_dao.get(db, pk)
+        if not s3_storage:
+            raise errors.NotFoundError(msg='S3 存储不存在')
+        return s3_storage
+
+    @staticmethod
+    async def get_list(db: AsyncSession, name: str | None, region: str | None) -> dict[str, Any]:
+        """
+        获取 S3 存储列表
+
+        :param db: 数据库会话
+        :param name: 存储名称
+        :param region: 区域
+        :return:
+        """
+        s3_storage_select = await s3_storage_dao.get_select(name, region)
+        return await paging_data(db, s3_storage_select)
+
+    @staticmethod
+    async def get_all(*, db: AsyncSession) -> Sequence[S3Storage]:
+        """
+        获取所有 S3 存储
+
+        :param db: 数据库会话
+        :return:
+        """
+        s3_storages = await s3_storage_dao.get_all(db)
+        return s3_storages
+
+    @staticmethod
+    async def create(*, db: AsyncSession, obj: CreateS3StorageParam) -> None:
+        """
+        创建 S3 存储
+
+        :param db: 数据库会话
+        :param obj: 创建 S3 存储参数
+        :return:
+        """
+        await s3_storage_dao.create(db, obj)
+
+    @staticmethod
+    async def update(*, db: AsyncSession, pk: int, obj: UpdateS3StorageParam) -> int:
+        """
+        更新 S3 存储
+
+        :param db: 数据库会话
+        :param pk: S3 存储 ID
+        :param obj: 更新 S3 存储参数
+        :return:
+        """
+        count = await s3_storage_dao.update(db, pk, obj)
+        return count
+
+    @staticmethod
+    async def delete(*, db: AsyncSession, obj: DeleteS3StorageParam) -> int:
+        """
+        删除 S3 存储
+
+        :param db: 数据库会话
+        :param obj: 删除 S3 存储参数
+        :return:
+        """
+        count = await s3_storage_dao.delete(db, obj.pks)
+        return count
+
+
+s3_storage_service: S3StorageService = S3StorageService()
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/utils/file_ops.py b/utils/file_ops.py
new file mode 100644
index 0000000..f05c4b2
--- /dev/null
+++ b/utils/file_ops.py
@@ -0,0 +1,49 @@
+from fastapi import UploadFile
+from opendal import AsyncOperator
+
+from backend.plugin.s3.model import S3Storage
+
+
+def get_operator(
+    endpoint: str, access_key: str, secret_key: str, bucket: str, prefix: str, region: str
+) -> AsyncOperator:
+    """
+    获取操作
+
+    :param endpoint: 终端节点
+    :param access_key: 访问密钥
+    :param secret_key: 密钥
+    :param bucket: 存储桶
+    :param prefix: 前缀
+    :param region: 区域
+    :return:
+    """
+    return AsyncOperator(
+        's3',
+        endpoint=endpoint,
+        access_key_id=access_key,
+        secret_access_key=secret_key,
+        bucket=bucket,
+        root=prefix,
+        region=region,
+    )
+
+
+async def write_file(s3_storage: S3Storage, file: UploadFile) -> None:
+    """
+    写入文件
+
+    :param s3_storage: S3 存储
+    :param file: 上传文件
+    :return:
+    """
+    op = get_operator(
+        s3_storage.endpoint,
+        s3_storage.access_key,
+        s3_storage.secret_key,
+        s3_storage.bucket,
+        s3_storage.prefix or '/',
+        s3_storage.region or 'any',
+    )
+    contents = await file.read()
+    await op.write(file.filename, contents)