Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""Article、ArticleDB表增加点击量

Revision ID: 023952869ee6
Revises: f89d896e9b57
Create Date: 2025-05-27 20:53:36.631307

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '023952869ee6'
down_revision: Union[str, None] = 'f89d896e9b57'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema: add a non-null ``clicks`` counter to both article tables.

    ``server_default='0'`` is required so the NOT NULL column can be added to
    tables that already contain rows (the ALTER fails otherwise on most
    backends); it also makes new rows start at zero clicks.
    """
    op.add_column('articleDB', sa.Column('clicks', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('articles', sa.Column('clicks', sa.Integer(), nullable=False, server_default='0'))


def downgrade() -> None:
    """Downgrade schema: remove the ``clicks`` column from both article tables."""
    # Exact reverse of upgrade(): drop the click counters again.
    for table_name in ('articles', 'articleDB'):
        op.drop_column(table_name, 'clicks')
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
"""Article表增加所属个人或组织

Revision ID: 56dcd6190dd0
Revises: 023952869ee6
Create Date: 2025-05-27 21:37:05.492437

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '56dcd6190dd0'
down_revision: Union[str, None] = '023952869ee6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema: link each article to an owning user or group.

    Both columns are nullable because an article belongs to either a user
    or a group, not necessarily both.
    """
    op.add_column('articles', sa.Column('user_id', sa.Integer(), nullable=True))
    op.add_column('articles', sa.Column('group_id', sa.Integer(), nullable=True))
    # Constraints stay unnamed on purpose: migration 9256d579f585 later drops
    # the group FK by its MySQL auto-generated name 'articles_ibfk_2', so the
    # auto-naming must be preserved here.
    op.create_foreign_key(None, 'articles', 'groups', ['group_id'], ['id'])
    op.create_foreign_key(None, 'articles', 'users', ['user_id'], ['id'])


def downgrade() -> None:
    """Downgrade schema: remove the ownership columns and their foreign keys.

    ``op.drop_constraint(None, ...)`` (as auto-generated) raises at runtime —
    Alembic requires an explicit constraint name — so the MySQL auto-generated
    names (``<table>_ibfk_<n>``) are used instead. 'articles_ibfk_2' (the group
    FK) is confirmed by migration 9256d579f585, which drops it by that name.
    """
    # NOTE(review): 'articles_ibfk_3' assumes the user FK was auto-numbered
    # right after the group FK — TODO confirm against the actual database.
    op.drop_constraint('articles_ibfk_3', 'articles', type_='foreignkey')  # user_id FK
    op.drop_constraint('articles_ibfk_2', 'articles', type_='foreignkey')  # group_id FK
    op.drop_column('articles', 'group_id')
    op.drop_column('articles', 'user_id')
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
"""权限定义、删除申请表,Article表URL

Revision ID: 618f8bcbc41e
Revises: 56dcd6190dd0
Create Date: 2025-05-27 23:08:11.483163

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '618f8bcbc41e'
down_revision: Union[str, None] = '56dcd6190dd0'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema: deletion-request and permission tables, article URL.

    Creates ``delete_applications`` (pending deletion requests) and
    ``operate_permissions`` (per-user, per-item access flags within a group),
    and stores each article's storage path in a new ``url`` column.
    """
    # group_id is nullable: personal (non-group) items have no group.
    op.create_table(
        'delete_applications',
        sa.Column('group_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('item_type', sa.Integer(), nullable=False),
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['groups.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('user_id', 'item_type', 'item_id'),
    )
    op.create_table(
        'operate_permissions',
        sa.Column('group_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('item_type', sa.Integer(), nullable=False),
        sa.Column('item_id', sa.Integer(), nullable=False),
        sa.Column('accessible', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['groups.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('group_id', 'user_id', 'item_type', 'item_id'),
    )
    # server_default='' lets the NOT NULL column be added even when the
    # articles table already contains rows; existing rows get an empty URL.
    op.add_column('articles', sa.Column('url', sa.String(length=200), nullable=False, server_default=''))


def downgrade() -> None:
    """Downgrade schema: undo upgrade() in reverse order."""
    op.drop_column('articles', 'url')
    for table_name in ('operate_permissions', 'delete_applications'):
        op.drop_table(table_name)
34 changes: 34 additions & 0 deletions alembic/versions/6a0b40746e6c_note表group_id.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""Note表group_id

Revision ID: 6a0b40746e6c
Revises: 618f8bcbc41e
Create Date: 2025-05-27 23:56:45.363419

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '6a0b40746e6c'
down_revision: Union[str, None] = '618f8bcbc41e'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema: attach notes to an (optional) owning group."""
    op.add_column('notes', sa.Column('group_id', sa.Integer(), nullable=True))
    # Constraint stays unnamed on purpose: migration 9256d579f585 later drops
    # it by its MySQL auto-generated name 'notes_ibfk_4', so the auto-naming
    # must be preserved here.
    op.create_foreign_key(None, 'notes', 'groups', ['group_id'], ['id'])


def downgrade() -> None:
    """Downgrade schema: detach notes from groups.

    ``op.drop_constraint(None, ...)`` (as auto-generated) raises at runtime —
    Alembic requires an explicit name — so the FK is dropped by its MySQL
    auto-generated name 'notes_ibfk_4', confirmed by migration 9256d579f585
    which drops the same constraint under that name.
    """
    op.drop_constraint('notes_ibfk_4', 'notes', type_='foreignkey')
    op.drop_column('notes', 'group_id')
38 changes: 38 additions & 0 deletions alembic/versions/9256d579f585_组织管理数据库.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
"""组织管理数据库

Revision ID: 9256d579f585
Revises: 6a0b40746e6c
Create Date: 2025-05-28 23:35:01.332825

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '9256d579f585'
down_revision: Union[str, None] = '6a0b40746e6c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema: cascade-delete articles/notes when their group is deleted.

    The replacement FKs are named explicitly; the auto-generated version
    created them with ``None``, which made them impossible to drop in
    ``downgrade()`` (``op.drop_constraint`` requires a name).
    """
    op.drop_constraint('articles_ibfk_2', 'articles', type_='foreignkey')
    op.create_foreign_key('fk_articles_group_id_groups', 'articles', 'groups',
                          ['group_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint('notes_ibfk_4', 'notes', type_='foreignkey')
    op.create_foreign_key('fk_notes_group_id_groups', 'notes', 'groups',
                          ['group_id'], ['id'], ondelete='CASCADE')


def downgrade() -> None:
    """Downgrade schema: restore the original non-cascading foreign keys."""
    op.drop_constraint('fk_notes_group_id_groups', 'notes', type_='foreignkey')
    op.create_foreign_key('notes_ibfk_4', 'notes', 'groups', ['group_id'], ['id'])
    op.drop_constraint('fk_articles_group_id_groups', 'articles', type_='foreignkey')
    op.create_foreign_key('articles_ibfk_2', 'articles', 'groups', ['group_id'], ['id'])
71 changes: 34 additions & 37 deletions app/api/v1/endpoints/article.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,14 @@
from typing import Optional, List
import os
import io
import uuid
from zipfile import ZipFile
import zipfile
import tempfile

from app.utils.get_db import get_db
from app.utils.auth import get_current_user
from app.curd.article import crud_upload_to_self_folder, crud_get_self_folders, crud_get_articles_in_folder, crud_self_create_folder, crud_self_article_to_recycle_bin, crud_self_folder_to_recycle_bin, crud_read_article, crud_import_self_folder, crud_export_self_folder,crud_create_tag, crud_delete_tag, crud_get_article_tags, crud_all_tags_order, crud_change_folder_name, crud_change_article_name, crud_article_statistic, crud_self_tree, crud_self_article_statistic, crud_items_in_recycle_bin, crud_delete_forever, crud_recover
from app.curd.article import crud_upload_to_self_folder, crud_get_self_folders, crud_get_articles_in_folder, crud_self_create_folder, crud_self_article_to_recycle_bin, crud_self_folder_to_recycle_bin, crud_annotate_self_article, crud_read_article, crud_read_article_by_url, crud_import_self_folder, crud_export_self_folder,crud_create_tag, crud_delete_tag, crud_get_article_tags, crud_all_tags_order, crud_change_folder_name, crud_change_article_name, crud_article_statistic, crud_self_tree, crud_self_article_statistic, crud_items_in_recycle_bin, crud_delete_forever, crud_recover
from app.schemas.article import SelfCreateFolder

router = APIRouter()
Expand All @@ -24,19 +25,19 @@ async def upload_to_self_folder(folder_id: int = Query(...), article: UploadFile
raise HTTPException(status_code=405, detail="File uploaded must be a PDF.")
await article.seek(0) # 重置文件指针位置

# 用文件名(不带扩展名)作为 Article 名称
name = os.path.splitext(article.filename)[0]

# 新建 Article 记录
article_id = await crud_upload_to_self_folder(name, folder_id, db)

# 存储到云存储位置
os.makedirs("/lhcos-data", exist_ok=True)
save_path = os.path.join("/lhcos-data", f"{article_id}.pdf")
with open(save_path, "wb") as f:
url = f"/lhcos-data/{uuid.uuid4()}.pdf"
with open(url, "wb") as f:
content = await article.read()
f.write(content)

# 用文件名(不带扩展名)作为 Article 名称
name = os.path.splitext(article.filename)[0]

# 新建 Article 记录
article_id = await crud_upload_to_self_folder(name, folder_id, url, db)

return {"msg": "Article created successfully.", "article_id": article_id}

@router.get("/getSelfFolders", response_model="dict")
Expand Down Expand Up @@ -84,26 +85,24 @@ async def self_folder_to_recycle_bin(folder_id: int = Query(...), db: AsyncSessi
return {"msg": "Folder is moved to recycle bin"}

@router.post("/annotateSelfArticle", response_model="dict")
async def annotate_self_article(article_id: int = Query(...), article: UploadFile = File(...)):
async def annotate_self_article(article_id: int = Query(...), article: UploadFile = File(...), db: AsyncSession = Depends(get_db)):
# 将新文件存储到云存储位置
os.makedirs("/lhcos-data", exist_ok=True)
save_path = os.path.join("/lhcos-data", f"{article_id}.pdf")
with open(save_path, "wb") as f:
url = await crud_annotate_self_article(article_id, db)
with open(url, "wb") as f:
content = await article.read()
f.write(content)

return {"msg": "Article annotated successfully."}

@router.get("/readArticle", response_class=FileResponse)
async def read_article(article_id: int = Query(...), db: AsyncSession = Depends(get_db)):
article_name, url = await crud_read_article(article_id, db)
return FileResponse(path=url, filename=f"{article_name}.pdf", media_type='application/pdf')

file_path = f"/lhcos-data/{article_id}.pdf"

# 查询文件名
article_name = await crud_read_article(article_id, db)

# 返回结果
return FileResponse(path=file_path, filename=f"{article_name}.pdf", media_type='application/pdf')
@router.get("/readArticleByUrl", response_model="dict")
async def read_article_by_url(article_id: int = Query(...), db: AsyncSession = Depends(get_db)):
url, update_time = await crud_read_article_by_url(article_id, db)
return {"article_url": url, "update_time": update_time}

@router.post("/importSelfFolder", response_model="dict")
async def import_self_folder(folder_name: str = Query(...), zip: UploadFile = File(...), db: AsyncSession = Depends(get_db), user: dict = Depends(get_current_user)):
Expand All @@ -118,34 +117,30 @@ async def import_self_folder(folder_name: str = Query(...), zip: UploadFile = Fi
zip_file = ZipFile(io.BytesIO(zip_bytes))
article_names = [os.path.splitext(os.path.basename(name))[0] for name in zip_file.namelist() if name.endswith('.pdf')]

# 记入数据库
result = await crud_import_self_folder(folder_name, article_names, user_id, db)

# 存储文献到云存储
urls = [f"/lhcos-data/{uuid.uuid4()}.pdf" for article_name in article_names]
os.makedirs("/lhcos-data", exist_ok=True)
for i in range(0, len(result), 2):
article_id = result[i]
article_name = result[i + 1]
pdf_filename_in_zip = f"{article_name}.pdf"
with zip_file.open(pdf_filename_in_zip) as source_file:
target_path = os.path.join("/lhcos-data", f"{article_id}.pdf")
for i in range(0, len(article_names)):
article_name_in_zip = f"{article_names[i]}.pdf"
with zip_file.open(article_name_in_zip) as source_file:
target_path = urls[i]
with open(target_path, "wb") as out_file:
out_file.write(source_file.read())

# 记入数据库
await crud_import_self_folder(folder_name, article_names, urls, user_id, db)
return {"msg": "Successfully import articles"}

@router.get("/exportSelfFolder", response_class=FileResponse)
async def export_self_folder(background_tasks: BackgroundTasks, folder_id: int = Query(...), db: AsyncSession = Depends(get_db)):
zip_name, article_ids, article_names = await crud_export_self_folder(folder_id, db)
zip_name, article_ids, article_names, article_urls = await crud_export_self_folder(folder_id, db)

tmp_dir = tempfile.gettempdir()
zip_path = os.path.join(tmp_dir, f"{zip_name}.zip")

with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
for article_id, article_name in zip(article_ids, article_names):
pdf_path = os.path.join("/lhcos-data", f"{article_id}.pdf")
arcname = f"{article_name}.pdf" # 压缩包内的文件名
zipf.write(pdf_path, arcname=arcname)
for article_id, article_name, article_url in zip(article_ids, article_names, article_urls):
zipf.write(article_url, arcname=f"{article_name}.pdf") # 将对应位置上的文献写入压缩包,写入时的文件名为文献名

background_tasks.add_task(os.remove, zip_path)

Expand All @@ -159,8 +154,8 @@ async def export_self_folder(background_tasks: BackgroundTasks, folder_id: int =
async def create_tag(article_id: int = Body(...), content: str = Body(...), db: AsyncSession = Depends(get_db)):
if len(content) > 30:
raise HTTPException(status_code=405, detail="Invalid tag content, longer than 30")
await crud_create_tag(article_id, content, db)
return {"msg": "Tag Created Successfully"}
tag_id = await crud_create_tag(article_id, content, db)
return {"msg": "Tag Created Successfully", "tag_id": tag_id}

@router.delete("/deleteTag", response_model="dict")
async def delete_tag(tag_id: int = Query(...), db: AsyncSession = Depends(get_db)):
Expand Down Expand Up @@ -213,7 +208,9 @@ async def items_in_recycle_bin(page_number: Optional[int] = Query(None, ge=1), p

@router.delete("/deleteForever", response_model=dict)
async def delete_forever(type: int = Query(...), id: int = Query(...), db: AsyncSession = Depends(get_db)):
await crud_delete_forever(type, id, db)
article_urls = await crud_delete_forever(type, id, db)
for article_url in article_urls:
os.remove(article_url)
return {"msg": "Item and its child nodes deleted forever successfully"}

@router.post("/recover", response_model=dict)
Expand Down
15 changes: 12 additions & 3 deletions app/api/v1/endpoints/articleDB.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from fastapi import APIRouter, HTTPException, Depends, UploadFile, Form, File
from sqlalchemy.ext.asyncio import AsyncSession
from app.utils.get_db import get_db
from app.schemas.articleDB import UploadArticle, GetArticle, DeLArticle, GetResponse, SearchArticle
from app.curd.articleDB import create_article_in_db, get_article_in_db, get_article_in_db_by_id, get_article_info_in_db_by_id, search_article_in_db
from app.schemas.articleDB import UploadArticle, GetArticle, DeLArticle, GetResponse, SearchArticle, RecommendArticle
from app.curd.articleDB import create_article_in_db, get_article_in_db, get_article_in_db_by_id, get_article_info_in_db_by_id, search_article_in_db, recommend_article_in_db
from app.core.config import settings
import os
import uuid
Expand Down Expand Up @@ -120,6 +120,15 @@ async def copy_article(folder_id: int, article_id: int, db: AsyncSession = Depen
raise HTTPException(status_code=500, detail=str(e))
return {"msg": "Article copied successfully", "new_article_id": new_article_id}



@router.get("/recommend", response_model=dict)
async def recommend_article(recommend_article: RecommendArticle = Depends(), db: AsyncSession = Depends(get_db)):
articles = await recommend_article_in_db(db=db, recommend_article=recommend_article)
return {
"pagination": {
"total_count": recommend_article.size,
},
"articles": [articles.model_dump() for articles in articles]
}


Loading