diff --git a/app/api/endpoints/system.py b/app/api/endpoints/system.py
index 645b507b..489ee415 100644
--- a/app/api/endpoints/system.py
+++ b/app/api/endpoints/system.py
@@ -11,7 +11,6 @@ from typing import Optional, Union, Annotated
 
 import aiofiles
 import pillow_avif  # noqa 用于自动注册AVIF支持
 from PIL import Image
-from app.helper.sites import SitesHelper
 from fastapi import APIRouter, Body, Depends, HTTPException, Header, Request, Response
 from fastapi.responses import StreamingResponse
@@ -30,6 +29,7 @@ from app.helper.mediaserver import MediaServerHelper
 from app.helper.message import MessageHelper
 from app.helper.progress import ProgressHelper
 from app.helper.rule import RuleHelper
+from app.helper.sites import SitesHelper
 from app.helper.subscribe import SubscribeHelper
 from app.helper.system import SystemHelper
 from app.log import logger
@@ -396,8 +396,9 @@ async def get_logging(request: Request, length: Optional[int] = 50, logfile: Opt
         # 返回全部日志作为文本响应
         if not log_path.exists():
             return Response(content="日志文件不存在!", media_type="text/plain")
-        with open(log_path, "r", encoding='utf-8') as file:
-            text = file.read()
+        # 使用 aiofiles 异步读取文件
+        async with aiofiles.open(log_path, mode="r", encoding="utf-8") as file:
+            text = await file.read()
         # 倒序输出
         text = "\n".join(text.split("\n")[::-1])
         return Response(content=text, media_type="text/plain")
diff --git a/app/api/servcookie.py b/app/api/servcookie.py
index 4361db33..13cb13c1 100644
--- a/app/api/servcookie.py
+++ b/app/api/servcookie.py
@@ -2,6 +2,7 @@ import gzip
 import json
 from typing import Annotated, Callable, Any, Dict, Optional
 
+import aiofiles
 from fastapi import APIRouter, Depends, HTTPException, Path, Request, Response
 from fastapi.responses import PlainTextResponse
 from fastapi.routing import APIRoute
@@ -19,7 +20,7 @@ class GzipRequest(Request):
             body = await super().body()
             if "gzip" in self.headers.getlist("Content-Encoding"):
                 body = gzip.decompress(body)
-            self._body = body # noqa
+            self._body = body  # noqa
         return self._body
 
 
@@ -66,17 +67,17 @@ async def update_cookie(req: schemas.CookieData):
     """
     file_path = settings.COOKIE_PATH / f"{req.uuid}.json"
     content = json.dumps({"encrypted": req.encrypted})
-    with open(file_path, encoding="utf-8", mode="w") as file:
-        file.write(content)
-    with open(file_path, encoding="utf-8", mode="r") as file:
-        read_content = file.read()
+    async with aiofiles.open(file_path, encoding="utf-8", mode="w") as file:
+        await file.write(content)
+    async with aiofiles.open(file_path, encoding="utf-8", mode="r") as file:
+        read_content = await file.read()
     if read_content == content:
         return {"action": "done"}
     else:
         return {"action": "error"}
 
 
-def load_encrypt_data(uuid: str) -> Dict[str, Any]:
+async def load_encrypt_data(uuid: str) -> Dict[str, Any]:
     """
     加载本地加密原始数据
     """
@@ -87,8 +88,8 @@ def load_encrypt_data(uuid: str) -> Dict[str, Any]:
         raise HTTPException(status_code=404, detail="Item not found")
 
     # 读取文件
-    with open(file_path, encoding="utf-8", mode="r") as file:
-        read_content = file.read()
+    async with aiofiles.open(file_path, encoding="utf-8", mode="r") as file:
+        read_content = await file.read()
     data = json.loads(read_content.encode("utf-8"))
     return data
 
@@ -120,7 +121,7 @@ async def get_cookie(
     """
     GET 下载加密数据
     """
-    return load_encrypt_data(uuid)
+    return await load_encrypt_data(uuid)
 
 
 @cookie_router.post("/get/{uuid}")
@@ -130,5 +131,5 @@ async def post_cookie(
     """
     POST 下载加密数据
     """
-    data = load_encrypt_data(uuid)
+    data = await load_encrypt_data(uuid)
     return get_decrypted_cookie_data(uuid, request.password, data["encrypted"])
diff --git a/app/helper/async_task.py b/app/helper/async_task.py
new file mode 100644
index 00000000..0519ecba
--- /dev/null
+++ b/app/helper/async_task.py
@@ -0,0 +1 @@
+ 
\ No newline at end of file
diff --git a/app/main.py b/app/main.py
index 9dfcde73..b54565b8 100644
--- a/app/main.py
+++ b/app/main.py
@@ -25,7 +25,7 @@ setproctitle.setproctitle(settings.PROJECT_NAME)
 
 # uvicorn服务
 Server = uvicorn.Server(Config(app, host=settings.HOST, port=settings.PORT,
-                               reload=settings.DEV, workers=multiprocessing.cpu_count(),
+                               reload=settings.DEV, workers=multiprocessing.cpu_count() * 2 + 1,
                                timeout_graceful_shutdown=60))
 
 