feat: fix search and poster serving; add hover overlay UI for cards

- Fix search store exports to match component expectations (inputValue,
  bangumiList, onSearch) and transform data to SearchResult format
- Fix poster endpoint path check that incorrectly blocked all requests
- Add resolvePosterUrl utility to handle both external URLs and local paths
- Move tags into hover overlay on homepage cards and calendar cards
- Show title and tags on poster hover with dark semi-transparent styling
- Add downloader API, store, and page
- Convert backend to async patterns and apply uv migration changes
- Remove .claude/settings.local.json from tracking

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Estrella Pan
2026-01-23 21:20:12 +01:00
parent 0408ecdd61
commit a98a162500
52 changed files with 2269 additions and 1727 deletions

View File

@@ -1,87 +0,0 @@
{
"permissions": {
"allow": [
"Bash(git -C /Users/estrella/Developer/AutoBangumi/Auto_Bangumi log --oneline -10)",
"Bash(git checkout:*)",
"Bash(python -m pytest:*)",
"Bash(python:*)",
"Bash(uv init:*)",
"Bash(uv sync:*)",
"Bash(uv run pytest:*)",
"Skill(feature-dev:feature-dev)",
"Bash(uv pip install:*)",
"Bash(uv run python:*)",
"Bash(curl:*)",
"Bash(pkill:*)",
"Bash(uv pip show:*)",
"Bash(uv pip list:*)",
"Bash(uv lock:*)",
"Bash(lsof:*)",
"Bash(kill:*)",
"Bash(VITE_API_URL=http://localhost:18080 pnpm dev:*)",
"Bash(pnpm dev:*)",
"Bash(pnpm add:*)",
"Bash(git add:*)",
"Bash(git commit:*)",
"Skill(planning-with-files)",
"Bash(../.venv/bin/python -m pytest test/test_database.py -v)",
"Bash(.venv/bin/python:*)",
"Bash(ruff check:*)",
"Bash(ls:*)",
"Bash(/Users/estrella/Developer/AutoBangumi/Auto_Bangumi/backend/.venv/bin/python:*)",
"Bash(uv run ruff check:*)",
"Bash(git rm:*)",
"Bash(git push:*)",
"Bash(gh pr create:*)",
"Bash(gh pr checks:*)",
"Bash(gh run view:*)",
"Bash(git ls-tree:*)",
"Bash(while read f)",
"Bash(do git show \"HEAD:$f\")",
"Bash(done)",
"Bash(git reset:*)",
"Skill(ui-ux-pro-max)",
"Bash(tree:*)",
"Bash(git stash:*)",
"Bash(python3:*)",
"Bash(pnpm build:*)",
"Bash(pnpm install:*)",
"Bash(uv venv:*)",
"Bash(../.venv/bin/python:*)",
"Bash(xargs:*)",
"Skill(agent-browser)",
"Bash(agent-browser open:*)",
"Bash(agent-browser screenshot:*)",
"Bash(agent-browser snapshot:*)",
"Bash(agent-browser eval \"JSON.stringify\\(window.__consoleErrors || ''no errors captured''\\)\")",
"Bash(agent-browser eval:*)",
"Bash(agent-browser close:*)",
"Bash(agent-browser reload:*)",
"Bash(agent-browser fill:*)",
"Bash(agent-browser click:*)",
"Skill(commit-commands:commit)",
"Bash(gh api:*)",
"Bash(git fetch:*)",
"Bash(git rebase:*)",
"Bash(grep:*)",
"Bash(git merge:*)",
"Bash(agent-browser scroll:*)",
"Bash(agent-browser find text \"Passkey Settings\" click)",
"Bash(agent-browser find:*)",
"Bash(agent-browser find text \"添加\" click)",
"Bash(npx vue-tsc:*)",
"Bash(npx vite build:*)",
"WebSearch",
"WebFetch(domain:bangumi.github.io)",
"WebFetch(domain:raw.githubusercontent.com)",
"WebFetch(domain:api.bgm.tv)",
"Bash(__NEW_LINE_e2219f405dac932c__ echo \"\")",
"Bash(__NEW_LINE_7c1bb4605ef4ad2a__ echo \"\")",
"Bash(source ../.venv/bin/activate)",
"Bash(source:*)",
"Bash(npx vite:*)",
"Bash(agent-browser press:*)",
"Bash(agent-browser get:*)"
]
}
}

4
.gitignore vendored
View File

@@ -216,3 +216,7 @@ dev-dist
# test file # test file
test.* test.*
# local config
/backend/config/
.claude/settings.local.json

View File

@@ -40,8 +40,8 @@ app = create_app()
@app.get("/posters/{path:path}", tags=["posters"]) @app.get("/posters/{path:path}", tags=["posters"])
def posters(path: str): def posters(path: str):
# only allow access to files in the posters directory # prevent path traversal
if not path.startswith("posters/"): if ".." in path:
return HTMLResponse(status_code=403) return HTMLResponse(status_code=403)
return FileResponse(f"data/posters/{path}") return FileResponse(f"data/posters/{path}")

View File

@@ -1,33 +1,35 @@
import asyncio
import functools
import logging import logging
import threading
import time
from .timeout import timeout from .timeout import timeout
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
lock = threading.Lock() _lock = asyncio.Lock()
def qb_connect_failed_wait(func): def qb_connect_failed_wait(func):
def wrapper(*args, **kwargs): @functools.wraps(func)
async def wrapper(*args, **kwargs):
times = 0 times = 0
while times < 5: while times < 5:
try: try:
return func(*args, **kwargs) return await func(*args, **kwargs)
except Exception as e: except Exception as e:
logger.debug(f"URL: {args[0]}") logger.debug(f"URL: {args[0]}")
logger.warning(e) logger.warning(e)
logger.warning("Cannot connect to qBittorrent. Wait 5 min and retry...") logger.warning("Cannot connect to qBittorrent. Wait 5 min and retry...")
time.sleep(300) await asyncio.sleep(300)
times += 1 times += 1
return wrapper return wrapper
def api_failed(func): def api_failed(func):
def wrapper(*args, **kwargs): @functools.wraps(func)
async def wrapper(*args, **kwargs):
try: try:
return func(*args, **kwargs) return await func(*args, **kwargs)
except Exception as e: except Exception as e:
logger.debug(f"URL: {args[0]}") logger.debug(f"URL: {args[0]}")
logger.warning("Wrong API response.") logger.warning("Wrong API response.")
@@ -37,8 +39,9 @@ def api_failed(func):
def locked(func): def locked(func):
def wrapper(*args, **kwargs): @functools.wraps(func)
with lock: async def wrapper(*args, **kwargs):
return func(*args, **kwargs) async with _lock:
return await func(*args, **kwargs)
return wrapper return wrapper

View File

@@ -3,6 +3,7 @@ from fastapi import APIRouter
from .auth import router as auth_router from .auth import router as auth_router
from .bangumi import router as bangumi_router from .bangumi import router as bangumi_router
from .config import router as config_router from .config import router as config_router
from .downloader import router as downloader_router
from .log import router as log_router from .log import router as log_router
from .passkey import router as passkey_router from .passkey import router as passkey_router
from .program import router as program_router from .program import router as program_router
@@ -19,5 +20,6 @@ v1.include_router(log_router)
v1.include_router(program_router) v1.include_router(program_router)
v1.include_router(bangumi_router) v1.include_router(bangumi_router)
v1.include_router(config_router) v1.include_router(config_router)
v1.include_router(downloader_router)
v1.include_router(rss_router) v1.include_router(rss_router)
v1.include_router(search_router) v1.include_router(search_router)

View File

@@ -45,7 +45,7 @@ async def update_rule(
data: BangumiUpdate, data: BangumiUpdate,
): ):
with TorrentManager() as manager: with TorrentManager() as manager:
resp = manager.update_rule(bangumi_id, data) resp = await manager.update_rule(bangumi_id, data)
return u_response(resp) return u_response(resp)
@@ -56,7 +56,7 @@ async def update_rule(
) )
async def delete_rule(bangumi_id: str, file: bool = False): async def delete_rule(bangumi_id: str, file: bool = False):
with TorrentManager() as manager: with TorrentManager() as manager:
resp = manager.delete_rule(bangumi_id, file) resp = await manager.delete_rule(bangumi_id, file)
return u_response(resp) return u_response(resp)
@@ -68,7 +68,7 @@ async def delete_rule(bangumi_id: str, file: bool = False):
async def delete_many_rule(bangumi_id: list, file: bool = False): async def delete_many_rule(bangumi_id: list, file: bool = False):
with TorrentManager() as manager: with TorrentManager() as manager:
for i in bangumi_id: for i in bangumi_id:
resp = manager.delete_rule(i, file) resp = await manager.delete_rule(i, file)
return u_response(resp) return u_response(resp)
@@ -79,7 +79,7 @@ async def delete_many_rule(bangumi_id: list, file: bool = False):
) )
async def disable_rule(bangumi_id: str, file: bool = False): async def disable_rule(bangumi_id: str, file: bool = False):
with TorrentManager() as manager: with TorrentManager() as manager:
resp = manager.disable_rule(bangumi_id, file) resp = await manager.disable_rule(bangumi_id, file)
return u_response(resp) return u_response(resp)
@@ -91,7 +91,7 @@ async def disable_rule(bangumi_id: str, file: bool = False):
async def disable_many_rule(bangumi_id: list, file: bool = False): async def disable_many_rule(bangumi_id: list, file: bool = False):
with TorrentManager() as manager: with TorrentManager() as manager:
for i in bangumi_id: for i in bangumi_id:
resp = manager.disable_rule(i, file) resp = await manager.disable_rule(i, file)
return u_response(resp) return u_response(resp)
@@ -111,9 +111,9 @@ async def enable_rule(bangumi_id: str):
response_model=APIResponse, response_model=APIResponse,
dependencies=[Depends(get_current_user)], dependencies=[Depends(get_current_user)],
) )
async def refresh_poster(): async def refresh_poster_all():
with TorrentManager() as manager: with TorrentManager() as manager:
resp = manager.refresh_poster() resp = await manager.refresh_poster()
return u_response(resp) return u_response(resp)
@router.get( @router.get(
@@ -121,9 +121,9 @@ async def refresh_poster():
response_model=APIResponse, response_model=APIResponse,
dependencies=[Depends(get_current_user)], dependencies=[Depends(get_current_user)],
) )
async def refresh_poster(bangumi_id: int): async def refresh_poster_one(bangumi_id: int):
with TorrentManager() as manager: with TorrentManager() as manager:
resp = manager.refind_poster(bangumi_id) resp = await manager.refind_poster(bangumi_id)
return u_response(resp) return u_response(resp)
@@ -134,7 +134,7 @@ async def refresh_poster(bangumi_id: int):
) )
async def refresh_calendar(): async def refresh_calendar():
with TorrentManager() as manager: with TorrentManager() as manager:
resp = manager.refresh_calendar() resp = await manager.refresh_calendar()
return u_response(resp) return u_response(resp)

View File

@@ -0,0 +1,46 @@
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from module.downloader import DownloadClient
from module.security.api import get_current_user
router = APIRouter(prefix="/downloader", tags=["downloader"])
class TorrentHashesRequest(BaseModel):
hashes: list[str]
class TorrentDeleteRequest(BaseModel):
hashes: list[str]
delete_files: bool = False
@router.get("/torrents", dependencies=[Depends(get_current_user)])
async def get_torrents():
async with DownloadClient() as client:
return await client.get_torrent_info(category="Bangumi", status_filter=None)
@router.post("/torrents/pause", dependencies=[Depends(get_current_user)])
async def pause_torrents(req: TorrentHashesRequest):
hashes = "|".join(req.hashes)
async with DownloadClient() as client:
await client.pause_torrent(hashes)
return {"msg_en": "Torrents paused", "msg_zh": "种子已暂停"}
@router.post("/torrents/resume", dependencies=[Depends(get_current_user)])
async def resume_torrents(req: TorrentHashesRequest):
hashes = "|".join(req.hashes)
async with DownloadClient() as client:
await client.resume_torrent(hashes)
return {"msg_en": "Torrents resumed", "msg_zh": "种子已恢复"}
@router.post("/torrents/delete", dependencies=[Depends(get_current_user)])
async def delete_torrents(req: TorrentDeleteRequest):
hashes = "|".join(req.hashes)
async with DownloadClient() as client:
await client.delete_torrent(hashes, delete_files=req.delete_files)
return {"msg_en": "Torrents deleted", "msg_zh": "种子已删除"}

View File

@@ -25,7 +25,7 @@ async def get_rss():
) )
async def add_rss(rss: RSSItem): async def add_rss(rss: RSSItem):
with RSSEngine() as engine: with RSSEngine() as engine:
result = engine.add_rss(rss.url, rss.name, rss.aggregate, rss.parser) result = await engine.add_rss(rss.url, rss.name, rss.aggregate, rss.parser)
return u_response(result) return u_response(result)
@@ -133,12 +133,13 @@ async def update_rss(
dependencies=[Depends(get_current_user)], dependencies=[Depends(get_current_user)],
) )
async def refresh_all(): async def refresh_all():
with RSSEngine() as engine, DownloadClient() as client: async with DownloadClient() as client:
engine.refresh_rss(client) with RSSEngine() as engine:
return JSONResponse( await engine.refresh_rss(client)
status_code=200, return JSONResponse(
content={"msg_en": "Refresh all RSS successfully.", "msg_zh": "刷新 RSS 成功。"}, status_code=200,
) content={"msg_en": "Refresh all RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
)
@router.get( @router.get(
@@ -147,12 +148,13 @@ async def refresh_all():
dependencies=[Depends(get_current_user)], dependencies=[Depends(get_current_user)],
) )
async def refresh_rss(rss_id: int): async def refresh_rss(rss_id: int):
with RSSEngine() as engine, DownloadClient() as client: async with DownloadClient() as client:
engine.refresh_rss(client, rss_id) with RSSEngine() as engine:
return JSONResponse( await engine.refresh_rss(client, rss_id)
status_code=200, return JSONResponse(
content={"msg_en": "Refresh RSS successfully.", "msg_zh": "刷新 RSS 成功。"}, status_code=200,
) content={"msg_en": "Refresh RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
)
@router.get( @router.get(
@@ -175,7 +177,7 @@ analyser = RSSAnalyser()
"/analysis", response_model=Bangumi, dependencies=[Depends(get_current_user)] "/analysis", response_model=Bangumi, dependencies=[Depends(get_current_user)]
) )
async def analysis(rss: RSSItem): async def analysis(rss: RSSItem):
data = analyser.link_to_data(rss) data = await analyser.link_to_data(rss)
if isinstance(data, Bangumi): if isinstance(data, Bangumi):
return data return data
else: else:
@@ -186,8 +188,8 @@ async def analysis(rss: RSSItem):
"/collect", response_model=APIResponse, dependencies=[Depends(get_current_user)] "/collect", response_model=APIResponse, dependencies=[Depends(get_current_user)]
) )
async def download_collection(data: Bangumi): async def download_collection(data: Bangumi):
with SeasonCollector() as collector: async with SeasonCollector() as collector:
resp = collector.collect_season(data, data.rss_link) resp = await collector.collect_season(data, data.rss_link)
return u_response(resp) return u_response(resp)
@@ -195,6 +197,5 @@ async def download_collection(data: Bangumi):
"/subscribe", response_model=APIResponse, dependencies=[Depends(get_current_user)] "/subscribe", response_model=APIResponse, dependencies=[Depends(get_current_user)]
) )
async def subscribe(data: Bangumi, rss: RSSItem): async def subscribe(data: Bangumi, rss: RSSItem):
with SeasonCollector() as collector: resp = await SeasonCollector.subscribe_season(data, parser=rss.parser)
resp = collector.subscribe_season(data, parser=rss.parser) return u_response(resp)
return u_response(resp)

View File

@@ -18,10 +18,13 @@ async def search_torrents(site: str = "mikan", keywords: str = Query(None)):
if not keywords: if not keywords:
return [] return []
keywords = keywords.split(" ") keywords = keywords.split(" ")
with SearchTorrent() as st:
return EventSourceResponse( async def event_generator():
content=st.analyse_keyword(keywords=keywords, site=site), async with SearchTorrent() as st:
) async for item in st.analyse_keyword(keywords=keywords, site=site):
yield item
return EventSourceResponse(content=event_generator())
@router.get( @router.get(

View File

@@ -1,14 +1,14 @@
import logging
import asyncio import asyncio
import logging
from module.conf import VERSION, settings from module.conf import VERSION, settings
from module.models import ResponseModel from module.models import ResponseModel
from module.update import ( from module.update import (
cache_image,
data_migration, data_migration,
first_run, first_run,
from_30_to_31, from_30_to_31,
start_up, start_up,
cache_image,
) )
from .sub_thread import RenameThread, RSSThread from .sub_thread import RenameThread, RSSThread
@@ -51,11 +51,11 @@ class Program(RenameThread, RSSThread):
data_migration() data_migration()
elif self.version_update: elif self.version_update:
# Update database # Update database
from_30_to_31() await from_30_to_31()
logger.info("[Core] Database updated.") logger.info("[Core] Database updated.")
if not self.img_cache: if not self.img_cache:
logger.info("[Core] No image cache exists, create image cache.") logger.info("[Core] No image cache exists, create image cache.")
cache_image() await cache_image()
await self.start() await self.start()
async def start(self): async def start(self):

View File

@@ -1,5 +1,4 @@
import threading import asyncio
import time
from module.conf import settings from module.conf import settings
from module.downloader import DownloadClient from module.downloader import DownloadClient
@@ -13,69 +12,74 @@ from .status import ProgramStatus
class RSSThread(ProgramStatus): class RSSThread(ProgramStatus):
def __init__(self): def __init__(self):
super().__init__() super().__init__()
self._rss_thread = threading.Thread( self._rss_task: asyncio.Task | None = None
target=self.rss_loop,
)
self.analyser = RSSAnalyser() self.analyser = RSSAnalyser()
def rss_loop(self): async def rss_loop(self):
while not self.stop_event.is_set(): while not self.stop_event.is_set():
with DownloadClient() as client, RSSEngine() as engine: async with DownloadClient() as client:
# Analyse RSS with RSSEngine() as engine:
rss_list = engine.rss.search_aggregate() # Analyse RSS
for rss in rss_list: rss_list = engine.rss.search_aggregate()
self.analyser.rss_to_data(rss, engine) for rss in rss_list:
# Run RSS Engine await self.analyser.rss_to_data(rss, engine)
engine.refresh_rss(client) # Run RSS Engine
await engine.refresh_rss(client)
if settings.bangumi_manage.eps_complete: if settings.bangumi_manage.eps_complete:
eps_complete() await eps_complete()
self.stop_event.wait(settings.program.rss_time) try:
await asyncio.wait_for(
self.stop_event.wait(),
timeout=settings.program.rss_time,
)
except asyncio.TimeoutError:
pass
def rss_start(self): def rss_start(self):
self.rss_thread.start() self._rss_task = asyncio.create_task(self.rss_loop())
def rss_stop(self): async def rss_stop(self):
if self._rss_thread.is_alive(): if self._rss_task and not self._rss_task.done():
self._rss_thread.join() self.stop_event.set()
self._rss_task.cancel()
@property try:
def rss_thread(self): await self._rss_task
if not self._rss_thread.is_alive(): except asyncio.CancelledError:
self._rss_thread = threading.Thread( pass
target=self.rss_loop, self._rss_task = None
)
return self._rss_thread
class RenameThread(ProgramStatus): class RenameThread(ProgramStatus):
def __init__(self): def __init__(self):
super().__init__() super().__init__()
self._rename_thread = threading.Thread( self._rename_task: asyncio.Task | None = None
target=self.rename_loop,
)
def rename_loop(self): async def rename_loop(self):
while not self.stop_event.is_set(): while not self.stop_event.is_set():
with Renamer() as renamer: async with Renamer() as renamer:
renamed_info = renamer.rename() renamed_info = await renamer.rename()
if settings.notification.enable: if settings.notification.enable:
with PostNotification() as notifier: async with PostNotification() as notifier:
for info in renamed_info: for info in renamed_info:
notifier.send_msg(info) await notifier.send_msg(info)
time.sleep(2) await asyncio.sleep(2)
self.stop_event.wait(settings.program.rename_time) try:
await asyncio.wait_for(
self.stop_event.wait(),
timeout=settings.program.rename_time,
)
except asyncio.TimeoutError:
pass
def rename_start(self): def rename_start(self):
self.rename_thread.start() self._rename_task = asyncio.create_task(self.rename_loop())
def rename_stop(self): async def rename_stop(self):
if self._rename_thread.is_alive(): if self._rename_task and not self._rename_task.done():
self._rename_thread.join() self.stop_event.set()
self._rename_task.cancel()
@property try:
def rename_thread(self): await self._rename_task
if not self._rename_thread.is_alive(): except asyncio.CancelledError:
self._rename_thread = threading.Thread( pass
target=self.rename_loop, self._rename_task = None
)
return self._rename_thread

View File

@@ -2,9 +2,8 @@ import logging
import time import time
from typing import Optional from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.sql import func from sqlalchemy.sql import func
from sqlmodel import and_, delete, false, or_, select from sqlmodel import Session, and_, delete, false, or_, select
from module.models import Bangumi, BangumiUpdate from module.models import Bangumi, BangumiUpdate
@@ -23,32 +22,32 @@ def _invalidate_bangumi_cache():
class BangumiDatabase: class BangumiDatabase:
def __init__(self, session: AsyncSession): def __init__(self, session: Session):
self.session = session self.session = session
async def add(self, data: Bangumi) -> bool: def add(self, data: Bangumi) -> bool:
statement = select(Bangumi).where(Bangumi.title_raw == data.title_raw) statement = select(Bangumi).where(Bangumi.title_raw == data.title_raw)
result = await self.session.execute(statement) result = self.session.execute(statement)
bangumi = result.scalar_one_or_none() bangumi = result.scalar_one_or_none()
if bangumi: if bangumi:
return False return False
self.session.add(data) self.session.add(data)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Insert {data.official_title} into database.") logger.debug(f"[Database] Insert {data.official_title} into database.")
return True return True
async def add_all(self, datas: list[Bangumi]): def add_all(self, datas: list[Bangumi]):
self.session.add_all(datas) self.session.add_all(datas)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Insert {len(datas)} bangumi into database.") logger.debug(f"[Database] Insert {len(datas)} bangumi into database.")
async def update(self, data: Bangumi | BangumiUpdate, _id: int = None) -> bool: def update(self, data: Bangumi | BangumiUpdate, _id: int = None) -> bool:
if _id and isinstance(data, BangumiUpdate): if _id and isinstance(data, BangumiUpdate):
db_data = await self.session.get(Bangumi, _id) db_data = self.session.get(Bangumi, _id)
elif isinstance(data, Bangumi): elif isinstance(data, Bangumi):
db_data = await self.session.get(Bangumi, data.id) db_data = self.session.get(Bangumi, data.id)
else: else:
return False return False
if not db_data: if not db_data:
@@ -57,70 +56,70 @@ class BangumiDatabase:
for key, value in bangumi_data.items(): for key, value in bangumi_data.items():
setattr(db_data, key, value) setattr(db_data, key, value)
self.session.add(db_data) self.session.add(db_data)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Update {data.official_title}") logger.debug(f"[Database] Update {data.official_title}")
return True return True
async def update_all(self, datas: list[Bangumi]): def update_all(self, datas: list[Bangumi]):
self.session.add_all(datas) self.session.add_all(datas)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Update {len(datas)} bangumi.") logger.debug(f"[Database] Update {len(datas)} bangumi.")
async def update_rss(self, title_raw: str, rss_set: str): def update_rss(self, title_raw: str, rss_set: str):
statement = select(Bangumi).where(Bangumi.title_raw == title_raw) statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
result = await self.session.execute(statement) result = self.session.execute(statement)
bangumi = result.scalar_one_or_none() bangumi = result.scalar_one_or_none()
if bangumi: if bangumi:
bangumi.rss_link = rss_set bangumi.rss_link = rss_set
bangumi.added = False bangumi.added = False
self.session.add(bangumi) self.session.add(bangumi)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Update {title_raw} rss_link to {rss_set}.") logger.debug(f"[Database] Update {title_raw} rss_link to {rss_set}.")
async def update_poster(self, title_raw: str, poster_link: str): def update_poster(self, title_raw: str, poster_link: str):
statement = select(Bangumi).where(Bangumi.title_raw == title_raw) statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
result = await self.session.execute(statement) result = self.session.execute(statement)
bangumi = result.scalar_one_or_none() bangumi = result.scalar_one_or_none()
if bangumi: if bangumi:
bangumi.poster_link = poster_link bangumi.poster_link = poster_link
self.session.add(bangumi) self.session.add(bangumi)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Update {title_raw} poster_link to {poster_link}.") logger.debug(f"[Database] Update {title_raw} poster_link to {poster_link}.")
async def delete_one(self, _id: int): def delete_one(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id) statement = select(Bangumi).where(Bangumi.id == _id)
result = await self.session.execute(statement) result = self.session.execute(statement)
bangumi = result.scalar_one_or_none() bangumi = result.scalar_one_or_none()
if bangumi: if bangumi:
await self.session.delete(bangumi) self.session.delete(bangumi)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Delete bangumi id: {_id}.") logger.debug(f"[Database] Delete bangumi id: {_id}.")
async def delete_all(self): def delete_all(self):
statement = delete(Bangumi) statement = delete(Bangumi)
await self.session.execute(statement) self.session.execute(statement)
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
async def search_all(self) -> list[Bangumi]: def search_all(self) -> list[Bangumi]:
global _bangumi_cache, _bangumi_cache_time global _bangumi_cache, _bangumi_cache_time
now = time.time() now = time.time()
if _bangumi_cache is not None and (now - _bangumi_cache_time) < _BANGUMI_CACHE_TTL: if _bangumi_cache is not None and (now - _bangumi_cache_time) < _BANGUMI_CACHE_TTL:
return _bangumi_cache return _bangumi_cache
statement = select(Bangumi) statement = select(Bangumi)
result = await self.session.execute(statement) result = self.session.execute(statement)
_bangumi_cache = list(result.scalars().all()) _bangumi_cache = list(result.scalars().all())
_bangumi_cache_time = now _bangumi_cache_time = now
return _bangumi_cache return _bangumi_cache
async def search_id(self, _id: int) -> Optional[Bangumi]: def search_id(self, _id: int) -> Optional[Bangumi]:
statement = select(Bangumi).where(Bangumi.id == _id) statement = select(Bangumi).where(Bangumi.id == _id)
result = await self.session.execute(statement) result = self.session.execute(statement)
bangumi = result.scalar_one_or_none() bangumi = result.scalar_one_or_none()
if bangumi is None: if bangumi is None:
logger.warning(f"[Database] Cannot find bangumi id: {_id}.") logger.warning(f"[Database] Cannot find bangumi id: {_id}.")
@@ -129,19 +128,19 @@ class BangumiDatabase:
logger.debug(f"[Database] Find bangumi id: {_id}.") logger.debug(f"[Database] Find bangumi id: {_id}.")
return bangumi return bangumi
async def match_poster(self, bangumi_name: str) -> str: def match_poster(self, bangumi_name: str) -> str:
statement = select(Bangumi).where( statement = select(Bangumi).where(
func.instr(bangumi_name, Bangumi.official_title) > 0 func.instr(bangumi_name, Bangumi.official_title) > 0
) )
result = await self.session.execute(statement) result = self.session.execute(statement)
data = result.scalar_one_or_none() data = result.scalar_one_or_none()
if data: if data:
return data.poster_link return data.poster_link
else: else:
return "" return ""
async def match_list(self, torrent_list: list, rss_link: str) -> list: def match_list(self, torrent_list: list, rss_link: str) -> list:
match_datas = await self.search_all() match_datas = self.search_all()
if not match_datas: if not match_datas:
return torrent_list return torrent_list
# Build index for faster lookup # Build index for faster lookup
@@ -162,29 +161,29 @@ class BangumiDatabase:
unmatched.append(torrent) unmatched.append(torrent)
# Batch commit all rss_link updates # Batch commit all rss_link updates
if rss_updated: if rss_updated:
await self.session.commit() self.session.commit()
_invalidate_bangumi_cache() _invalidate_bangumi_cache()
logger.debug(f"[Database] Batch updated rss_link for {len(rss_updated)} bangumi.") logger.debug(f"[Database] Batch updated rss_link for {len(rss_updated)} bangumi.")
return unmatched return unmatched
async def match_torrent(self, torrent_name: str) -> Optional[Bangumi]: def match_torrent(self, torrent_name: str) -> Optional[Bangumi]:
statement = select(Bangumi).where( statement = select(Bangumi).where(
and_( and_(
func.instr(torrent_name, Bangumi.title_raw) > 0, func.instr(torrent_name, Bangumi.title_raw) > 0,
Bangumi.deleted == false(), Bangumi.deleted == false(),
) )
) )
result = await self.session.execute(statement) result = self.session.execute(statement)
return result.scalar_one_or_none() return result.scalar_one_or_none()
async def not_complete(self) -> list[Bangumi]: def not_complete(self) -> list[Bangumi]:
condition = select(Bangumi).where( condition = select(Bangumi).where(
and_(Bangumi.eps_collect == false(), Bangumi.deleted == false()) and_(Bangumi.eps_collect == false(), Bangumi.deleted == false())
) )
result = await self.session.execute(condition) result = self.session.execute(condition)
return list(result.scalars().all()) return list(result.scalars().all())
async def not_added(self) -> list[Bangumi]: def not_added(self) -> list[Bangumi]:
conditions = select(Bangumi).where( conditions = select(Bangumi).where(
or_( or_(
Bangumi.added == 0, Bangumi.added == 0,
@@ -192,20 +191,20 @@ class BangumiDatabase:
Bangumi.save_path is None, Bangumi.save_path is None,
) )
) )
result = await self.session.execute(conditions) result = self.session.execute(conditions)
return list(result.scalars().all()) return list(result.scalars().all())
async def disable_rule(self, _id: int): def disable_rule(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id) statement = select(Bangumi).where(Bangumi.id == _id)
result = await self.session.execute(statement) result = self.session.execute(statement)
bangumi = result.scalar_one_or_none() bangumi = result.scalar_one_or_none()
if bangumi: if bangumi:
bangumi.deleted = True bangumi.deleted = True
self.session.add(bangumi) self.session.add(bangumi)
await self.session.commit() self.session.commit()
logger.debug(f"[Database] Disable rule {bangumi.title_raw}.") logger.debug(f"[Database] Disable rule {bangumi.title_raw}.")
async def search_rss(self, rss_link: str) -> list[Bangumi]: def search_rss(self, rss_link: str) -> list[Bangumi]:
statement = select(Bangumi).where(func.instr(rss_link, Bangumi.rss_link) > 0) statement = select(Bangumi).where(func.instr(rss_link, Bangumi.rss_link) > 0)
result = await self.session.execute(statement) result = self.session.execute(statement)
return list(result.scalars().all()) return list(result.scalars().all())

View File

@@ -1,7 +1,6 @@
import logging import logging
from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import Session, and_, delete, select
from sqlmodel import and_, delete, select
from module.models import RSSItem, RSSUpdate from module.models import RSSItem, RSSUpdate
@@ -9,12 +8,12 @@ logger = logging.getLogger(__name__)
class RSSDatabase: class RSSDatabase:
def __init__(self, session: AsyncSession): def __init__(self, session: Session):
self.session = session self.session = session
async def add(self, data: RSSItem) -> bool: def add(self, data: RSSItem) -> bool:
statement = select(RSSItem).where(RSSItem.url == data.url) statement = select(RSSItem).where(RSSItem.url == data.url)
result = await self.session.execute(statement) result = self.session.execute(statement)
db_data = result.scalar_one_or_none() db_data = result.scalar_one_or_none()
if db_data: if db_data:
logger.debug(f"RSS Item {data.url} already exists.") logger.debug(f"RSS Item {data.url} already exists.")
@@ -22,26 +21,26 @@ class RSSDatabase:
else: else:
logger.debug(f"RSS Item {data.url} not exists, adding...") logger.debug(f"RSS Item {data.url} not exists, adding...")
self.session.add(data) self.session.add(data)
await self.session.commit() self.session.commit()
await self.session.refresh(data) self.session.refresh(data)
return True return True
async def add_all(self, data: list[RSSItem]): def add_all(self, data: list[RSSItem]):
if not data: if not data:
return return
urls = [item.url for item in data] urls = [item.url for item in data]
statement = select(RSSItem.url).where(RSSItem.url.in_(urls)) statement = select(RSSItem.url).where(RSSItem.url.in_(urls))
result = await self.session.execute(statement) result = self.session.execute(statement)
existing_urls = set(result.scalars().all()) existing_urls = set(result.scalars().all())
new_items = [item for item in data if item.url not in existing_urls] new_items = [item for item in data if item.url not in existing_urls]
if new_items: if new_items:
self.session.add_all(new_items) self.session.add_all(new_items)
await self.session.commit() self.session.commit()
logger.debug(f"Batch inserted {len(new_items)} RSS items.") logger.debug(f"Batch inserted {len(new_items)} RSS items.")
async def update(self, _id: int, data: RSSUpdate) -> bool: def update(self, _id: int, data: RSSUpdate) -> bool:
statement = select(RSSItem).where(RSSItem.id == _id) statement = select(RSSItem).where(RSSItem.id == _id)
result = await self.session.execute(statement) result = self.session.execute(statement)
db_data = result.scalar_one_or_none() db_data = result.scalar_one_or_none()
if not db_data: if not db_data:
return False return False
@@ -49,61 +48,61 @@ class RSSDatabase:
for key, value in dict_data.items(): for key, value in dict_data.items():
setattr(db_data, key, value) setattr(db_data, key, value)
self.session.add(db_data) self.session.add(db_data)
await self.session.commit() self.session.commit()
return True return True
async def enable(self, _id: int) -> bool: def enable(self, _id: int) -> bool:
statement = select(RSSItem).where(RSSItem.id == _id) statement = select(RSSItem).where(RSSItem.id == _id)
result = await self.session.execute(statement) result = self.session.execute(statement)
db_data = result.scalar_one_or_none() db_data = result.scalar_one_or_none()
if not db_data: if not db_data:
return False return False
db_data.enabled = True db_data.enabled = True
self.session.add(db_data) self.session.add(db_data)
await self.session.commit() self.session.commit()
return True return True
async def disable(self, _id: int) -> bool: def disable(self, _id: int) -> bool:
statement = select(RSSItem).where(RSSItem.id == _id) statement = select(RSSItem).where(RSSItem.id == _id)
result = await self.session.execute(statement) result = self.session.execute(statement)
db_data = result.scalar_one_or_none() db_data = result.scalar_one_or_none()
if not db_data: if not db_data:
return False return False
db_data.enabled = False db_data.enabled = False
self.session.add(db_data) self.session.add(db_data)
await self.session.commit() self.session.commit()
return True return True
async def search_id(self, _id: int) -> RSSItem | None: def search_id(self, _id: int) -> RSSItem | None:
return await self.session.get(RSSItem, _id) return self.session.get(RSSItem, _id)
async def search_all(self) -> list[RSSItem]: def search_all(self) -> list[RSSItem]:
result = await self.session.execute(select(RSSItem)) result = self.session.execute(select(RSSItem))
return list(result.scalars().all()) return list(result.scalars().all())
async def search_active(self) -> list[RSSItem]: def search_active(self) -> list[RSSItem]:
result = await self.session.execute( result = self.session.execute(
select(RSSItem).where(RSSItem.enabled) select(RSSItem).where(RSSItem.enabled)
) )
return list(result.scalars().all()) return list(result.scalars().all())
async def search_aggregate(self) -> list[RSSItem]: def search_aggregate(self) -> list[RSSItem]:
result = await self.session.execute( result = self.session.execute(
select(RSSItem).where(and_(RSSItem.aggregate, RSSItem.enabled)) select(RSSItem).where(and_(RSSItem.aggregate, RSSItem.enabled))
) )
return list(result.scalars().all()) return list(result.scalars().all())
async def delete(self, _id: int) -> bool: def delete(self, _id: int) -> bool:
condition = delete(RSSItem).where(RSSItem.id == _id) condition = delete(RSSItem).where(RSSItem.id == _id)
try: try:
await self.session.execute(condition) self.session.execute(condition)
await self.session.commit() self.session.commit()
return True return True
except Exception as e: except Exception as e:
logger.error(f"Delete RSS Item failed. Because: {e}") logger.error(f"Delete RSS Item failed. Because: {e}")
return False return False
async def delete_all(self): def delete_all(self):
condition = delete(RSSItem) condition = delete(RSSItem)
await self.session.execute(condition) self.session.execute(condition)
await self.session.commit() self.session.commit()

View File

@@ -1,7 +1,6 @@
import logging import logging
from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import Session, select
from sqlmodel import select
from module.models import Torrent from module.models import Torrent
@@ -9,54 +8,54 @@ logger = logging.getLogger(__name__)
class TorrentDatabase: class TorrentDatabase:
def __init__(self, session: AsyncSession): def __init__(self, session: Session):
self.session = session self.session = session
async def add(self, data: Torrent): def add(self, data: Torrent):
self.session.add(data) self.session.add(data)
await self.session.commit() self.session.commit()
logger.debug(f"Insert {data.name} in database.") logger.debug(f"Insert {data.name} in database.")
async def add_all(self, datas: list[Torrent]): def add_all(self, datas: list[Torrent]):
self.session.add_all(datas) self.session.add_all(datas)
await self.session.commit() self.session.commit()
logger.debug(f"Insert {len(datas)} torrents in database.") logger.debug(f"Insert {len(datas)} torrents in database.")
async def update(self, data: Torrent): def update(self, data: Torrent):
self.session.add(data) self.session.add(data)
await self.session.commit() self.session.commit()
logger.debug(f"Update {data.name} in database.") logger.debug(f"Update {data.name} in database.")
async def update_all(self, datas: list[Torrent]): def update_all(self, datas: list[Torrent]):
self.session.add_all(datas) self.session.add_all(datas)
await self.session.commit() self.session.commit()
async def update_one_user(self, data: Torrent): def update_one_user(self, data: Torrent):
self.session.add(data) self.session.add(data)
await self.session.commit() self.session.commit()
logger.debug(f"Update {data.name} in database.") logger.debug(f"Update {data.name} in database.")
async def search(self, _id: int) -> Torrent | None: def search(self, _id: int) -> Torrent | None:
result = await self.session.execute( result = self.session.execute(
select(Torrent).where(Torrent.id == _id) select(Torrent).where(Torrent.id == _id)
) )
return result.scalar_one_or_none() return result.scalar_one_or_none()
async def search_all(self) -> list[Torrent]: def search_all(self) -> list[Torrent]:
result = await self.session.execute(select(Torrent)) result = self.session.execute(select(Torrent))
return list(result.scalars().all()) return list(result.scalars().all())
async def search_rss(self, rss_id: int) -> list[Torrent]: def search_rss(self, rss_id: int) -> list[Torrent]:
result = await self.session.execute( result = self.session.execute(
select(Torrent).where(Torrent.rss_id == rss_id) select(Torrent).where(Torrent.rss_id == rss_id)
) )
return list(result.scalars().all()) return list(result.scalars().all())
async def check_new(self, torrents_list: list[Torrent]) -> list[Torrent]: def check_new(self, torrents_list: list[Torrent]) -> list[Torrent]:
if not torrents_list: if not torrents_list:
return [] return []
urls = [t.url for t in torrents_list] urls = [t.url for t in torrents_list]
statement = select(Torrent.url).where(Torrent.url.in_(urls)) statement = select(Torrent.url).where(Torrent.url.in_(urls))
result = await self.session.execute(statement) result = self.session.execute(statement)
existing_urls = set(result.scalars().all()) existing_urls = set(result.scalars().all())
return [t for t in torrents_list if t.url not in existing_urls] return [t for t in torrents_list if t.url not in existing_urls]

View File

@@ -1,8 +1,7 @@
import logging import logging
from fastapi import HTTPException from fastapi import HTTPException
from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import Session, select
from sqlmodel import select
from module.models import ResponseModel from module.models import ResponseModel
from module.models.user import User, UserUpdate from module.models.user import User, UserUpdate
@@ -12,21 +11,21 @@ logger = logging.getLogger(__name__)
class UserDatabase: class UserDatabase:
def __init__(self, session: AsyncSession): def __init__(self, session: Session):
self.session = session self.session = session
async def get_user(self, username: str) -> User: def get_user(self, username: str) -> User:
statement = select(User).where(User.username == username) statement = select(User).where(User.username == username)
result = await self.session.execute(statement) result = self.session.exec(statement)
user = result.scalar_one_or_none() user = result.first()
if not user: if not user:
raise HTTPException(status_code=404, detail="User not found") raise HTTPException(status_code=404, detail="User not found")
return user return user
async def auth_user(self, user: User) -> ResponseModel: def auth_user(self, user: User) -> ResponseModel:
statement = select(User).where(User.username == user.username) statement = select(User).where(User.username == user.username)
result = await self.session.execute(statement) result = self.session.exec(statement)
db_user = result.scalar_one_or_none() db_user = result.first()
if not user.password: if not user.password:
return ResponseModel( return ResponseModel(
status_code=401, status_code=401,
@@ -55,10 +54,10 @@ class UserDatabase:
msg_zh="登录成功", msg_zh="登录成功",
) )
async def update_user(self, username: str, update_user: UserUpdate) -> User: def update_user(self, username: str, update_user: UserUpdate) -> User:
statement = select(User).where(User.username == username) statement = select(User).where(User.username == username)
result = await self.session.execute(statement) result = self.session.exec(statement)
db_user = result.scalar_one_or_none() db_user = result.first()
if not db_user: if not db_user:
raise HTTPException(status_code=404, detail="User not found") raise HTTPException(status_code=404, detail="User not found")
if update_user.username: if update_user.username:
@@ -66,18 +65,18 @@ class UserDatabase:
if update_user.password: if update_user.password:
db_user.password = get_password_hash(update_user.password) db_user.password = get_password_hash(update_user.password)
self.session.add(db_user) self.session.add(db_user)
await self.session.commit() self.session.commit()
return db_user return db_user
async def add_default_user(self): def add_default_user(self):
statement = select(User) statement = select(User)
try: try:
result = await self.session.execute(statement) result = self.session.exec(statement)
users = list(result.scalars().all()) users = list(result.all())
except Exception: except Exception:
users = [] users = []
if len(users) != 0: if len(users) != 0:
return return
user = User(username="admin", password=get_password_hash("adminadmin")) user = User(username="admin", password=get_password_hash("adminadmin"))
self.session.add(user) self.session.add(user)
await self.session.commit() self.session.commit()

View File

@@ -1,29 +1,69 @@
import asyncio
import logging import logging
import time
from aria2p import API, Client, ClientException import httpx
from module.conf import settings from module.conf import settings
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class QbDownloader: class Aria2Downloader:
def __init__(self, host, username, password): def __init__(self, host: str, username: str, password: str):
while True: self.host = host
try: self.secret = password
self._client = API(Client(host=host, port=6800, secret=password)) self._client: httpx.AsyncClient | None = None
break self._rpc_url = f"{host}/jsonrpc"
except ClientException: self._id = 0
logger.warning(
f"Can't login Aria2 Server {host} by {username}, retry in {settings.connect_retry_interval}"
)
time.sleep(settings.connect_retry_interval)
def torrents_add(self, urls, save_path, category): async def _call(self, method: str, params: list = None):
return self._client.add_torrent( self._id += 1
is_paused=settings.dev_debug, if params is None:
torrent_file_path=urls, params = []
save_path=save_path, # Prepend token
category=category, full_params = [f"token:{self.secret}"] + params
) payload = {
"jsonrpc": "2.0",
"id": self._id,
"method": f"aria2.{method}",
"params": full_params,
}
resp = await self._client.post(self._rpc_url, json=payload)
result = resp.json()
if "error" in result:
raise Exception(f"Aria2 RPC error: {result['error']}")
return result.get("result")
async def auth(self, retry=3):
self._client = httpx.AsyncClient(timeout=httpx.Timeout(connect=3.1, read=10.0, write=10.0, pool=10.0))
times = 0
while times < retry:
try:
await self._call("getVersion")
return True
except Exception as e:
logger.warning(
f"Can't login Aria2 Server {self.host}, retry in 5 seconds. Error: {e}"
)
await asyncio.sleep(5)
times += 1
return False
async def logout(self):
if self._client:
await self._client.aclose()
self._client = None
async def add_torrents(self, torrent_urls, torrent_files, save_path, category):
import base64
options = {"dir": save_path}
if torrent_urls:
urls = torrent_urls if isinstance(torrent_urls, list) else [torrent_urls]
for url in urls:
await self._call("addUri", [[url], options])
if torrent_files:
files = torrent_files if isinstance(torrent_files, list) else [torrent_files]
for f in files:
b64 = base64.b64encode(f).decode()
await self._call("addTorrent", [b64, [], options])
return True

View File

@@ -1,12 +1,7 @@
import asyncio
import logging import logging
import time
from qbittorrentapi import Client, LoginFailed import httpx
from qbittorrentapi.exceptions import (
APIConnectionError,
Conflict409Error,
Forbidden403Error,
)
from module.ab_decorator import qb_connect_failed_wait from module.ab_decorator import qb_connect_failed_wait
@@ -15,138 +10,233 @@ logger = logging.getLogger(__name__)
class QbDownloader: class QbDownloader:
def __init__(self, host: str, username: str, password: str, ssl: bool): def __init__(self, host: str, username: str, password: str, ssl: bool):
self._client: Client = Client( if "://" not in host:
host=host, scheme = "https" if ssl else "http"
username=username, self.host = f"{scheme}://{host}"
password=password, else:
VERIFY_WEBUI_CERTIFICATE=ssl, self.host = host
DISABLE_LOGGING_DEBUG_OUTPUT=True,
REQUESTS_ARGS={"timeout": (3.1, 10)},
)
self.host = host
self.username = username self.username = username
self.password = password
self.ssl = ssl
self._client: httpx.AsyncClient | None = None
def auth(self, retry=3): def _url(self, endpoint: str) -> str:
return f"{self.host}/api/v2/{endpoint}"
async def auth(self, retry=3):
times = 0 times = 0
timeout = httpx.Timeout(connect=3.1, read=10.0, write=10.0, pool=10.0)
self._client = httpx.AsyncClient(
timeout=timeout, verify=self.ssl
)
while times < retry: while times < retry:
try: try:
self._client.auth_log_in() resp = await self._client.post(
return True self._url("auth/login"),
except LoginFailed: data={"username": self.username, "password": self.password},
logger.error(
f"Can't login qBittorrent Server {self.host} by {self.username}, retry in {5} seconds."
) )
time.sleep(5) if resp.status_code == 200 and resp.text == "Ok.":
times += 1 return True
except Forbidden403Error: elif resp.status_code == 403:
logger.error("Login refused by qBittorrent Server") logger.error("Login refused by qBittorrent Server")
logger.info("Please release the IP in qBittorrent Server") logger.info("Please release the IP in qBittorrent Server")
break break
except APIConnectionError: else:
logger.error(
f"Can't login qBittorrent Server {self.host} by {self.username}, retry in 5 seconds."
)
await asyncio.sleep(5)
times += 1
except httpx.ConnectError:
logger.error("Cannot connect to qBittorrent Server") logger.error("Cannot connect to qBittorrent Server")
logger.info("Please check the IP and port in WebUI settings") logger.info("Please check the IP and port in WebUI settings")
time.sleep(10) await asyncio.sleep(10)
times += 1 times += 1
except Exception as e: except Exception as e:
logger.error(f"Unknown error: {e}") logger.error(f"Unknown error: {e}")
break break
return False return False
def logout(self): async def logout(self):
self._client.auth_log_out() if self._client:
try:
await self._client.post(self._url("auth/logout"))
except Exception:
pass
await self._client.aclose()
self._client = None
def check_host(self): async def check_host(self):
try: try:
self._client.app_version() resp = await self._client.get(self._url("app/version"))
return True return resp.status_code == 200
except APIConnectionError: except (httpx.ConnectError, httpx.RequestError):
return False return False
def check_rss(self, rss_link: str): def check_rss(self, rss_link: str):
pass pass
@qb_connect_failed_wait @qb_connect_failed_wait
def prefs_init(self, prefs): async def prefs_init(self, prefs):
return self._client.app_set_preferences(prefs=prefs) resp = await self._client.post(
self._url("app/setPreferences"),
data={"json": __import__("json").dumps(prefs)},
)
return resp
@qb_connect_failed_wait @qb_connect_failed_wait
def get_app_prefs(self): async def get_app_prefs(self):
return self._client.app_preferences() resp = await self._client.get(self._url("app/preferences"))
return resp.json()
def add_category(self, category): async def add_category(self, category):
return self._client.torrents_createCategory(name=category) await self._client.post(
self._url("torrents/createCategory"),
data={"category": category, "savePath": ""},
)
@qb_connect_failed_wait @qb_connect_failed_wait
def torrents_info(self, status_filter, category, tag=None): async def torrents_info(self, status_filter, category, tag=None):
return self._client.torrents_info( params = {}
status_filter=status_filter, category=category, tag=tag if status_filter:
params["filter"] = status_filter
if category:
params["category"] = category
if tag:
params["tag"] = tag
resp = await self._client.get(self._url("torrents/info"), params=params)
return resp.json()
async def add_torrents(self, torrent_urls, torrent_files, save_path, category):
data = {
"savepath": save_path,
"category": category,
"paused": "false",
"autoTMM": "false",
"contentLayout": "NoSubfolder",
}
files = {}
if torrent_urls:
if isinstance(torrent_urls, list):
data["urls"] = "\n".join(torrent_urls)
else:
data["urls"] = torrent_urls
if torrent_files:
if isinstance(torrent_files, list):
for i, f in enumerate(torrent_files):
files[f"torrents_{i}"] = (f"torrent_{i}.torrent", f, "application/x-bittorrent")
else:
files["torrents"] = ("torrent.torrent", torrent_files, "application/x-bittorrent")
resp = await self._client.post(
self._url("torrents/add"),
data=data,
files=files if files else None,
)
return resp.text == "Ok."
async def torrents_delete(self, hash, delete_files: bool = True):
await self._client.post(
self._url("torrents/delete"),
data={"hashes": hash, "deleteFiles": str(delete_files).lower()},
) )
def add_torrents(self, torrent_urls, torrent_files, save_path, category): async def torrents_pause(self, hashes: str):
resp = self._client.torrents_add( await self._client.post(
is_paused=False, self._url("torrents/pause"),
urls=torrent_urls, data={"hashes": hashes},
torrent_files=torrent_files,
save_path=save_path,
category=category,
use_auto_torrent_management=False,
content_layout="NoSubFolder"
) )
return resp == "Ok."
def torrents_delete(self, hash): async def torrents_resume(self, hashes: str):
return self._client.torrents_delete(delete_files=True, torrent_hashes=hash) await self._client.post(
self._url("torrents/resume"),
data={"hashes": hashes},
)
def torrents_rename_file(self, torrent_hash, old_path, new_path) -> bool: async def torrents_rename_file(self, torrent_hash, old_path, new_path) -> bool:
try: try:
self._client.torrents_rename_file( resp = await self._client.post(
torrent_hash=torrent_hash, old_path=old_path, new_path=new_path self._url("torrents/renameFile"),
data={"hash": torrent_hash, "oldPath": old_path, "newPath": new_path},
) )
return True if resp.status_code == 409:
except Conflict409Error: logger.debug(f"Conflict409Error: {old_path} >> {new_path}")
logger.debug(f"Conflict409Error: {old_path} >> {new_path}") return False
return resp.status_code == 200
except Exception:
return False return False
def rss_add_feed(self, url, item_path): async def rss_add_feed(self, url, item_path):
try: resp = await self._client.post(
self._client.rss_add_feed(url, item_path) self._url("rss/addFeed"),
except Conflict409Error: data={"url": url, "path": item_path},
)
if resp.status_code == 409:
logger.warning(f"[Downloader] RSS feed {url} already exists") logger.warning(f"[Downloader] RSS feed {url} already exists")
def rss_remove_item(self, item_path): async def rss_remove_item(self, item_path):
try: resp = await self._client.post(
self._client.rss_remove_item(item_path) self._url("rss/removeItem"),
except Conflict409Error: data={"path": item_path},
)
if resp.status_code == 409:
logger.warning(f"[Downloader] RSS item {item_path} does not exist") logger.warning(f"[Downloader] RSS item {item_path} does not exist")
def rss_get_feeds(self): async def rss_get_feeds(self):
return self._client.rss_items() resp = await self._client.get(self._url("rss/items"))
return resp.json()
def rss_set_rule(self, rule_name, rule_def): async def rss_set_rule(self, rule_name, rule_def):
self._client.rss_set_rule(rule_name, rule_def) import json
await self._client.post(
self._url("rss/setRule"),
data={"ruleName": rule_name, "ruleDef": json.dumps(rule_def)},
)
def move_torrent(self, hashes, new_location): async def move_torrent(self, hashes, new_location):
self._client.torrents_set_location(new_location, hashes) await self._client.post(
self._url("torrents/setLocation"),
data={"hashes": hashes, "location": new_location},
)
def get_download_rule(self): async def get_download_rule(self):
return self._client.rss_rules() resp = await self._client.get(self._url("rss/rules"))
return resp.json()
def get_torrent_path(self, _hash): async def get_torrent_path(self, _hash):
return self._client.torrents_info(hashes=_hash)[0].save_path resp = await self._client.get(
self._url("torrents/info"), params={"hashes": _hash}
)
torrents = resp.json()
if torrents:
return torrents[0].get("save_path", "")
return ""
def set_category(self, _hash, category): async def set_category(self, _hash, category):
try: resp = await self._client.post(
self._client.torrents_set_category(category, hashes=_hash) self._url("torrents/setCategory"),
except Conflict409Error: data={"hashes": _hash, "category": category},
)
if resp.status_code == 409:
logger.warning(f"[Downloader] Category {category} does not exist") logger.warning(f"[Downloader] Category {category} does not exist")
self.add_category(category) await self.add_category(category)
self._client.torrents_set_category(category, hashes=_hash) await self._client.post(
self._url("torrents/setCategory"),
data={"hashes": _hash, "category": category},
)
def check_connection(self): async def check_connection(self):
return self._client.app_version() resp = await self._client.get(self._url("app/version"))
return resp.text
def remove_rule(self, rule_name): async def remove_rule(self, rule_name):
self._client.rss_remove_rule(rule_name) await self._client.post(
self._url("rss/removeRule"),
data={"ruleName": rule_name},
)
def add_tag(self, _hash, tag): async def add_tag(self, _hash, tag):
self._client.torrents_add_tags(tags=tag, hashes=_hash) await self._client.post(
self._url("torrents/addTags"),
data={"hashes": _hash, "tags": tag},
)

View File

@@ -17,7 +17,6 @@ class DownloadClient(TorrentPath):
@staticmethod @staticmethod
def __getClient(): def __getClient():
# TODO 多下载器支持
type = settings.downloader.type type = settings.downloader.type
host = settings.downloader.host host = settings.downloader.host
username = settings.downloader.username username = settings.downloader.username
@@ -27,49 +26,53 @@ class DownloadClient(TorrentPath):
from .client.qb_downloader import QbDownloader from .client.qb_downloader import QbDownloader
return QbDownloader(host, username, password, ssl) return QbDownloader(host, username, password, ssl)
elif type == "aria2":
from .client.aria2_downloader import Aria2Downloader
return Aria2Downloader(host, username, password)
else: else:
logger.error(f"[Downloader] Unsupported downloader type: {type}") logger.error(f"[Downloader] Unsupported downloader type: {type}")
raise Exception(f"Unsupported downloader type: {type}") raise Exception(f"Unsupported downloader type: {type}")
def __enter__(self): async def __aenter__(self):
if not self.authed: if not self.authed:
self.auth() await self.auth()
else: else:
logger.error("[Downloader] Already authed.") logger.error("[Downloader] Already authed.")
return self return self
def __exit__(self, exc_type, exc_val, exc_tb): async def __aexit__(self, exc_type, exc_val, exc_tb):
if self.authed: if self.authed:
self.client.logout() await self.client.logout()
self.authed = False self.authed = False
def auth(self): async def auth(self):
self.authed = self.client.auth() self.authed = await self.client.auth()
if self.authed: if self.authed:
logger.debug("[Downloader] Authed.") logger.debug("[Downloader] Authed.")
else: else:
logger.error("[Downloader] Auth failed.") logger.error("[Downloader] Auth failed.")
def check_host(self): async def check_host(self):
return self.client.check_host() return await self.client.check_host()
def init_downloader(self): async def init_downloader(self):
prefs = { prefs = {
"rss_auto_downloading_enabled": True, "rss_auto_downloading_enabled": True,
"rss_max_articles_per_feed": 500, "rss_max_articles_per_feed": 500,
"rss_processing_enabled": True, "rss_processing_enabled": True,
"rss_refresh_interval": 30, "rss_refresh_interval": 30,
} }
self.client.prefs_init(prefs=prefs) await self.client.prefs_init(prefs=prefs)
try: try:
self.client.add_category("BangumiCollection") await self.client.add_category("BangumiCollection")
except Exception: except Exception:
logger.debug("[Downloader] Cannot add new category, maybe already exists.") logger.debug("[Downloader] Cannot add new category, maybe already exists.")
if settings.downloader.path == "": if settings.downloader.path == "":
prefs = self.client.get_app_prefs() prefs = await self.client.get_app_prefs()
settings.downloader.path = self._join_path(prefs["save_path"], "Bangumi") settings.downloader.path = self._join_path(prefs["save_path"], "Bangumi")
def set_rule(self, data: Bangumi): async def set_rule(self, data: Bangumi):
data.rule_name = self._rule_name(data) data.rule_name = self._rule_name(data)
data.save_path = self._gen_save_path(data) data.save_path = self._gen_save_path(data)
rule = { rule = {
@@ -87,37 +90,43 @@ class DownloadClient(TorrentPath):
"assignedCategory": "Bangumi", "assignedCategory": "Bangumi",
"savePath": data.save_path, "savePath": data.save_path,
} }
self.client.rss_set_rule(rule_name=data.rule_name, rule_def=rule) await self.client.rss_set_rule(rule_name=data.rule_name, rule_def=rule)
data.added = True data.added = True
logger.info( logger.info(
f"[Downloader] Add {data.official_title} Season {data.season} to auto download rules." f"[Downloader] Add {data.official_title} Season {data.season} to auto download rules."
) )
def set_rules(self, bangumi_info: list[Bangumi]): async def set_rules(self, bangumi_info: list[Bangumi]):
logger.debug("[Downloader] Start adding rules.") logger.debug("[Downloader] Start adding rules.")
for info in bangumi_info: for info in bangumi_info:
self.set_rule(info) await self.set_rule(info)
logger.debug("[Downloader] Finished.") logger.debug("[Downloader] Finished.")
def get_torrent_info(self, category="Bangumi", status_filter="completed", tag=None): async def get_torrent_info(self, category="Bangumi", status_filter="completed", tag=None):
return self.client.torrents_info( return await self.client.torrents_info(
status_filter=status_filter, category=category, tag=tag status_filter=status_filter, category=category, tag=tag
) )
def rename_torrent_file(self, _hash, old_path, new_path) -> bool: async def rename_torrent_file(self, _hash, old_path, new_path) -> bool:
logger.info(f"{old_path} >> {new_path}") logger.info(f"{old_path} >> {new_path}")
return self.client.torrents_rename_file( return await self.client.torrents_rename_file(
torrent_hash=_hash, old_path=old_path, new_path=new_path torrent_hash=_hash, old_path=old_path, new_path=new_path
) )
def delete_torrent(self, hashes): async def delete_torrent(self, hashes, delete_files: bool = True):
self.client.torrents_delete(hashes) await self.client.torrents_delete(hashes, delete_files=delete_files)
logger.info("[Downloader] Remove torrents.") logger.info("[Downloader] Remove torrents.")
def add_torrent(self, torrent: Torrent | list, bangumi: Bangumi) -> bool: async def pause_torrent(self, hashes: str):
await self.client.torrents_pause(hashes)
async def resume_torrent(self, hashes: str):
await self.client.torrents_resume(hashes)
async def add_torrent(self, torrent: Torrent | list, bangumi: Bangumi) -> bool:
if not bangumi.save_path: if not bangumi.save_path:
bangumi.save_path = self._gen_save_path(bangumi) bangumi.save_path = self._gen_save_path(bangumi)
with RequestContent() as req: async with RequestContent() as req:
if isinstance(torrent, list): if isinstance(torrent, list):
if len(torrent) == 0: if len(torrent) == 0:
logger.debug(f"[Downloader] No torrent found: {bangumi.official_title}") logger.debug(f"[Downloader] No torrent found: {bangumi.official_title}")
@@ -126,16 +135,16 @@ class DownloadClient(TorrentPath):
torrent_url = [t.url for t in torrent] torrent_url = [t.url for t in torrent]
torrent_file = None torrent_file = None
else: else:
torrent_file = [req.get_content(t.url) for t in torrent] torrent_file = [await req.get_content(t.url) for t in torrent]
torrent_url = None torrent_url = None
else: else:
if "magnet" in torrent.url: if "magnet" in torrent.url:
torrent_url = torrent.url torrent_url = torrent.url
torrent_file = None torrent_file = None
else: else:
torrent_file = req.get_content(torrent.url) torrent_file = await req.get_content(torrent.url)
torrent_url = None torrent_url = None
if self.client.add_torrents( if await self.client.add_torrents(
torrent_urls=torrent_url, torrent_urls=torrent_url,
torrent_files=torrent_file, torrent_files=torrent_file,
save_path=bangumi.save_path, save_path=bangumi.save_path,
@@ -147,28 +156,28 @@ class DownloadClient(TorrentPath):
logger.debug(f"[Downloader] Torrent added before: {bangumi.official_title}") logger.debug(f"[Downloader] Torrent added before: {bangumi.official_title}")
return False return False
def move_torrent(self, hashes, location): async def move_torrent(self, hashes, location):
self.client.move_torrent(hashes=hashes, new_location=location) await self.client.move_torrent(hashes=hashes, new_location=location)
# RSS Parts # RSS Parts
def add_rss_feed(self, rss_link, item_path="Mikan_RSS"): async def add_rss_feed(self, rss_link, item_path="Mikan_RSS"):
self.client.rss_add_feed(url=rss_link, item_path=item_path) await self.client.rss_add_feed(url=rss_link, item_path=item_path)
def remove_rss_feed(self, item_path): async def remove_rss_feed(self, item_path):
self.client.rss_remove_item(item_path=item_path) await self.client.rss_remove_item(item_path=item_path)
def get_rss_feed(self): async def get_rss_feed(self):
return self.client.rss_get_feeds() return await self.client.rss_get_feeds()
def get_download_rules(self): async def get_download_rules(self):
return self.client.get_download_rule() return await self.client.get_download_rule()
def get_torrent_path(self, hashes): async def get_torrent_path(self, hashes):
return self.client.get_torrent_path(hashes) return await self.client.get_torrent_path(hashes)
def set_category(self, hashes, category): async def set_category(self, hashes, category):
self.client.set_category(hashes, category) await self.client.set_category(hashes, category)
def remove_rule(self, rule_name): async def remove_rule(self, rule_name):
self.client.remove_rule(rule_name) await self.client.remove_rule(rule_name)
logger.info(f"[Downloader] Delete rule: {rule_name}") logger.info(f"[Downloader] Delete rule: {rule_name}")

View File

@@ -9,16 +9,17 @@ logger = logging.getLogger(__name__)
class SeasonCollector(DownloadClient): class SeasonCollector(DownloadClient):
def collect_season(self, bangumi: Bangumi, link: str = None): async def collect_season(self, bangumi: Bangumi, link: str = None):
logger.info( logger.info(
f"Start collecting {bangumi.official_title} Season {bangumi.season}..." f"Start collecting {bangumi.official_title} Season {bangumi.season}..."
) )
with SearchTorrent() as st, RSSEngine() as engine: async with SearchTorrent() as st:
if not link: if not link:
torrents = st.search_season(bangumi) torrents = await st.search_season(bangumi)
else: else:
torrents = st.get_torrents(link, bangumi.filter.replace(",", "|")) torrents = await st.get_torrents(link, bangumi.filter.replace(",", "|"))
if self.add_torrent(torrents, bangumi): with RSSEngine() as engine:
if await self.add_torrent(torrents, bangumi):
logger.info( logger.info(
f"Collections of {bangumi.official_title} Season {bangumi.season} completed." f"Collections of {bangumi.official_title} Season {bangumi.season} completed."
) )
@@ -46,29 +47,29 @@ class SeasonCollector(DownloadClient):
) )
@staticmethod @staticmethod
def subscribe_season(data: Bangumi, parser: str = "mikan"): async def subscribe_season(data: Bangumi, parser: str = "mikan"):
with RSSEngine() as engine: with RSSEngine() as engine:
data.added = True data.added = True
data.eps_collect = True data.eps_collect = True
engine.add_rss( await engine.add_rss(
rss_link=data.rss_link, rss_link=data.rss_link,
name=data.official_title, name=data.official_title,
aggregate=False, aggregate=False,
parser=parser, parser=parser,
) )
result = engine.download_bangumi(data) result = await engine.download_bangumi(data)
engine.bangumi.add(data) engine.bangumi.add(data)
return result return result
def eps_complete(): async def eps_complete():
with RSSEngine() as engine: with RSSEngine() as engine:
datas = engine.bangumi.not_complete() datas = engine.bangumi.not_complete()
if datas: if datas:
logger.info("Start collecting full season...") logger.info("Start collecting full season...")
for data in datas: for data in datas:
if not data.eps_collect: if not data.eps_collect:
with SeasonCollector() as collector: async with SeasonCollector() as collector:
collector.collect_season(data) await collector.collect_season(data)
data.eps_collect = True data.eps_collect = True
engine.bangumi.update_all(datas) engine.bangumi.update_all(datas)

View File

@@ -48,7 +48,7 @@ class Renamer(DownloadClient):
logger.error(f"[Renamer] Unknown rename method: {method}") logger.error(f"[Renamer] Unknown rename method: {method}")
return file_info.media_path return file_info.media_path
def rename_file( async def rename_file(
self, self,
torrent_name: str, torrent_name: str,
media_path: str, media_path: str,
@@ -67,7 +67,7 @@ class Renamer(DownloadClient):
new_path = self.gen_path(ep, bangumi_name, method=method) new_path = self.gen_path(ep, bangumi_name, method=method)
if media_path != new_path: if media_path != new_path:
if new_path not in self.check_pool.keys(): if new_path not in self.check_pool.keys():
if self.rename_torrent_file( if await self.rename_torrent_file(
_hash=_hash, old_path=media_path, new_path=new_path _hash=_hash, old_path=media_path, new_path=new_path
): ):
return Notification( return Notification(
@@ -78,10 +78,10 @@ class Renamer(DownloadClient):
else: else:
logger.warning(f"[Renamer] {media_path} parse failed") logger.warning(f"[Renamer] {media_path} parse failed")
if settings.bangumi_manage.remove_bad_torrent: if settings.bangumi_manage.remove_bad_torrent:
self.delete_torrent(hashes=_hash) await self.delete_torrent(hashes=_hash)
return None return None
def rename_collection( async def rename_collection(
self, self,
media_list: list[str], media_list: list[str],
bangumi_name: str, bangumi_name: str,
@@ -99,17 +99,17 @@ class Renamer(DownloadClient):
if ep: if ep:
new_path = self.gen_path(ep, bangumi_name, method=method) new_path = self.gen_path(ep, bangumi_name, method=method)
if media_path != new_path: if media_path != new_path:
renamed = self.rename_torrent_file( renamed = await self.rename_torrent_file(
_hash=_hash, old_path=media_path, new_path=new_path _hash=_hash, old_path=media_path, new_path=new_path
) )
if not renamed: if not renamed:
logger.warning(f"[Renamer] {media_path} rename failed") logger.warning(f"[Renamer] {media_path} rename failed")
# Delete bad torrent. # Delete bad torrent.
if settings.bangumi_manage.remove_bad_torrent: if settings.bangumi_manage.remove_bad_torrent:
self.delete_torrent(_hash) await self.delete_torrent(_hash)
break break
def rename_subtitles( async def rename_subtitles(
self, self,
subtitle_list: list[str], subtitle_list: list[str],
torrent_name: str, torrent_name: str,
@@ -130,17 +130,17 @@ class Renamer(DownloadClient):
if sub: if sub:
new_path = self.gen_path(sub, bangumi_name, method=method) new_path = self.gen_path(sub, bangumi_name, method=method)
if subtitle_path != new_path: if subtitle_path != new_path:
renamed = self.rename_torrent_file( renamed = await self.rename_torrent_file(
_hash=_hash, old_path=subtitle_path, new_path=new_path _hash=_hash, old_path=subtitle_path, new_path=new_path
) )
if not renamed: if not renamed:
logger.warning(f"[Renamer] {subtitle_path} rename failed") logger.warning(f"[Renamer] {subtitle_path} rename failed")
def rename(self) -> list[Notification]: async def rename(self) -> list[Notification]:
# Get torrent info # Get torrent info
logger.debug("[Renamer] Start rename process.") logger.debug("[Renamer] Start rename process.")
rename_method = settings.bangumi_manage.rename_method rename_method = settings.bangumi_manage.rename_method
torrents_info = self.get_torrent_info() torrents_info = await self.get_torrent_info()
renamed_info: list[Notification] = [] renamed_info: list[Notification] = []
for info in torrents_info: for info in torrents_info:
media_list, subtitle_list = self.check_files(info) media_list, subtitle_list = self.check_files(info)
@@ -154,19 +154,19 @@ class Renamer(DownloadClient):
} }
# Rename single media file # Rename single media file
if len(media_list) == 1: if len(media_list) == 1:
notify_info = self.rename_file(media_path=media_list[0], **kwargs) notify_info = await self.rename_file(media_path=media_list[0], **kwargs)
if notify_info: if notify_info:
renamed_info.append(notify_info) renamed_info.append(notify_info)
# Rename subtitle file # Rename subtitle file
if len(subtitle_list) > 0: if len(subtitle_list) > 0:
self.rename_subtitles(subtitle_list=subtitle_list, **kwargs) await self.rename_subtitles(subtitle_list=subtitle_list, **kwargs)
# Rename collection # Rename collection
elif len(media_list) > 1: elif len(media_list) > 1:
logger.info("[Renamer] Start rename collection") logger.info("[Renamer] Start rename collection")
self.rename_collection(media_list=media_list, **kwargs) await self.rename_collection(media_list=media_list, **kwargs)
if len(subtitle_list) > 0: if len(subtitle_list) > 0:
self.rename_subtitles(subtitle_list=subtitle_list, **kwargs) await self.rename_subtitles(subtitle_list=subtitle_list, **kwargs)
self.set_category(info.hash, "BangumiCollection") await self.set_category(info.hash, "BangumiCollection")
else: else:
logger.warning(f"[Renamer] {info.name} has no media file") logger.warning(f"[Renamer] {info.name} has no media file")
logger.debug("[Renamer] Rename process finished.") logger.debug("[Renamer] Rename process finished.")
@@ -177,12 +177,3 @@ class Renamer(DownloadClient):
pass pass
else: else:
self.delete_torrent(hashes=torrent_hash) self.delete_torrent(hashes=torrent_hash)
if __name__ == "__main__":
from module.conf import setup_logger
settings.log.debug_enable = True
setup_logger()
with Renamer() as renamer:
renamer.rename()

View File

@@ -11,17 +11,19 @@ logger = logging.getLogger(__name__)
class TorrentManager(Database): class TorrentManager(Database):
@staticmethod @staticmethod
def __match_torrents_list(data: Bangumi | BangumiUpdate) -> list: async def __match_torrents_list(data: Bangumi | BangumiUpdate) -> list:
with DownloadClient() as client: async with DownloadClient() as client:
torrents = client.get_torrent_info(status_filter=None) torrents = await client.get_torrent_info(status_filter=None)
return [ return [
torrent.hash for torrent in torrents if torrent.save_path == data.save_path torrent.get("hash", torrent.get("infohash_v1", ""))
for torrent in torrents
if torrent.get("save_path") == data.save_path
] ]
def delete_torrents(self, data: Bangumi, client: DownloadClient): async def delete_torrents(self, data: Bangumi, client: DownloadClient):
hash_list = self.__match_torrents_list(data) hash_list = await self.__match_torrents_list(data)
if hash_list: if hash_list:
client.delete_torrent(hash_list) await client.delete_torrent(hash_list)
logger.info(f"Delete rule and torrents for {data.official_title}") logger.info(f"Delete rule and torrents for {data.official_title}")
return ResponseModel( return ResponseModel(
status_code=200, status_code=200,
@@ -37,20 +39,21 @@ class TorrentManager(Database):
msg_zh=f"无法找到 {data.official_title} 的种子", msg_zh=f"无法找到 {data.official_title} 的种子",
) )
def delete_rule(self, _id: int | str, file: bool = False): async def delete_rule(self, _id: int | str, file: bool = False):
data = self.bangumi.search_id(int(_id)) data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi): if isinstance(data, Bangumi):
with DownloadClient() as client: async with DownloadClient() as client:
self.rss.delete(data.official_title) self.rss.delete(data.official_title)
self.bangumi.delete_one(int(_id)) self.bangumi.delete_one(int(_id))
torrent_message = None
if file: if file:
torrent_message = self.delete_torrents(data, client) torrent_message = await self.delete_torrents(data, client)
logger.info(f"[Manager] Delete rule for {data.official_title}") logger.info(f"[Manager] Delete rule for {data.official_title}")
return ResponseModel( return ResponseModel(
status_code=200, status_code=200,
status=True, status=True,
msg_en=f"Delete rule for {data.official_title}. {torrent_message.msg_en if file else ''}", msg_en=f"Delete rule for {data.official_title}. {torrent_message.msg_en if file and torrent_message else ''}",
msg_zh=f"删除 {data.official_title} 规则。{torrent_message.msg_zh if file else ''}", msg_zh=f"删除 {data.official_title} 规则。{torrent_message.msg_zh if file and torrent_message else ''}",
) )
else: else:
return ResponseModel( return ResponseModel(
@@ -60,15 +63,14 @@ class TorrentManager(Database):
msg_zh=f"无法找到 id {_id}", msg_zh=f"无法找到 id {_id}",
) )
def disable_rule(self, _id: str | int, file: bool = False): async def disable_rule(self, _id: str | int, file: bool = False):
data = self.bangumi.search_id(int(_id)) data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi): if isinstance(data, Bangumi):
with DownloadClient() as client: async with DownloadClient() as client:
# client.remove_rule(data.rule_name)
data.deleted = True data.deleted = True
self.bangumi.update(data) self.bangumi.update(data)
if file: if file:
torrent_message = self.delete_torrents(data, client) torrent_message = await self.delete_torrents(data, client)
return torrent_message return torrent_message
logger.info(f"[Manager] Disable rule for {data.official_title}") logger.info(f"[Manager] Disable rule for {data.official_title}")
return ResponseModel( return ResponseModel(
@@ -105,15 +107,15 @@ class TorrentManager(Database):
msg_zh=f"无法找到 id {_id}", msg_zh=f"无法找到 id {_id}",
) )
def update_rule(self, bangumi_id, data: BangumiUpdate): async def update_rule(self, bangumi_id, data: BangumiUpdate):
old_data: Bangumi = self.bangumi.search_id(bangumi_id) old_data: Bangumi = self.bangumi.search_id(bangumi_id)
if old_data: if old_data:
# Move torrent # Move torrent
match_list = self.__match_torrents_list(old_data) match_list = await self.__match_torrents_list(old_data)
with DownloadClient() as client: async with DownloadClient() as client:
path = client._gen_save_path(data) path = client._gen_save_path(data)
if match_list: if match_list:
client.move_torrent(match_list, path) await client.move_torrent(match_list, path)
data.save_path = path data.save_path = path
self.bangumi.update(data, bangumi_id) self.bangumi.update(data, bangumi_id)
return ResponseModel( return ResponseModel(
@@ -131,11 +133,11 @@ class TorrentManager(Database):
msg_zh=f"无法找到 id {bangumi_id} 的数据", msg_zh=f"无法找到 id {bangumi_id} 的数据",
) )
def refresh_poster(self): async def refresh_poster(self):
bangumis = self.bangumi.search_all() bangumis = self.bangumi.search_all()
for bangumi in bangumis: for bangumi in bangumis:
if not bangumi.poster_link: if not bangumi.poster_link:
TitleParser().tmdb_poster_parser(bangumi) await TitleParser().tmdb_poster_parser(bangumi)
self.bangumi.update_all(bangumis) self.bangumi.update_all(bangumis)
return ResponseModel( return ResponseModel(
status_code=200, status_code=200,
@@ -144,9 +146,9 @@ class TorrentManager(Database):
msg_zh="刷新海报链接成功。", msg_zh="刷新海报链接成功。",
) )
def refind_poster(self, bangumi_id: int): async def refind_poster(self, bangumi_id: int):
bangumi = self.bangumi.search_id(bangumi_id) bangumi = self.bangumi.search_id(bangumi_id)
TitleParser().tmdb_poster_parser(bangumi) await TitleParser().tmdb_poster_parser(bangumi)
self.bangumi.update(bangumi) self.bangumi.update(bangumi)
return ResponseModel( return ResponseModel(
status_code=200, status_code=200,
@@ -155,9 +157,9 @@ class TorrentManager(Database):
msg_zh="刷新海报链接成功。", msg_zh="刷新海报链接成功。",
) )
def refresh_calendar(self): async def refresh_calendar(self):
"""Fetch Bangumi.tv calendar and update air_weekday for all bangumi.""" """Fetch Bangumi.tv calendar and update air_weekday for all bangumi."""
calendar_items = fetch_bgm_calendar() calendar_items = await fetch_bgm_calendar()
if not calendar_items: if not calendar_items:
return ResponseModel( return ResponseModel(
status_code=500, status_code=500,
@@ -204,8 +206,3 @@ class TorrentManager(Database):
) )
else: else:
return data return data
if __name__ == "__main__":
with TorrentManager() as manager:
manager.refresh_poster()

View File

@@ -12,14 +12,14 @@ logger = logging.getLogger(__name__)
class RequestContent(RequestURL): class RequestContent(RequestURL):
def get_torrents( async def get_torrents(
self, self,
_url: str, _url: str,
_filter: str = None, _filter: str = None,
limit: int = None, limit: int = None,
retry: int = 3, retry: int = 3,
) -> list[Torrent]: ) -> list[Torrent]:
soup = self.get_xml(_url, retry) soup = await self.get_xml(_url, retry)
if soup: if soup:
torrent_titles, torrent_urls, torrent_homepage = rss_parser(soup) torrent_titles, torrent_urls, torrent_homepage = rss_parser(soup)
torrents: list[Torrent] = [] torrents: list[Torrent] = []
@@ -40,38 +40,40 @@ class RequestContent(RequestURL):
logger.warning(f"[Network] Failed to get torrents: {_url}") logger.warning(f"[Network] Failed to get torrents: {_url}")
return [] return []
def get_xml(self, _url, retry: int = 3) -> xml.etree.ElementTree.Element: async def get_xml(self, _url, retry: int = 3) -> xml.etree.ElementTree.Element:
req = self.get_url(_url, retry) req = await self.get_url(_url, retry)
if req: if req:
return xml.etree.ElementTree.fromstring(req.text) return xml.etree.ElementTree.fromstring(req.text)
# API JSON # API JSON
def get_json(self, _url) -> dict: async def get_json(self, _url) -> dict:
req = self.get_url(_url) req = await self.get_url(_url)
if req: if req:
return req.json() return req.json()
def post_json(self, _url, data: dict) -> dict: async def post_json(self, _url, data: dict) -> dict:
return self.post_url(_url, data).json() resp = await self.post_url(_url, data)
return resp.json()
def post_data(self, _url, data: dict) -> dict: async def post_data(self, _url, data: dict):
return self.post_url(_url, data) return await self.post_url(_url, data)
def post_files(self, _url, data: dict, files: dict) -> dict: async def post_files(self, _url, data: dict, files: dict):
return self.post_form(_url, data, files) return await self.post_form(_url, data, files)
def get_html(self, _url): async def get_html(self, _url):
return self.get_url(_url).text resp = await self.get_url(_url)
return resp.text
def get_content(self, _url): async def get_content(self, _url):
req = self.get_url(_url) req = await self.get_url(_url)
if req: if req:
return req.content return req.content
def check_connection(self, _url): async def check_connection(self, _url):
return self.check_url(_url) return await self.check_url(_url)
def get_rss_title(self, _url): async def get_rss_title(self, _url):
soup = self.get_xml(_url) soup = await self.get_xml(_url)
if soup: if soup:
return soup.find("./channel/title").text return soup.find("./channel/title").text

View File

@@ -1,9 +1,8 @@
import asyncio
import logging import logging
import socket
import time
import requests import httpx
import socks from httpx_socks import AsyncProxyTransport
from module.conf import settings from module.conf import settings
@@ -13,47 +12,50 @@ logger = logging.getLogger(__name__)
class RequestURL: class RequestURL:
def __init__(self): def __init__(self):
self.header = {"user-agent": "Mozilla/5.0", "Accept": "application/xml"} self.header = {"user-agent": "Mozilla/5.0", "Accept": "application/xml"}
self._socks5_proxy = False self._client: httpx.AsyncClient | None = None
def get_url(self, url, retry=3): async def get_url(self, url, retry=3):
try_time = 0 try_time = 0
while True: while True:
try: try:
req = self.session.get(url=url, headers=self.header, timeout=5) req = await self._client.get(url=url, headers=self.header)
logger.debug(f"[Network] Successfully connected to {url}. Status: {req.status_code}") logger.debug(f"[Network] Successfully connected to {url}. Status: {req.status_code}")
req.raise_for_status() req.raise_for_status()
return req return req
except requests.RequestException: except httpx.HTTPStatusError:
logger.debug(f"[Network] HTTP error from {url}.")
break
except httpx.RequestError:
logger.debug( logger.debug(
f"[Network] Cannot connect to {url}. Wait for 5 seconds." f"[Network] Cannot connect to {url}. Wait for 5 seconds."
) )
try_time += 1 try_time += 1
if try_time >= retry: if try_time >= retry:
break break
time.sleep(5) await asyncio.sleep(5)
except Exception as e: except Exception as e:
logger.debug(e) logger.debug(e)
break break
logger.error(f"[Network] Unable to connect to {url}, Please check your network settings") logger.error(f"[Network] Unable to connect to {url}, Please check your network settings")
return None return None
def post_url(self, url: str, data: dict, retry=3): async def post_url(self, url: str, data: dict, retry=3):
try_time = 0 try_time = 0
while True: while True:
try: try:
req = self.session.post( req = await self._client.post(
url=url, headers=self.header, data=data, timeout=5 url=url, headers=self.header, data=data
) )
req.raise_for_status() req.raise_for_status()
return req return req
except requests.RequestException: except httpx.RequestError:
logger.warning( logger.warning(
f"[Network] Cannot connect to {url}. Wait for 5 seconds." f"[Network] Cannot connect to {url}. Wait for 5 seconds."
) )
try_time += 1 try_time += 1
if try_time >= retry: if try_time >= retry:
break break
time.sleep(5) await asyncio.sleep(5)
except Exception as e: except Exception as e:
logger.debug(e) logger.debug(e)
break break
@@ -61,64 +63,54 @@ class RequestURL:
logger.warning("[Network] Please check DNS/Connection settings") logger.warning("[Network] Please check DNS/Connection settings")
return None return None
def check_url(self, url: str): async def check_url(self, url: str):
if "://" not in url: if "://" not in url:
url = f"http://{url}" url = f"http://{url}"
try: try:
req = requests.head(url=url, headers=self.header, timeout=5) req = await self._client.head(url=url, headers=self.header)
req.raise_for_status() req.raise_for_status()
return True return True
except requests.RequestException: except (httpx.RequestError, httpx.HTTPStatusError):
logger.debug(f"[Network] Cannot connect to {url}.") logger.debug(f"[Network] Cannot connect to {url}.")
return False return False
def post_form(self, url: str, data: dict, files): async def post_form(self, url: str, data: dict, files):
try: try:
req = self.session.post( req = await self._client.post(
url=url, headers=self.header, data=data, files=files, timeout=5 url=url, headers=self.header, data=data, files=files
) )
req.raise_for_status() req.raise_for_status()
return req return req
except requests.RequestException: except (httpx.RequestError, httpx.HTTPStatusError):
logger.warning(f"[Network] Cannot connect to {url}.") logger.warning(f"[Network] Cannot connect to {url}.")
return None return None
def __enter__(self): async def __aenter__(self):
self.session = requests.Session() timeout = httpx.Timeout(connect=5.0, read=10.0, write=10.0, pool=10.0)
if settings.proxy.enable: if settings.proxy.enable:
if "http" in settings.proxy.type: if "http" in settings.proxy.type:
if settings.proxy.username: if settings.proxy.username:
username=settings.proxy.username username = settings.proxy.username
password=settings.proxy.password password = settings.proxy.password
url = f"http://{username}:{password}@{settings.proxy.host}:{settings.proxy.port}" proxy_url = f"http://{username}:{password}@{settings.proxy.host}:{settings.proxy.port}"
self.session.proxies = {
"http": url,
"https": url,
}
else: else:
url = f"http://{settings.proxy.host}:{settings.proxy.port}" proxy_url = f"http://{settings.proxy.host}:{settings.proxy.port}"
self.session.proxies = { self._client = httpx.AsyncClient(proxy=proxy_url, timeout=timeout)
"http": url,
"https": url,
}
elif settings.proxy.type == "socks5": elif settings.proxy.type == "socks5":
self._socks5_proxy = True if settings.proxy.username:
socks.set_default_proxy( socks_url = f"socks5://{settings.proxy.username}:{settings.proxy.password}@{settings.proxy.host}:{settings.proxy.port}"
socks.SOCKS5, else:
addr=settings.proxy.host, socks_url = f"socks5://{settings.proxy.host}:{settings.proxy.port}"
port=settings.proxy.port, transport = AsyncProxyTransport.from_url(socks_url, rdns=True)
rdns=True, self._client = httpx.AsyncClient(transport=transport, timeout=timeout)
username=settings.proxy.username,
password=settings.proxy.password,
)
socket.socket = socks.socksocket
else: else:
logger.error(f"[Network] Unsupported proxy type: {settings.proxy.type}") logger.error(f"[Network] Unsupported proxy type: {settings.proxy.type}")
self._client = httpx.AsyncClient(timeout=timeout)
else:
self._client = httpx.AsyncClient(timeout=timeout)
return self return self
def __exit__(self, exc_type, exc_val, exc_tb): async def __aexit__(self, exc_type, exc_val, exc_tb):
if self._socks5_proxy: if self._client:
socks.set_default_proxy() await self._client.aclose()
socket.socket = socks.socksocket self._client = None
self._socks5_proxy = False
self.session.close()

View File

@@ -40,18 +40,18 @@ class PostNotification:
poster_path = db.bangumi.match_poster(notify.official_title) poster_path = db.bangumi.match_poster(notify.official_title)
notify.poster_path = poster_path notify.poster_path = poster_path
def send_msg(self, notify: Notification) -> bool: async def send_msg(self, notify: Notification) -> bool:
self._get_poster(notify) self._get_poster(notify)
try: try:
self.notifier.post_msg(notify) await self.notifier.post_msg(notify)
logger.debug(f"Send notification: {notify.official_title}") logger.debug(f"Send notification: {notify.official_title}")
except Exception as e: except Exception as e:
logger.warning(f"Failed to send notification: {e}") logger.warning(f"Failed to send notification: {e}")
return False return False
def __enter__(self): async def __aenter__(self):
self.notifier.__enter__() await self.notifier.__aenter__()
return self return self
def __exit__(self, exc_type, exc_val, exc_tb): async def __aexit__(self, exc_type, exc_val, exc_tb):
self.notifier.__exit__(exc_type, exc_val, exc_tb) await self.notifier.__aexit__(exc_type, exc_val, exc_tb)

View File

@@ -19,9 +19,9 @@ class BarkNotification(RequestContent):
""" """
return text.strip() return text.strip()
def post_msg(self, notify: Notification) -> bool: async def post_msg(self, notify: Notification) -> bool:
text = self.gen_message(notify) text = self.gen_message(notify)
data = {"title": notify.official_title, "body": text, "icon": notify.poster_path, "device_key": self.token} data = {"title": notify.official_title, "body": text, "icon": notify.poster_path, "device_key": self.token}
resp = self.post_data(self.notification_url, data) resp = await self.post_data(self.notification_url, data)
logger.debug(f"Bark notification: {resp.status_code}") logger.debug(f"Bark notification: {resp.status_code}")
return resp.status_code == 200 return resp.status_code == 200

View File

@@ -20,12 +20,12 @@ class ServerChanNotification(RequestContent):
""" """
return text.strip() return text.strip()
def post_msg(self, notify: Notification) -> bool: async def post_msg(self, notify: Notification) -> bool:
text = self.gen_message(notify) text = self.gen_message(notify)
data = { data = {
"title": notify.official_title, "title": notify.official_title,
"desp": text, "desp": text,
} }
resp = self.post_data(self.notification_url, data) resp = await self.post_data(self.notification_url, data)
logger.debug(f"ServerChan notification: {resp.status_code}") logger.debug(f"ServerChan notification: {resp.status_code}")
return resp.status_code == 200 return resp.status_code == 200

View File

@@ -19,9 +19,9 @@ class SlackNotification(RequestContent):
""" """
return text.strip() return text.strip()
def post_msg(self, notify: Notification) -> bool: async def post_msg(self, notify: Notification) -> bool:
text = self.gen_message(notify) text = self.gen_message(notify)
data = {"title": notify.official_title, "body": text, "device_key": self.token} data = {"title": notify.official_title, "body": text, "device_key": self.token}
resp = self.post_data(self.notification_url, data) resp = await self.post_data(self.notification_url, data)
logger.debug(f"Bark notification: {resp.status_code}") logger.debug(f"Bark notification: {resp.status_code}")
return resp.status_code == 200 return resp.status_code == 200

View File

@@ -21,7 +21,7 @@ class TelegramNotification(RequestContent):
""" """
return text.strip() return text.strip()
def post_msg(self, notify: Notification) -> bool: async def post_msg(self, notify: Notification) -> bool:
text = self.gen_message(notify) text = self.gen_message(notify)
data = { data = {
"chat_id": self.chat_id, "chat_id": self.chat_id,
@@ -31,8 +31,8 @@ class TelegramNotification(RequestContent):
} }
photo = load_image(notify.poster_path) photo = load_image(notify.poster_path)
if photo: if photo:
resp = self.post_files(self.photo_url, data, files={"photo": photo}) resp = await self.post_files(self.photo_url, data, files={"photo": photo})
else: else:
resp = self.post_data(self.message_url, data) resp = await self.post_data(self.message_url, data)
logger.debug(f"Telegram notification: {resp.status_code}") logger.debug(f"Telegram notification: {resp.status_code}")
return resp.status_code == 200 return resp.status_code == 200

View File

@@ -22,7 +22,7 @@ class WecomNotification(RequestContent):
""" """
return text.strip() return text.strip()
def post_msg(self, notify: Notification) -> bool: async def post_msg(self, notify: Notification) -> bool:
##Change message format to match Wecom push better ##Change message format to match Wecom push better
title = "【番剧更新】" + notify.official_title title = "【番剧更新】" + notify.official_title
msg = self.gen_message(notify) msg = self.gen_message(notify)
@@ -37,6 +37,6 @@ class WecomNotification(RequestContent):
"msg": msg, "msg": msg,
"picurl": picurl, "picurl": picurl,
} }
resp = self.post_data(self.notification_url, data) resp = await self.post_data(self.notification_url, data)
logger.debug(f"Wecom notification: {resp.status_code}") logger.debug(f"Wecom notification: {resp.status_code}")
return resp.status_code == 200 return resp.status_code == 200

View File

@@ -7,13 +7,13 @@ logger = logging.getLogger(__name__)
BGM_CALENDAR_URL = "https://api.bgm.tv/calendar" BGM_CALENDAR_URL = "https://api.bgm.tv/calendar"
def fetch_bgm_calendar() -> list[dict]: async def fetch_bgm_calendar() -> list[dict]:
"""Fetch the current season's broadcast calendar from Bangumi.tv API. """Fetch the current season's broadcast calendar from Bangumi.tv API.
Returns a flat list of anime items with their air_weekday (0=Mon, ..., 6=Sun). Returns a flat list of anime items with their air_weekday (0=Mon, ..., 6=Sun).
""" """
with RequestContent() as req: async with RequestContent() as req:
data = req.get_json(BGM_CALENDAR_URL) data = await req.get_json(BGM_CALENDAR_URL)
if not data: if not data:
logger.warning("[BGM Calendar] Failed to fetch calendar data.") logger.warning("[BGM Calendar] Failed to fetch calendar data.")

View File

@@ -5,10 +5,10 @@ def search_url(e):
return f"https://api.bgm.tv/search/subject/{e}?responseGroup=large" return f"https://api.bgm.tv/search/subject/{e}?responseGroup=large"
def bgm_parser(title): async def bgm_parser(title):
url = search_url(title) url = search_url(title)
with RequestContent() as req: async with RequestContent() as req:
contents = req.get_json(url) contents = await req.get_json(url)
if contents: if contents:
return contents[0] return contents[0]
else: else:

View File

@@ -7,10 +7,10 @@ from module.network import RequestContent
from module.utils import save_image from module.utils import save_image
def mikan_parser(homepage: str): async def mikan_parser(homepage: str):
root_path = parse_url(homepage).host root_path = parse_url(homepage).host
with RequestContent() as req: async with RequestContent() as req:
content = req.get_html(homepage) content = await req.get_html(homepage)
soup = BeautifulSoup(content, "html.parser") soup = BeautifulSoup(content, "html.parser")
poster_div = soup.find("div", {"class": "bangumi-poster"}).get("style") poster_div = soup.find("div", {"class": "bangumi-poster"}).get("style")
official_title = soup.select_one( official_title = soup.select_one(
@@ -20,7 +20,7 @@ def mikan_parser(homepage: str):
if poster_div: if poster_div:
poster_path = poster_div.split("url('")[1].split("')")[0] poster_path = poster_div.split("url('")[1].split("')")[0]
poster_path = poster_path.split("?")[0] poster_path = poster_path.split("?")[0]
img = req.get_content(f"https://{root_path}{poster_path}") img = await req.get_content(f"https://{root_path}{poster_path}")
suffix = poster_path.split(".")[-1] suffix = poster_path.split(".")[-1]
poster_link = save_image(img, suffix) poster_link = save_image(img, suffix)
return poster_link, official_title return poster_link, official_title
@@ -28,5 +28,6 @@ def mikan_parser(homepage: str):
if __name__ == '__main__': if __name__ == '__main__':
import asyncio
homepage = "https://mikanani.me/Home/Episode/c89b3c6f0c1c0567a618f5288b853823c87a9862" homepage = "https://mikanani.me/Home/Episode/c89b3c6f0c1c0567a618f5288b853823c87a9862"
print(mikan_parser(homepage)) print(asyncio.run(mikan_parser(homepage)))

View File

@@ -31,11 +31,11 @@ def info_url(e, key):
return f"{TMDB_URL}/3/tv/{e}?api_key={TMDB_API}&language={LANGUAGE[key]}" return f"{TMDB_URL}/3/tv/{e}?api_key={TMDB_API}&language={LANGUAGE[key]}"
def is_animation(tv_id, language) -> bool: async def is_animation(tv_id, language, req: RequestContent) -> bool:
url_info = info_url(tv_id, language) url_info = info_url(tv_id, language)
with RequestContent() as req: type_id = await req.get_json(url_info)
type_id = req.get_json(url_info)["genres"] if type_id:
for type in type_id: for type in type_id.get("genres", []):
if type.get("id") == 16: if type.get("id") == 16:
return True return True
return False return False
@@ -56,21 +56,27 @@ def get_season(seasons: list) -> tuple[int, str]:
return len(ss), ss[-1].get("poster_path") return len(ss), ss[-1].get("poster_path")
def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None: async def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
with RequestContent() as req: async with RequestContent() as req:
url = search_url(title) url = search_url(title)
contents = req.get_json(url).get("results") contents = await req.get_json(url)
if not contents:
return None
contents = contents.get("results")
if contents.__len__() == 0: if contents.__len__() == 0:
url = search_url(title.replace(" ", "")) url = search_url(title.replace(" ", ""))
contents = req.get_json(url).get("results") contents_resp = await req.get_json(url)
if not contents_resp:
return None
contents = contents_resp.get("results")
# 判断动画 # 判断动画
if contents: if contents:
for content in contents: for content in contents:
id = content["id"] id = content["id"]
if is_animation(id, language): if await is_animation(id, language, req):
break break
url_info = info_url(id, language) url_info = info_url(id, language)
info_content = req.get_json(url_info) info_content = await req.get_json(url_info)
season = [ season = [
{ {
"season": s.get("name"), "season": s.get("name"),
@@ -87,7 +93,7 @@ def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
year_number = info_content.get("first_air_date").split("-")[0] year_number = info_content.get("first_air_date").split("-")[0]
if poster_path: if poster_path:
if not test: if not test:
img = req.get_content(f"https://image.tmdb.org/t/p/w780{poster_path}") img = await req.get_content(f"https://image.tmdb.org/t/p/w780{poster_path}")
poster_link = save_image(img, "jpg") poster_link = save_image(img, "jpg")
else: else:
poster_link = "https://image.tmdb.org/t/p/w780" + poster_path poster_link = "https://image.tmdb.org/t/p/w780" + poster_path
@@ -107,4 +113,5 @@ def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
if __name__ == "__main__": if __name__ == "__main__":
print(tmdb_parser("魔法禁书目录", "zh")) import asyncio
print(asyncio.run(tmdb_parser("魔法禁书目录", "zh")))

View File

@@ -31,8 +31,8 @@ class TitleParser:
logger.warning(f"Cannot parse {torrent_path} with error {e}") logger.warning(f"Cannot parse {torrent_path} with error {e}")
@staticmethod @staticmethod
def tmdb_parser(title: str, season: int, language: str): async def tmdb_parser(title: str, season: int, language: str):
tmdb_info = tmdb_parser(title, language) tmdb_info = await tmdb_parser(title, language)
if tmdb_info: if tmdb_info:
logger.debug(f"TMDB Matched, official title is {tmdb_info.title}") logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season
@@ -43,8 +43,8 @@ class TitleParser:
return title, season, None, None return title, season, None, None
@staticmethod @staticmethod
def tmdb_poster_parser(bangumi: Bangumi): async def tmdb_poster_parser(bangumi: Bangumi):
tmdb_info = tmdb_parser(bangumi.official_title, settings.rss_parser.language) tmdb_info = await tmdb_parser(bangumi.official_title, settings.rss_parser.language)
if tmdb_info: if tmdb_info:
logger.debug(f"TMDB Matched, official title is {tmdb_info.title}") logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
bangumi.poster_link = tmdb_info.poster_link bangumi.poster_link = tmdb_info.poster_link
@@ -104,5 +104,5 @@ class TitleParser:
return None return None
@staticmethod @staticmethod
def mikan_parser(homepage: str) -> tuple[str, str]: async def mikan_parser(homepage: str) -> tuple[str, str]:
return mikan_parser(homepage) return await mikan_parser(homepage)

View File

@@ -12,17 +12,17 @@ logger = logging.getLogger(__name__)
class RSSAnalyser(TitleParser): class RSSAnalyser(TitleParser):
def official_title_parser(self, bangumi: Bangumi, rss: RSSItem, torrent: Torrent): async def official_title_parser(self, bangumi: Bangumi, rss: RSSItem, torrent: Torrent):
if rss.parser == "mikan": if rss.parser == "mikan":
try: try:
bangumi.poster_link, bangumi.official_title = self.mikan_parser( bangumi.poster_link, bangumi.official_title = await self.mikan_parser(
torrent.homepage torrent.homepage
) )
except AttributeError: except AttributeError:
logger.warning("[Parser] Mikan torrent has no homepage info.") logger.warning("[Parser] Mikan torrent has no homepage info.")
pass pass
elif rss.parser == "tmdb": elif rss.parser == "tmdb":
tmdb_title, season, year, poster_link = self.tmdb_parser( tmdb_title, season, year, poster_link = await self.tmdb_parser(
bangumi.official_title, bangumi.season, settings.rss_parser.language bangumi.official_title, bangumi.season, settings.rss_parser.language
) )
bangumi.official_title = tmdb_title bangumi.official_title = tmdb_title
@@ -34,45 +34,45 @@ class RSSAnalyser(TitleParser):
bangumi.official_title = re.sub(r"[/:.\\]", " ", bangumi.official_title) bangumi.official_title = re.sub(r"[/:.\\]", " ", bangumi.official_title)
@staticmethod @staticmethod
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[Torrent]: async def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[Torrent]:
with RequestContent() as req: async with RequestContent() as req:
if full_parse: if full_parse:
rss_torrents = req.get_torrents(rss_link) rss_torrents = await req.get_torrents(rss_link)
else: else:
rss_torrents = req.get_torrents(rss_link, "\\d+-\\d+") rss_torrents = await req.get_torrents(rss_link, "\\d+-\\d+")
return rss_torrents return rss_torrents
def torrents_to_data( async def torrents_to_data(
self, torrents: list[Torrent], rss: RSSItem, full_parse: bool = True self, torrents: list[Torrent], rss: RSSItem, full_parse: bool = True
) -> list: ) -> list:
new_data = [] new_data = []
for torrent in torrents: for torrent in torrents:
bangumi = self.raw_parser(raw=torrent.name) bangumi = self.raw_parser(raw=torrent.name)
if bangumi and bangumi.title_raw not in [i.title_raw for i in new_data]: if bangumi and bangumi.title_raw not in [i.title_raw for i in new_data]:
self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent) await self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
if not full_parse: if not full_parse:
return [bangumi] return [bangumi]
new_data.append(bangumi) new_data.append(bangumi)
logger.info(f"[RSS] New bangumi founded: {bangumi.official_title}") logger.info(f"[RSS] New bangumi founded: {bangumi.official_title}")
return new_data return new_data
def torrent_to_data(self, torrent: Torrent, rss: RSSItem) -> Bangumi: async def torrent_to_data(self, torrent: Torrent, rss: RSSItem) -> Bangumi:
bangumi = self.raw_parser(raw=torrent.name) bangumi = self.raw_parser(raw=torrent.name)
if bangumi: if bangumi:
self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent) await self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
bangumi.rss_link = rss.url bangumi.rss_link = rss.url
return bangumi return bangumi
def rss_to_data( async def rss_to_data(
self, rss: RSSItem, engine: RSSEngine, full_parse: bool = True self, rss: RSSItem, engine: RSSEngine, full_parse: bool = True
) -> list[Bangumi]: ) -> list[Bangumi]:
rss_torrents = self.get_rss_torrents(rss.url, full_parse) rss_torrents = await self.get_rss_torrents(rss.url, full_parse)
torrents_to_add = engine.bangumi.match_list(rss_torrents, rss.url) torrents_to_add = engine.bangumi.match_list(rss_torrents, rss.url)
if not torrents_to_add: if not torrents_to_add:
logger.debug("[RSS] No new title has been found.") logger.debug("[RSS] No new title has been found.")
return [] return []
# New List # New List
new_data = self.torrents_to_data(torrents_to_add, rss, full_parse) new_data = await self.torrents_to_data(torrents_to_add, rss, full_parse)
if new_data: if new_data:
# Add to database # Add to database
engine.bangumi.add_all(new_data) engine.bangumi.add_all(new_data)
@@ -80,8 +80,8 @@ class RSSAnalyser(TitleParser):
else: else:
return [] return []
def link_to_data(self, rss: RSSItem) -> Bangumi | ResponseModel: async def link_to_data(self, rss: RSSItem) -> Bangumi | ResponseModel:
torrents = self.get_rss_torrents(rss.url, False) torrents = await self.get_rss_torrents(rss.url, False)
if not torrents: if not torrents:
return ResponseModel( return ResponseModel(
status=False, status=False,
@@ -90,7 +90,7 @@ class RSSAnalyser(TitleParser):
msg_zh="无法找到种子。", msg_zh="无法找到种子。",
) )
for torrent in torrents: for torrent in torrents:
data = self.torrent_to_data(torrent, rss) data = await self.torrent_to_data(torrent, rss)
if data: if data:
return data return data
return ResponseModel( return ResponseModel(
@@ -99,4 +99,3 @@ class RSSAnalyser(TitleParser):
msg_en="Cannot parse this link.", msg_en="Cannot parse this link.",
msg_zh="无法解析此链接。", msg_zh="无法解析此链接。",
) )

View File

@@ -16,9 +16,9 @@ class RSSEngine(Database):
self._to_refresh = False self._to_refresh = False
@staticmethod @staticmethod
def _get_torrents(rss: RSSItem) -> list[Torrent]: async def _get_torrents(rss: RSSItem) -> list[Torrent]:
with RequestContent() as req: async with RequestContent() as req:
torrents = req.get_torrents(rss.url) torrents = await req.get_torrents(rss.url)
# Add RSS ID # Add RSS ID
for torrent in torrents: for torrent in torrents:
torrent.rss_id = rss.id torrent.rss_id = rss.id
@@ -31,7 +31,7 @@ class RSSEngine(Database):
else: else:
return [] return []
def add_rss( async def add_rss(
self, self,
rss_link: str, rss_link: str,
name: str | None = None, name: str | None = None,
@@ -39,8 +39,8 @@ class RSSEngine(Database):
parser: str = "mikan", parser: str = "mikan",
): ):
if not name: if not name:
with RequestContent() as req: async with RequestContent() as req:
name = req.get_rss_title(rss_link) name = await req.get_rss_title(rss_link)
if not name: if not name:
return ResponseModel( return ResponseModel(
status=False, status=False,
@@ -94,8 +94,8 @@ class RSSEngine(Database):
msg_zh="删除 RSS 成功。", msg_zh="删除 RSS 成功。",
) )
def pull_rss(self, rss_item: RSSItem) -> list[Torrent]: async def pull_rss(self, rss_item: RSSItem) -> list[Torrent]:
torrents = self._get_torrents(rss_item) torrents = await self._get_torrents(rss_item)
new_torrents = self.torrent.check_new(torrents) new_torrents = self.torrent.check_new(torrents)
return new_torrents return new_torrents
@@ -110,7 +110,7 @@ class RSSEngine(Database):
return matched return matched
return None return None
def refresh_rss(self, client: DownloadClient, rss_id: Optional[int] = None): async def refresh_rss(self, client: DownloadClient, rss_id: Optional[int] = None):
# Get All RSS Items # Get All RSS Items
if not rss_id: if not rss_id:
rss_items: list[RSSItem] = self.rss.search_active() rss_items: list[RSSItem] = self.rss.search_active()
@@ -120,25 +120,25 @@ class RSSEngine(Database):
# From RSS Items, get all torrents # From RSS Items, get all torrents
logger.debug(f"[Engine] Get {len(rss_items)} RSS items") logger.debug(f"[Engine] Get {len(rss_items)} RSS items")
for rss_item in rss_items: for rss_item in rss_items:
new_torrents = self.pull_rss(rss_item) new_torrents = await self.pull_rss(rss_item)
# Get all enabled bangumi data # Get all enabled bangumi data
for torrent in new_torrents: for torrent in new_torrents:
matched_data = self.match_torrent(torrent) matched_data = self.match_torrent(torrent)
if matched_data: if matched_data:
if client.add_torrent(torrent, matched_data): if await client.add_torrent(torrent, matched_data):
logger.debug(f"[Engine] Add torrent {torrent.name} to client") logger.debug(f"[Engine] Add torrent {torrent.name} to client")
torrent.downloaded = True torrent.downloaded = True
# Add all torrents to database # Add all torrents to database
self.torrent.add_all(new_torrents) self.torrent.add_all(new_torrents)
def download_bangumi(self, bangumi: Bangumi): async def download_bangumi(self, bangumi: Bangumi):
with RequestContent() as req: async with RequestContent() as req:
torrents = req.get_torrents( torrents = await req.get_torrents(
bangumi.rss_link, bangumi.filter.replace(",", "|") bangumi.rss_link, bangumi.filter.replace(",", "|")
) )
if torrents: if torrents:
with DownloadClient() as client: async with DownloadClient() as client:
client.add_torrent(torrents, bangumi) await client.add_torrent(torrents, bangumi)
self.torrent.add_all(torrents) self.torrent.add_all(torrents)
return ResponseModel( return ResponseModel(
status=True, status=True,

View File

@@ -20,22 +20,20 @@ BangumiJSON: TypeAlias = str
class SearchTorrent(RequestContent, RSSAnalyser): class SearchTorrent(RequestContent, RSSAnalyser):
def search_torrents(self, rss_item: RSSItem) -> list[Torrent]: async def search_torrents(self, rss_item: RSSItem) -> list[Torrent]:
return self.get_torrents(rss_item.url) return await self.get_torrents(rss_item.url)
# torrents = self.get_torrents(rss_item.url)
# return torrents
def analyse_keyword( async def analyse_keyword(
self, keywords: list[str], site: str = "mikan", limit: int = 5 self, keywords: list[str], site: str = "mikan", limit: int = 5
) -> BangumiJSON: ):
rss_item = search_url(site, keywords) rss_item = search_url(site, keywords)
torrents = self.search_torrents(rss_item) torrents = await self.search_torrents(rss_item)
# yield for EventSourceResponse (Server Send) # yield for EventSourceResponse (Server Send)
exist_list = [] exist_list = []
for torrent in torrents: for torrent in torrents:
if len(exist_list) >= limit: if len(exist_list) >= limit:
break break
bangumi = self.torrent_to_data(torrent=torrent, rss=rss_item) bangumi = await self.torrent_to_data(torrent=torrent, rss=rss_item)
if bangumi: if bangumi:
special_link = self.special_url(bangumi, site).url special_link = self.special_url(bangumi, site).url
if special_link not in exist_list: if special_link not in exist_list:
@@ -49,7 +47,7 @@ class SearchTorrent(RequestContent, RSSAnalyser):
url = search_url(site, keywords) url = search_url(site, keywords)
return url return url
def search_season(self, data: Bangumi, site: str = "mikan") -> list[Torrent]: async def search_season(self, data: Bangumi, site: str = "mikan") -> list[Torrent]:
rss_item = self.special_url(data, site) rss_item = self.special_url(data, site)
torrents = self.search_torrents(rss_item) torrents = await self.search_torrents(rss_item)
return [torrent for torrent in torrents if data.title_raw in torrent.name] return [torrent for torrent in torrents if data.title_raw in torrent.name]

View File

@@ -2,12 +2,12 @@ import re
from urllib3.util import parse_url from urllib3.util import parse_url
from module.network import RequestContent
from module.rss import RSSEngine from module.rss import RSSEngine
from module.utils import save_image from module.utils import save_image
from module.network import RequestContent
def from_30_to_31(): async def from_30_to_31():
with RSSEngine() as db: with RSSEngine() as db:
db.migrate() db.migrate()
# Update poster link # Update poster link
@@ -29,18 +29,18 @@ def from_30_to_31():
aggregate = True aggregate = True
else: else:
aggregate = False aggregate = False
db.add_rss(rss_link=rss, aggregate=aggregate) await db.add_rss(rss_link=rss, aggregate=aggregate)
def cache_image(): async def cache_image():
with RSSEngine() as db, RequestContent() as req: with RSSEngine() as db:
bangumis = db.bangumi.search_all() bangumis = db.bangumi.search_all()
for bangumi in bangumis: async with RequestContent() as req:
if bangumi.poster_link: for bangumi in bangumis:
# Hash local path if bangumi.poster_link:
img = req.get_content(bangumi.poster_link) # Hash local path
suffix = bangumi.poster_link.split(".")[-1] img = await req.get_content(bangumi.poster_link)
img_path = save_image(img, suffix) suffix = bangumi.poster_link.split(".")[-1]
bangumi.poster_link = img_path img_path = save_image(img, suffix)
bangumi.poster_link = img_path
db.bangumi.update_all(bangumis) db.bangumi.update_all(bangumis)

View File

@@ -1,6 +1,6 @@
import json import json
import requests import httpx
def load(filename): def load(filename):
@@ -11,9 +11,9 @@ def load(filename):
def save(filename, obj): def save(filename, obj):
with open(filename, "w", encoding="utf-8") as f: with open(filename, "w", encoding="utf-8") as f:
json.dump(obj, f, indent=4, separators=(",", ": "), ensure_ascii=False) json.dump(obj, f, indent=4, separators=(",", ": "), ensure_ascii=False)
pass
def get(url): async def get(url):
req = requests.get(url) async with httpx.AsyncClient() as client:
return req.json() req = await client.get(url)
return req.json()

View File

@@ -1,12 +1,12 @@
from module.parser.analyser.tmdb_parser import tmdb_parser from module.parser.analyser.tmdb_parser import tmdb_parser
def test_tmdb_parser(): async def test_tmdb_parser():
bangumi_title = "海盗战记" bangumi_title = "海盗战记"
bangumi_year = "2019" bangumi_year = "2019"
bangumi_season = 2 bangumi_season = 2
tmdb_info = tmdb_parser(bangumi_title, "zh", test=True) tmdb_info = await tmdb_parser(bangumi_title, "zh", test=True)
assert tmdb_info.title == "冰海战记" assert tmdb_info.title == "冰海战记"
assert tmdb_info.year == bangumi_year assert tmdb_info.year == bangumi_year

1811
backend/uv.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,35 @@
import type { QbTorrentInfo } from '#/downloader';
import type { ApiSuccess } from '#/api';
export const apiDownloader = {
async getTorrents() {
const { data } = await axios.get<QbTorrentInfo[]>(
'api/v1/downloader/torrents'
);
return data!;
},
async pause(hashes: string[]) {
const { data } = await axios.post<ApiSuccess>(
'api/v1/downloader/torrents/pause',
{ hashes }
);
return data!;
},
async resume(hashes: string[]) {
const { data } = await axios.post<ApiSuccess>(
'api/v1/downloader/torrents/resume',
{ hashes }
);
return data!;
},
async deleteTorrents(hashes: string[], deleteFiles: boolean = false) {
const { data } = await axios.post<ApiSuccess>(
'api/v1/downloader/torrents/delete',
{ hashes, delete_files: deleteFiles }
);
return data!;
},
};

View File

@@ -2,7 +2,7 @@
import { ErrorPicture, Write } from '@icon-park/vue-next'; import { ErrorPicture, Write } from '@icon-park/vue-next';
import type { BangumiRule } from '#/bangumi'; import type { BangumiRule } from '#/bangumi';
withDefaults( const props = withDefaults(
defineProps<{ defineProps<{
type?: 'primary' | 'search' | 'mobile'; type?: 'primary' | 'search' | 'mobile';
bangumi: BangumiRule; bangumi: BangumiRule;
@@ -13,6 +13,8 @@ withDefaults(
); );
defineEmits(['click']); defineEmits(['click']);
const posterSrc = computed(() => resolvePosterUrl(props.bangumi.poster_link));
</script> </script>
<template> <template>
@@ -28,7 +30,7 @@ defineEmits(['click']);
> >
<div class="card-poster"> <div class="card-poster">
<template v-if="bangumi.poster_link"> <template v-if="bangumi.poster_link">
<img :src="bangumi.poster_link" :alt="bangumi.official_title" class="card-img" /> <img :src="posterSrc" :alt="bangumi.official_title" class="card-img" />
</template> </template>
<template v-else> <template v-else>
<div class="card-placeholder"> <div class="card-placeholder">
@@ -37,6 +39,14 @@ defineEmits(['click']);
</template> </template>
<div class="card-overlay"> <div class="card-overlay">
<div class="card-overlay-tags">
<ab-tag :title="`Season ${bangumi.season}`" type="primary" />
<ab-tag
v-if="bangumi.group_name"
:title="bangumi.group_name"
type="primary"
/>
</div>
<div class="card-edit-btn"> <div class="card-edit-btn">
<Write size="18" /> <Write size="18" />
</div> </div>
@@ -45,14 +55,6 @@ defineEmits(['click']);
<div class="card-info"> <div class="card-info">
<div class="card-title">{{ bangumi.official_title }}</div> <div class="card-title">{{ bangumi.official_title }}</div>
<div class="card-tags">
<ab-tag :title="`Season ${bangumi.season}`" type="primary" />
<ab-tag
v-if="bangumi.group_name"
:title="bangumi.group_name"
type="primary"
/>
</div>
</div> </div>
</div> </div>
@@ -62,7 +64,7 @@ defineEmits(['click']);
<div class="search-card-content"> <div class="search-card-content">
<div class="search-card-thumb"> <div class="search-card-thumb">
<template v-if="bangumi.poster_link"> <template v-if="bangumi.poster_link">
<img :src="bangumi.poster_link" :alt="bangumi.official_title" class="search-card-img" /> <img :src="posterSrc" :alt="bangumi.official_title" class="search-card-img" />
</template> </template>
<template v-else> <template v-else>
<div class="card-placeholder card-placeholder--small"> <div class="card-placeholder card-placeholder--small">
@@ -136,6 +138,7 @@ defineEmits(['click']);
position: absolute; position: absolute;
inset: 0; inset: 0;
display: flex; display: flex;
flex-direction: column;
align-items: center; align-items: center;
justify-content: center; justify-content: center;
opacity: 0; opacity: 0;
@@ -152,6 +155,24 @@ defineEmits(['click']);
} }
} }
.card-overlay-tags {
position: absolute;
bottom: 6px;
left: 6px;
right: 6px;
display: flex;
gap: 3px;
flex-wrap: wrap;
:deep(.tag) {
background: rgba(0, 0, 0, 0.5);
border-color: rgba(255, 255, 255, 0.4);
color: #fff;
font-size: 9px;
padding: 1px 6px;
}
}
.card-edit-btn { .card-edit-btn {
width: 40px; width: 40px;
height: 40px; height: 40px;
@@ -180,7 +201,6 @@ defineEmits(['click']);
overflow: hidden; overflow: hidden;
text-overflow: ellipsis; text-overflow: ellipsis;
white-space: nowrap; white-space: nowrap;
margin-bottom: 4px;
transition: color var(--transition-normal); transition: color var(--transition-normal);
} }

View File

@@ -80,6 +80,32 @@
"step2_desc": "Set your qBittorrent host address, username, and password.", "step2_desc": "Set your qBittorrent host address, username, and password.",
"step3_title": "Access Downloader", "step3_title": "Access Downloader",
"step3_desc": "Once configured, the downloader web UI will be embedded right here." "step3_desc": "Once configured, the downloader web UI will be embedded right here."
},
"empty_torrents": "No torrents in Bangumi category",
"selected": "selected",
"torrent": {
"name": "Name",
"progress": "Progress",
"status": "Status",
"size": "Size",
"dlspeed": "DL Speed",
"upspeed": "UP Speed",
"peers": "Seeds/Peers"
},
"state": {
"downloading": "Downloading",
"seeding": "Seeding",
"paused": "Paused",
"stalled": "Stalled",
"queued": "Queued",
"checking": "Checking",
"error": "Error",
"metadata": "Metadata"
},
"action": {
"pause": "Pause",
"resume": "Resume",
"delete": "Delete"
} }
}, },
"homepage": { "homepage": {

View File

@@ -80,6 +80,32 @@
"step2_desc": "设置 qBittorrent 的地址、用户名和密码。", "step2_desc": "设置 qBittorrent 的地址、用户名和密码。",
"step3_title": "访问下载器", "step3_title": "访问下载器",
"step3_desc": "配置完成后,下载器界面将直接嵌入此处。" "step3_desc": "配置完成后,下载器界面将直接嵌入此处。"
},
"empty_torrents": "Bangumi 分类中暂无种子",
"selected": "已选择",
"torrent": {
"name": "名称",
"progress": "进度",
"status": "状态",
"size": "大小",
"dlspeed": "下载速度",
"upspeed": "上传速度",
"peers": "做种/下载"
},
"state": {
"downloading": "下载中",
"seeding": "做种中",
"paused": "已暂停",
"stalled": "等待中",
"queued": "排队中",
"checking": "校验中",
"error": "错误",
"metadata": "获取元数据"
},
"action": {
"pause": "暂停",
"resume": "恢复",
"delete": "删除"
} }
}, },
"homepage": { "homepage": {

View File

@@ -7,6 +7,7 @@ definePage({
}); });
const { t } = useMyI18n(); const { t } = useMyI18n();
const posterSrc = (link: string | null | undefined) => resolvePosterUrl(link);
const { bangumi } = storeToRefs(useBangumiStore()); const { bangumi } = storeToRefs(useBangumiStore());
const { getAll, openEditPopup } = useBangumiStore(); const { getAll, openEditPopup } = useBangumiStore();
const { isMobile } = useBreakpointQuery(); const { isMobile } = useBreakpointQuery();
@@ -136,23 +137,23 @@ function isToday(index: number): boolean {
<div class="calendar-card-poster"> <div class="calendar-card-poster">
<img <img
v-if="item.poster_link" v-if="item.poster_link"
:src="item.poster_link" :src="posterSrc(item.poster_link)"
:alt="item.official_title" :alt="item.official_title"
class="calendar-card-img" class="calendar-card-img"
/> />
<div v-else class="calendar-card-placeholder"> <div v-else class="calendar-card-placeholder">
<ErrorPicture theme="outline" size="20" /> <ErrorPicture theme="outline" size="20" />
</div> </div>
</div> <div class="calendar-card-overlay">
<div class="calendar-card-info"> <div class="calendar-card-overlay-tags">
<div class="calendar-card-title">{{ item.official_title }}</div> <ab-tag :title="`S${item.season}`" type="primary" />
<div class="calendar-card-meta"> <ab-tag
<ab-tag :title="`S${item.season}`" type="primary" /> v-if="item.group_name"
<ab-tag :title="item.group_name"
v-if="item.group_name" type="primary"
:title="item.group_name" />
type="primary" </div>
/> <div class="calendar-card-overlay-title">{{ item.official_title }}</div>
</div> </div>
</div> </div>
</div> </div>
@@ -202,7 +203,7 @@ function isToday(index: number): boolean {
<div class="calendar-row-poster"> <div class="calendar-row-poster">
<img <img
v-if="item.poster_link" v-if="item.poster_link"
:src="item.poster_link" :src="posterSrc(item.poster_link)"
:alt="item.official_title" :alt="item.official_title"
class="calendar-row-img" class="calendar-row-img"
/> />
@@ -394,6 +395,7 @@ function isToday(index: number): boolean {
} }
.calendar-card-poster { .calendar-card-poster {
position: relative;
border-radius: var(--radius-sm); border-radius: var(--radius-sm);
overflow: hidden; overflow: hidden;
aspect-ratio: 2 / 3; aspect-ratio: 2 / 3;
@@ -417,25 +419,49 @@ function isToday(index: number): boolean {
transition: background-color var(--transition-normal); transition: background-color var(--transition-normal);
} }
.calendar-card-info { .calendar-card-overlay {
padding: 6px 2px 2px; position: absolute;
inset: 0;
opacity: 0;
background: rgba(0, 0, 0, 0.3);
backdrop-filter: blur(2px);
transition: opacity var(--transition-normal);
.calendar-card:hover & {
opacity: 1;
}
} }
.calendar-card-title { .calendar-card-overlay-title {
font-size: 12px; position: absolute;
top: 6px;
left: 6px;
right: 6px;
font-size: 11px;
font-weight: 500; font-weight: 500;
color: var(--color-text); color: #fff;
overflow: hidden; overflow: hidden;
text-overflow: ellipsis; text-overflow: ellipsis;
white-space: nowrap; white-space: nowrap;
margin-bottom: 4px; text-shadow: 0 1px 2px rgba(0, 0, 0, 0.5);
transition: color var(--transition-normal);
} }
.calendar-card-meta { .calendar-card-overlay-tags {
position: absolute;
bottom: 5px;
left: 5px;
right: 5px;
display: flex; display: flex;
gap: 3px; gap: 3px;
flex-wrap: wrap; flex-wrap: wrap;
:deep(.tag) {
background: rgba(0, 0, 0, 0.5);
border-color: rgba(255, 255, 255, 0.4);
color: #fff;
font-size: 9px;
padding: 1px 5px;
}
} }
// Empty day // Empty day

View File

@@ -1,30 +1,197 @@
<script lang="ts" setup> <script lang="tsx" setup>
import { NDataTable, NProgress, type DataTableColumns } from 'naive-ui';
import type { QbTorrentInfo, TorrentGroup } from '#/downloader';
definePage({ definePage({
name: 'Downloader', name: 'Downloader',
}); });
const { t } = useMyI18n();
const { config } = storeToRefs(useConfigStore()); const { config } = storeToRefs(useConfigStore());
const { getConfig } = useConfigStore(); const { getConfig } = useConfigStore();
const { groups, selectedHashes, loading } = storeToRefs(useDownloaderStore());
const {
getAll,
pauseSelected,
resumeSelected,
deleteSelected,
toggleHash,
toggleGroup,
clearSelection,
} = useDownloaderStore();
const isNull = computed(() => { const isNull = computed(() => {
return config.value.downloader.host === ''; return config.value.downloader.host === '';
}); });
const url = computed(() => { let timer: ReturnType<typeof setInterval> | null = null;
const downloader = config.value.downloader;
const host = downloader.host.replace(/http(s?)\:\/\//, '');
const protocol = downloader.ssl ? 'https' : 'http';
return `${protocol}://${host}`;
});
onActivated(() => { onActivated(() => {
getConfig(); getConfig();
if (!isNull.value) {
getAll();
timer = setInterval(getAll, 5000);
}
}); });
onDeactivated(() => {
if (timer) {
clearInterval(timer);
timer = null;
}
clearSelection();
});
function formatSize(bytes: number): string {
if (bytes === 0) return '0 B';
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(1024));
return (bytes / Math.pow(1024, i)).toFixed(1) + ' ' + units[i];
}
function formatSpeed(bytesPerSec: number): string {
if (bytesPerSec === 0) return '-';
return formatSize(bytesPerSec) + '/s';
}
function formatEta(seconds: number): string {
if (seconds <= 0 || seconds === 8640000) return '-';
if (seconds < 60) return `${seconds}s`;
if (seconds < 3600) return `${Math.floor(seconds / 60)}m`;
const h = Math.floor(seconds / 3600);
const m = Math.floor((seconds % 3600) / 60);
return `${h}h${m}m`;
}
function stateLabel(state: string): string {
const map: Record<string, string> = {
downloading: t('downloader.state.downloading'),
uploading: t('downloader.state.seeding'),
pausedDL: t('downloader.state.paused'),
pausedUP: t('downloader.state.paused'),
stalledDL: t('downloader.state.stalled'),
stalledUP: t('downloader.state.seeding'),
queuedDL: t('downloader.state.queued'),
queuedUP: t('downloader.state.queued'),
checkingDL: t('downloader.state.checking'),
checkingUP: t('downloader.state.checking'),
error: t('downloader.state.error'),
missingFiles: t('downloader.state.error'),
metaDL: t('downloader.state.metadata'),
};
return map[state] || state;
}
function stateType(state: string): string {
if (state.includes('paused')) return 'inactive';
if (state === 'downloading' || state === 'forcedDL') return 'active';
if (state.includes('UP') || state === 'uploading') return 'primary';
if (state === 'error' || state === 'missingFiles') return 'warn';
return 'primary';
}
function isGroupAllSelected(group: TorrentGroup): boolean {
return group.torrents.every((t) => selectedHashes.value.includes(t.hash));
}
function tableColumns(): DataTableColumns<QbTorrentInfo> {
return [
{
type: 'selection',
},
{
title: t('downloader.torrent.name'),
key: 'name',
ellipsis: { tooltip: true },
minWidth: 200,
},
{
title: t('downloader.torrent.progress'),
key: 'progress',
width: 160,
render(row: QbTorrentInfo) {
return (
<NProgress
type="line"
percentage={Math.round(row.progress * 100)}
indicator-placement="inside"
processing={row.state === 'downloading' || row.state === 'forcedDL'}
/>
);
},
},
{
title: t('downloader.torrent.status'),
key: 'state',
width: 100,
render(row: QbTorrentInfo) {
return <ab-tag type={stateType(row.state)} title={stateLabel(row.state)} />;
},
},
{
title: t('downloader.torrent.size'),
key: 'size',
width: 100,
render(row: QbTorrentInfo) {
return formatSize(row.size);
},
},
{
title: t('downloader.torrent.dlspeed'),
key: 'dlspeed',
width: 110,
render(row: QbTorrentInfo) {
return formatSpeed(row.dlspeed);
},
},
{
title: t('downloader.torrent.upspeed'),
key: 'upspeed',
width: 110,
render(row: QbTorrentInfo) {
return formatSpeed(row.upspeed);
},
},
{
title: 'ETA',
key: 'eta',
width: 80,
render(row: QbTorrentInfo) {
return formatEta(row.eta);
},
},
{
title: t('downloader.torrent.peers'),
key: 'peers',
width: 90,
render(row: QbTorrentInfo) {
return `${row.num_seeds} / ${row.num_leechs}`;
},
},
];
}
function tableRowKey(row: QbTorrentInfo) {
  // Torrent info-hashes are unique, so they double as stable row keys.
  const { hash } = row;
  return hash;
}
function onCheckedChange(group: TorrentGroup, keys: string[]) {
  // Replace this group's contribution to the global selection with `keys`,
  // leaving selections that belong to other groups untouched.
  const inGroup = new Set(group.torrents.map((torrent) => torrent.hash));
  const keptFromOtherGroups = selectedHashes.value.filter(
    (hash) => !inGroup.has(hash)
  );
  selectedHashes.value = keptFromOtherGroups.concat(keys);
}
function groupCheckedKeys(group: TorrentGroup): string[] {
  // Hashes of this group's torrents that are currently selected,
  // in the group's own torrent order.
  const selected = new Set(selectedHashes.value);
  const keys: string[] = [];
  for (const torrent of group.torrents) {
    if (selected.has(torrent.hash)) {
      keys.push(torrent.hash);
    }
  }
  return keys;
}
</script> </script>
<template> <template>
<div class="page-embed"> <div class="page-downloader">
<div v-if="isNull" class="empty-guide"> <div v-if="isNull" class="empty-guide">
<div class="empty-guide-header anim-fade-in"> <div class="empty-guide-header anim-fade-in">
<div class="empty-guide-title">{{ $t('downloader.empty.title') }}</div> <div class="empty-guide-title">{{ $t('downloader.empty.title') }}</div>
@@ -62,30 +229,114 @@ onActivated(() => {
</RouterLink> </RouterLink>
</div> </div>
<iframe <div v-else class="downloader-content">
v-else <div v-if="groups.length === 0 && !loading" class="downloader-empty">
:src="url" {{ $t('downloader.empty_torrents') }}
frameborder="0" </div>
allowfullscreen="true"
class="embed-frame" <div v-else class="downloader-groups">
></iframe> <ab-fold-panel
v-for="group in groups"
:key="group.savePath"
:title="`${group.name} (${group.count})`"
:default-open="true"
>
<NDataTable
:columns="tableColumns()"
:data="group.torrents"
:row-key="tableRowKey"
:pagination="false"
:bordered="false"
:checked-row-keys="groupCheckedKeys(group)"
size="small"
@update:checked-row-keys="(keys: any) => onCheckedChange(group, keys as string[])"
/>
</ab-fold-panel>
</div>
<Transition name="fade">
<div v-if="selectedHashes.length > 0" class="action-bar">
<span class="action-bar-count">
{{ selectedHashes.length }} {{ $t('downloader.selected') }}
</span>
<div class="action-bar-buttons">
<ab-button @click="resumeSelected">{{ $t('downloader.action.resume') }}</ab-button>
<ab-button @click="pauseSelected">{{ $t('downloader.action.pause') }}</ab-button>
<ab-button type="warn" @click="deleteSelected(false)">{{ $t('downloader.action.delete') }}</ab-button>
</div>
</div>
</Transition>
</div>
</div> </div>
</template> </template>
<style lang="scss" scoped> <style lang="scss" scoped>
.page-embed { .page-downloader {
overflow: auto; overflow: auto;
flex-grow: 1; flex-grow: 1;
display: flex; display: flex;
flex-direction: column; flex-direction: column;
} }
.embed-frame { .downloader-content {
width: 100%; display: flex;
height: 100%; flex-direction: column;
flex: 1; flex: 1;
gap: 12px;
padding-bottom: 60px;
}
.downloader-groups {
display: flex;
flex-direction: column;
gap: 12px;
}
.downloader-empty {
display: flex;
align-items: center;
justify-content: center;
flex: 1;
color: var(--color-text-secondary);
font-size: 14px;
}
.action-bar {
position: fixed;
bottom: 24px;
left: 50%;
transform: translateX(-50%);
display: flex;
align-items: center;
gap: 16px;
padding: 10px 20px;
border-radius: var(--radius-md); border-radius: var(--radius-md);
background: var(--color-surface);
border: 1px solid var(--color-border); border: 1px solid var(--color-border);
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.12);
z-index: 100;
}
.action-bar-count {
font-size: 13px;
color: var(--color-text-secondary);
white-space: nowrap;
}
.action-bar-buttons {
display: flex;
gap: 8px;
}
.fade-enter-active,
.fade-leave-active {
transition: opacity 0.2s ease, transform 0.2s ease;
}
.fade-enter-from,
.fade-leave-to {
opacity: 0;
transform: translateX(-50%) translateY(8px);
} }
.empty-guide { .empty-guide {

View File

@@ -0,0 +1,117 @@
import type { QbTorrentInfo, TorrentGroup } from '#/downloader';
export const useDownloaderStore = defineStore('downloader', () => {
  // Raw torrent list as last fetched from the downloader API.
  const torrents = ref<QbTorrentInfo[]>([]);
  // Hashes of torrents currently selected in the UI (across all groups).
  const selectedHashes = ref<string[]>([]);
  const loading = ref(false);

  /**
   * Torrents grouped by save path. Each group carries the aggregate size,
   * a size-weighted overall progress, torrents sorted newest-first, and
   * groups are sorted by display name.
   */
  const groups = computed<TorrentGroup[]>(() => {
    const bySavePath = new Map<string, QbTorrentInfo[]>();
    for (const t of torrents.value) {
      const bucket = bySavePath.get(t.save_path);
      if (bucket) {
        bucket.push(t);
      } else {
        bySavePath.set(t.save_path, [t]);
      }
    }
    const result: TorrentGroup[] = [];
    for (const [savePath, items] of bySavePath) {
      // Split on both separators: qBittorrent on Windows reports save paths
      // with backslashes, which a '/'-only split would leave as one segment.
      const parts = savePath.replace(/[\\/]+$/, '').split(/[\\/]/);
      const name = parts[parts.length - 1] || savePath;
      const totalSize = items.reduce((sum, t) => sum + t.size, 0);
      // Weight each torrent's progress by its size; empty groups report 0.
      const overallProgress =
        totalSize > 0
          ? items.reduce((sum, t) => sum + t.size * t.progress, 0) / totalSize
          : 0;
      result.push({
        name,
        savePath,
        totalSize,
        overallProgress,
        count: items.length,
        torrents: items.sort((a, b) => b.added_on - a.added_on),
      });
    }
    return result.sort((a, b) => a.name.localeCompare(b.name));
  });

  // Refresh the torrent list; on failure fall back to an empty list so the
  // UI shows its empty state instead of stale data.
  async function getAll() {
    loading.value = true;
    try {
      torrents.value = await apiDownloader.getTorrents();
    } catch {
      torrents.value = [];
    } finally {
      loading.value = false;
    }
  }

  // Shared options for mutating actions: refetch and clear the selection
  // once the action succeeds.
  const opts = {
    showMessage: true,
    onSuccess() {
      getAll();
      selectedHashes.value = [];
    },
  };

  const { execute: pauseSelected } = useApi(
    () => apiDownloader.pause(selectedHashes.value),
    opts
  );
  const { execute: resumeSelected } = useApi(
    () => apiDownloader.resume(selectedHashes.value),
    opts
  );
  const { execute: deleteSelected } = useApi(
    (deleteFiles: boolean = false) =>
      apiDownloader.deleteTorrents(selectedHashes.value, deleteFiles),
    opts
  );

  // Toggle a single torrent's membership in the selection.
  function toggleHash(hash: string) {
    const idx = selectedHashes.value.indexOf(hash);
    if (idx === -1) {
      selectedHashes.value.push(hash);
    } else {
      selectedHashes.value.splice(idx, 1);
    }
  }

  // Select every torrent in the group, or deselect them all when the whole
  // group is already selected.
  function toggleGroup(group: TorrentGroup) {
    const groupHashes = group.torrents.map((t) => t.hash);
    const allSelected = groupHashes.every((h) =>
      selectedHashes.value.includes(h)
    );
    if (allSelected) {
      selectedHashes.value = selectedHashes.value.filter(
        (h) => !groupHashes.includes(h)
      );
    } else {
      const toAdd = groupHashes.filter(
        (h) => !selectedHashes.value.includes(h)
      );
      selectedHashes.value.push(...toAdd);
    }
  }

  function clearSelection() {
    selectedHashes.value = [];
  }

  return {
    torrents,
    groups,
    selectedHashes,
    loading,
    getAll,
    pauseSelected,
    resumeSelected,
    deleteSelected,
    toggleHash,
    toggleGroup,
    clearSelection,
  };
});

View File

@@ -14,6 +14,10 @@ export const useSearchStore = defineStore('search', () => {
const loading = computed(() => status.value !== 'CLOSED'); const loading = computed(() => status.value !== 'CLOSED');
const bangumiList = computed(() =>
searchData.value.map((item, index) => ({ order: index, value: item }))
);
async function getProviders() { async function getProviders() {
providers.value = await apiSearch.getProvider(); providers.value = await apiSearch.getProvider();
provider.value = providers.value[0]; provider.value = providers.value[0];
@@ -21,18 +25,20 @@ export const useSearchStore = defineStore('search', () => {
function clearSearch() { function clearSearch() {
keyword.value = ''; keyword.value = '';
searchData.value = [];
closeSearch();
} }
return { return {
keyword, inputValue: keyword,
loading, loading,
provider, provider,
providers, providers,
searchData, bangumiList,
clearSearch, clearSearch,
getProviders, getProviders,
openSearch, onSearch: openSearch,
closeSearch, closeSearch,
}; };
}); });

View File

@@ -0,0 +1,5 @@
/**
 * Resolve a poster reference into a usable URL.
 *
 * - empty/null/undefined -> '' so an <img src> binding stays blank
 * - absolute http(s) URLs pass through unchanged
 * - local backend paths get exactly one leading slash. Guarding against a
 *   pre-existing leading slash avoids producing '//path', which browsers
 *   treat as a protocol-relative URL pointing at host "path".
 */
export function resolvePosterUrl(link: string | null | undefined): string {
  if (!link) return '';
  if (link.startsWith('http://') || link.startsWith('https://')) return link;
  return link.startsWith('/') ? link : `/${link}`;
}

45
webui/types/downloader.ts Normal file
View File

@@ -0,0 +1,45 @@
// Torrent lifecycle states as reported by the qBittorrent Web API
// (`state` field of a torrents/info entry); 'unknown' is the catch-all
// for anything the client does not recognize.
export type QbTorrentState =
  | 'error'
  | 'missingFiles'
  | 'uploading'
  | 'pausedUP'
  | 'queuedUP'
  | 'stalledUP'
  | 'checkingUP'
  | 'forcedUP'
  | 'allocating'
  | 'downloading'
  | 'metaDL'
  | 'pausedDL'
  | 'queuedDL'
  | 'stalledDL'
  | 'checkingDL'
  | 'forcedDL'
  | 'checkingResumeData'
  | 'moving'
  | 'unknown';
// One torrent entry, mirroring the qBittorrent Web API torrents/info shape.
export interface QbTorrentInfo {
  hash: string; // unique info-hash; used as row key and as the action target
  name: string; // display name of the torrent
  size: number; // total size; formatted via formatSize (presumably bytes — confirm against API)
  progress: number; // completion fraction in [0, 1]; rendered as Math.round(progress * 100)%
  dlspeed: number; // download speed; formatted via formatSpeed (presumably bytes/s — confirm)
  upspeed: number; // upload speed; formatted via formatSpeed (presumably bytes/s — confirm)
  num_seeds: number; // connected seeds; shown as "seeds / leechs"
  num_leechs: number; // connected leechers
  state: QbTorrentState; // current lifecycle state (see QbTorrentState)
  eta: number; // estimated time remaining; formatted via formatEta
  category: string; // qBittorrent category label
  save_path: string; // destination directory; used to group torrents in the UI
  added_on: number; // add timestamp; used to sort torrents newest-first (presumably Unix epoch seconds)
}
// A set of torrents sharing one save path, with aggregate display stats.
export interface TorrentGroup {
  name: string; // last path segment of savePath (falls back to the full path)
  savePath: string; // the shared save_path that defines the group
  totalSize: number; // sum of member torrent sizes
  overallProgress: number; // size-weighted mean progress in [0, 1]; 0 when totalSize is 0
  count: number; // number of torrents in the group
  torrents: QbTorrentInfo[]; // members, sorted newest-first by added_on
}

View File

@@ -10,6 +10,7 @@ declare global {
const apiCheck: typeof import('../../src/api/check')['apiCheck'] const apiCheck: typeof import('../../src/api/check')['apiCheck']
const apiConfig: typeof import('../../src/api/config')['apiConfig'] const apiConfig: typeof import('../../src/api/config')['apiConfig']
const apiDownload: typeof import('../../src/api/download')['apiDownload'] const apiDownload: typeof import('../../src/api/download')['apiDownload']
const apiDownloader: typeof import('../../src/api/downloader')['apiDownloader']
const apiLog: typeof import('../../src/api/log')['apiLog'] const apiLog: typeof import('../../src/api/log')['apiLog']
const apiPasskey: typeof import('../../src/api/passkey')['apiPasskey'] const apiPasskey: typeof import('../../src/api/passkey')['apiPasskey']
const apiProgram: typeof import('../../src/api/program')['apiProgram'] const apiProgram: typeof import('../../src/api/program')['apiProgram']
@@ -70,6 +71,7 @@ declare global {
const readonly: typeof import('vue')['readonly'] const readonly: typeof import('vue')['readonly']
const ref: typeof import('vue')['ref'] const ref: typeof import('vue')['ref']
const resolveComponent: typeof import('vue')['resolveComponent'] const resolveComponent: typeof import('vue')['resolveComponent']
const resolvePosterUrl: typeof import('../../src/utils/poster')['resolvePosterUrl']
const setActivePinia: typeof import('pinia')['setActivePinia'] const setActivePinia: typeof import('pinia')['setActivePinia']
const setMapStoreSuffix: typeof import('pinia')['setMapStoreSuffix'] const setMapStoreSuffix: typeof import('pinia')['setMapStoreSuffix']
const shallowReactive: typeof import('vue')['shallowReactive'] const shallowReactive: typeof import('vue')['shallowReactive']
@@ -95,6 +97,7 @@ declare global {
const useCssModule: typeof import('vue')['useCssModule'] const useCssModule: typeof import('vue')['useCssModule']
const useCssVars: typeof import('vue')['useCssVars'] const useCssVars: typeof import('vue')['useCssVars']
const useDarkMode: typeof import('../../src/hooks/useDarkMode')['useDarkMode'] const useDarkMode: typeof import('../../src/hooks/useDarkMode')['useDarkMode']
const useDownloaderStore: typeof import('../../src/store/downloader')['useDownloaderStore']
const useI18n: typeof import('vue-i18n')['useI18n'] const useI18n: typeof import('vue-i18n')['useI18n']
const useIntervalFn: typeof import('@vueuse/core')['useIntervalFn'] const useIntervalFn: typeof import('@vueuse/core')['useIntervalFn']
const useLocalStorage: typeof import('@vueuse/core')['useLocalStorage'] const useLocalStorage: typeof import('@vueuse/core')['useLocalStorage']