Mirror of https://github.com/EstrellaXD/Auto_Bangumi.git, synced 2026-02-02 17:59:27 +08:00
feat: fix search, poster serving, and add hover overlay UI for cards
- Fix search store exports to match component expectations (inputValue, bangumiList, onSearch) and transform data to SearchResult format
- Fix poster endpoint path check that incorrectly blocked all requests
- Add resolvePosterUrl utility to handle both external URLs and local paths
- Move tags into hover overlay on homepage cards and calendar cards
- Show title and tags on poster hover with dark semi-transparent styling
- Add downloader API, store, and page
- Update backend to async patterns and uv migration changes
- Remove .claude/settings.local.json from tracking

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
.claude/settings.local.json (deleted, 87 lines)
@@ -1,87 +0,0 @@
-{
-  "permissions": {
-    "allow": [
-      "Bash(git -C /Users/estrella/Developer/AutoBangumi/Auto_Bangumi log --oneline -10)",
-      "Bash(git checkout:*)",
-      "Bash(python -m pytest:*)",
-      "Bash(python:*)",
-      "Bash(uv init:*)",
-      "Bash(uv sync:*)",
-      "Bash(uv run pytest:*)",
-      "Skill(feature-dev:feature-dev)",
-      "Bash(uv pip install:*)",
-      "Bash(uv run python:*)",
-      "Bash(curl:*)",
-      "Bash(pkill:*)",
-      "Bash(uv pip show:*)",
-      "Bash(uv pip list:*)",
-      "Bash(uv lock:*)",
-      "Bash(lsof:*)",
-      "Bash(kill:*)",
-      "Bash(VITE_API_URL=http://localhost:18080 pnpm dev:*)",
-      "Bash(pnpm dev:*)",
-      "Bash(pnpm add:*)",
-      "Bash(git add:*)",
-      "Bash(git commit:*)",
-      "Skill(planning-with-files)",
-      "Bash(../.venv/bin/python -m pytest test/test_database.py -v)",
-      "Bash(.venv/bin/python:*)",
-      "Bash(ruff check:*)",
-      "Bash(ls:*)",
-      "Bash(/Users/estrella/Developer/AutoBangumi/Auto_Bangumi/backend/.venv/bin/python:*)",
-      "Bash(uv run ruff check:*)",
-      "Bash(git rm:*)",
-      "Bash(git push:*)",
-      "Bash(gh pr create:*)",
-      "Bash(gh pr checks:*)",
-      "Bash(gh run view:*)",
-      "Bash(git ls-tree:*)",
-      "Bash(while read f)",
-      "Bash(do git show \"HEAD:$f\")",
-      "Bash(done)",
-      "Bash(git reset:*)",
-      "Skill(ui-ux-pro-max)",
-      "Bash(tree:*)",
-      "Bash(git stash:*)",
-      "Bash(python3:*)",
-      "Bash(pnpm build:*)",
-      "Bash(pnpm install:*)",
-      "Bash(uv venv:*)",
-      "Bash(../.venv/bin/python:*)",
-      "Bash(xargs:*)",
-      "Skill(agent-browser)",
-      "Bash(agent-browser open:*)",
-      "Bash(agent-browser screenshot:*)",
-      "Bash(agent-browser snapshot:*)",
-      "Bash(agent-browser eval \"JSON.stringify\\(window.__consoleErrors || ''no errors captured''\\)\")",
-      "Bash(agent-browser eval:*)",
-      "Bash(agent-browser close:*)",
-      "Bash(agent-browser reload:*)",
-      "Bash(agent-browser fill:*)",
-      "Bash(agent-browser click:*)",
-      "Skill(commit-commands:commit)",
-      "Bash(gh api:*)",
-      "Bash(git fetch:*)",
-      "Bash(git rebase:*)",
-      "Bash(grep:*)",
-      "Bash(git merge:*)",
-      "Bash(agent-browser scroll:*)",
-      "Bash(agent-browser find text \"Passkey Settings\" click)",
-      "Bash(agent-browser find:*)",
-      "Bash(agent-browser find text \"添加\" click)",
-      "Bash(npx vue-tsc:*)",
-      "Bash(npx vite build:*)",
-      "WebSearch",
-      "WebFetch(domain:bangumi.github.io)",
-      "WebFetch(domain:raw.githubusercontent.com)",
-      "WebFetch(domain:api.bgm.tv)",
-      "Bash(__NEW_LINE_e2219f405dac932c__ echo \"\")",
-      "Bash(__NEW_LINE_7c1bb4605ef4ad2a__ echo \"\")",
-      "Bash(source ../.venv/bin/activate)",
-      "Bash(source:*)",
-      "Bash(npx vite:*)",
-      "Bash(agent-browser press:*)",
-      "Bash(agent-browser get:*)"
-    ]
-  }
-}
.gitignore (vendored, 4 changes)
@@ -216,3 +216,7 @@ dev-dist
 
 # test file
 test.*
+
+# local config
+/backend/config/
+.claude/settings.local.json
@@ -40,8 +40,8 @@ app = create_app()
 
 @app.get("/posters/{path:path}", tags=["posters"])
 def posters(path: str):
-    # only allow access to files in the posters directory
-    if not path.startswith("posters/"):
+    # prevent path traversal
+    if ".." in path:
         return HTMLResponse(status_code=403)
     return FileResponse(f"data/posters/{path}")
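Note: the removed check was the bug named in the commit message. The `{path:path}` parameter never carries a `posters/` prefix, so `if not path.startswith("posters/")` rejected every request. Consolidated, the handler after this change reads roughly as follows (a sketch assembled from the diff; imports follow the surrounding module):

from fastapi.responses import FileResponse, HTMLResponse

@app.get("/posters/{path:path}", tags=["posters"])
def posters(path: str):
    # prevent path traversal
    if ".." in path:
        return HTMLResponse(status_code=403)
    return FileResponse(f"data/posters/{path}")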
@@ -1,33 +1,35 @@
+import asyncio
+import functools
 import logging
-import threading
-import time
 
 from .timeout import timeout
 
 logger = logging.getLogger(__name__)
-lock = threading.Lock()
+_lock = asyncio.Lock()
 
 
 def qb_connect_failed_wait(func):
-    def wrapper(*args, **kwargs):
+    @functools.wraps(func)
+    async def wrapper(*args, **kwargs):
         times = 0
         while times < 5:
             try:
-                return func(*args, **kwargs)
+                return await func(*args, **kwargs)
             except Exception as e:
                 logger.debug(f"URL: {args[0]}")
                 logger.warning(e)
                 logger.warning("Cannot connect to qBittorrent. Wait 5 min and retry...")
-                time.sleep(300)
+                await asyncio.sleep(300)
                 times += 1
 
     return wrapper
 
 
 def api_failed(func):
-    def wrapper(*args, **kwargs):
+    @functools.wraps(func)
+    async def wrapper(*args, **kwargs):
         try:
-            return func(*args, **kwargs)
+            return await func(*args, **kwargs)
         except Exception as e:
             logger.debug(f"URL: {args[0]}")
             logger.warning("Wrong API response.")
@@ -37,8 +39,9 @@ def api_failed(func):
 
 
 def locked(func):
-    def wrapper(*args, **kwargs):
-        with lock:
-            return func(*args, **kwargs)
+    @functools.wraps(func)
+    async def wrapper(*args, **kwargs):
+        async with _lock:
+            return await func(*args, **kwargs)
 
     return wrapper
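Note: all three decorators now assume the wrapped callable is a coroutine, and `locked` serializes every caller through the single module-level `asyncio.Lock`. A minimal usage sketch (the class and method here are hypothetical):

from module.ab_decorator import api_failed, locked

class SomeClient:
    @api_failed
    @locked
    async def fetch(self, url: str):
        # serialized via the shared _lock; failures are logged by api_failed
        return await self._request(url)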
@@ -3,6 +3,7 @@ from fastapi import APIRouter
 from .auth import router as auth_router
 from .bangumi import router as bangumi_router
 from .config import router as config_router
+from .downloader import router as downloader_router
 from .log import router as log_router
 from .passkey import router as passkey_router
 from .program import router as program_router
@@ -19,5 +20,6 @@ v1.include_router(log_router)
 v1.include_router(program_router)
 v1.include_router(bangumi_router)
 v1.include_router(config_router)
+v1.include_router(downloader_router)
 v1.include_router(rss_router)
 v1.include_router(search_router)
@@ -45,7 +45,7 @@ async def update_rule(
     data: BangumiUpdate,
 ):
     with TorrentManager() as manager:
-        resp = manager.update_rule(bangumi_id, data)
+        resp = await manager.update_rule(bangumi_id, data)
         return u_response(resp)
 
 
@@ -56,7 +56,7 @@ async def update_rule(
 )
 async def delete_rule(bangumi_id: str, file: bool = False):
     with TorrentManager() as manager:
-        resp = manager.delete_rule(bangumi_id, file)
+        resp = await manager.delete_rule(bangumi_id, file)
         return u_response(resp)
 
 
@@ -68,7 +68,7 @@ async def delete_rule(bangumi_id: str, file: bool = False):
 async def delete_many_rule(bangumi_id: list, file: bool = False):
     with TorrentManager() as manager:
         for i in bangumi_id:
-            resp = manager.delete_rule(i, file)
+            resp = await manager.delete_rule(i, file)
         return u_response(resp)
 
 
@@ -79,7 +79,7 @@ async def delete_many_rule(bangumi_id: list, file: bool = False):
 )
 async def disable_rule(bangumi_id: str, file: bool = False):
     with TorrentManager() as manager:
-        resp = manager.disable_rule(bangumi_id, file)
+        resp = await manager.disable_rule(bangumi_id, file)
         return u_response(resp)
 
 
@@ -91,7 +91,7 @@ async def disable_rule(bangumi_id: str, file: bool = False):
 async def disable_many_rule(bangumi_id: list, file: bool = False):
     with TorrentManager() as manager:
         for i in bangumi_id:
-            resp = manager.disable_rule(i, file)
+            resp = await manager.disable_rule(i, file)
         return u_response(resp)
 
 
@@ -111,9 +111,9 @@ async def enable_rule(bangumi_id: str):
     response_model=APIResponse,
     dependencies=[Depends(get_current_user)],
 )
-async def refresh_poster():
+async def refresh_poster_all():
     with TorrentManager() as manager:
-        resp = manager.refresh_poster()
+        resp = await manager.refresh_poster()
         return u_response(resp)
 
 @router.get(
@@ -121,9 +121,9 @@ async def refresh_poster():
     response_model=APIResponse,
     dependencies=[Depends(get_current_user)],
 )
-async def refresh_poster(bangumi_id: int):
+async def refresh_poster_one(bangumi_id: int):
     with TorrentManager() as manager:
-        resp = manager.refind_poster(bangumi_id)
+        resp = await manager.refind_poster(bangumi_id)
         return u_response(resp)
 
 
@@ -134,7 +134,7 @@ async def refresh_poster(bangumi_id: int):
 )
 async def refresh_calendar():
     with TorrentManager() as manager:
-        resp = manager.refresh_calendar()
+        resp = await manager.refresh_calendar()
         return u_response(resp)
backend/src/module/api/downloader.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+from fastapi import APIRouter, Depends
+from pydantic import BaseModel
+
+from module.downloader import DownloadClient
+from module.security.api import get_current_user
+
+router = APIRouter(prefix="/downloader", tags=["downloader"])
+
+
+class TorrentHashesRequest(BaseModel):
+    hashes: list[str]
+
+
+class TorrentDeleteRequest(BaseModel):
+    hashes: list[str]
+    delete_files: bool = False
+
+
+@router.get("/torrents", dependencies=[Depends(get_current_user)])
+async def get_torrents():
+    async with DownloadClient() as client:
+        return await client.get_torrent_info(category="Bangumi", status_filter=None)
+
+
+@router.post("/torrents/pause", dependencies=[Depends(get_current_user)])
+async def pause_torrents(req: TorrentHashesRequest):
+    hashes = "|".join(req.hashes)
+    async with DownloadClient() as client:
+        await client.pause_torrent(hashes)
+    return {"msg_en": "Torrents paused", "msg_zh": "种子已暂停"}
+
+
+@router.post("/torrents/resume", dependencies=[Depends(get_current_user)])
+async def resume_torrents(req: TorrentHashesRequest):
+    hashes = "|".join(req.hashes)
+    async with DownloadClient() as client:
+        await client.resume_torrent(hashes)
+    return {"msg_en": "Torrents resumed", "msg_zh": "种子已恢复"}
+
+
+@router.post("/torrents/delete", dependencies=[Depends(get_current_user)])
+async def delete_torrents(req: TorrentDeleteRequest):
+    hashes = "|".join(req.hashes)
+    async with DownloadClient() as client:
+        await client.delete_torrent(hashes, delete_files=req.delete_files)
+    return {"msg_en": "Torrents deleted", "msg_zh": "种子已删除"}
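Note: a quick way to exercise the new endpoints once the backend is up. The base URL, port, and token are assumptions (the routes sit on the v1 router, here assumed mounted under /api/v1, and require get_current_user):

import asyncio
import httpx

async def main():
    headers = {"Authorization": "Bearer <token>"}  # hypothetical token
    async with httpx.AsyncClient(base_url="http://localhost:18080/api/v1") as client:
        torrents = (await client.get("/downloader/torrents", headers=headers)).json()
        hashes = [t["hash"] for t in torrents]
        await client.post("/downloader/torrents/pause", headers=headers, json={"hashes": hashes})

asyncio.run(main())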
@@ -25,7 +25,7 @@ async def get_rss():
 )
 async def add_rss(rss: RSSItem):
     with RSSEngine() as engine:
-        result = engine.add_rss(rss.url, rss.name, rss.aggregate, rss.parser)
+        result = await engine.add_rss(rss.url, rss.name, rss.aggregate, rss.parser)
     return u_response(result)
 
 
@@ -133,12 +133,13 @@ async def update_rss(
     dependencies=[Depends(get_current_user)],
 )
 async def refresh_all():
-    with RSSEngine() as engine, DownloadClient() as client:
-        engine.refresh_rss(client)
-    return JSONResponse(
-        status_code=200,
-        content={"msg_en": "Refresh all RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
-    )
+    async with DownloadClient() as client:
+        with RSSEngine() as engine:
+            await engine.refresh_rss(client)
+    return JSONResponse(
+        status_code=200,
+        content={"msg_en": "Refresh all RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
+    )
 
 
 @router.get(
@@ -147,12 +148,13 @@ async def refresh_all():
     dependencies=[Depends(get_current_user)],
 )
 async def refresh_rss(rss_id: int):
-    with RSSEngine() as engine, DownloadClient() as client:
-        engine.refresh_rss(client, rss_id)
-    return JSONResponse(
-        status_code=200,
-        content={"msg_en": "Refresh RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
-    )
+    async with DownloadClient() as client:
+        with RSSEngine() as engine:
+            await engine.refresh_rss(client, rss_id)
+    return JSONResponse(
+        status_code=200,
+        content={"msg_en": "Refresh RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
+    )
 
 
 @router.get(
@@ -175,7 +177,7 @@ analyser = RSSAnalyser()
     "/analysis", response_model=Bangumi, dependencies=[Depends(get_current_user)]
 )
 async def analysis(rss: RSSItem):
-    data = analyser.link_to_data(rss)
+    data = await analyser.link_to_data(rss)
     if isinstance(data, Bangumi):
         return data
     else:
@@ -186,8 +188,8 @@ async def analysis(rss: RSSItem):
     "/collect", response_model=APIResponse, dependencies=[Depends(get_current_user)]
 )
 async def download_collection(data: Bangumi):
-    with SeasonCollector() as collector:
-        resp = collector.collect_season(data, data.rss_link)
+    async with SeasonCollector() as collector:
+        resp = await collector.collect_season(data, data.rss_link)
     return u_response(resp)
 
 
@@ -195,6 +197,5 @@ async def download_collection(data: Bangumi):
     "/subscribe", response_model=APIResponse, dependencies=[Depends(get_current_user)]
 )
 async def subscribe(data: Bangumi, rss: RSSItem):
-    with SeasonCollector() as collector:
-        resp = collector.subscribe_season(data, parser=rss.parser)
-    return u_response(resp)
+    resp = await SeasonCollector.subscribe_season(data, parser=rss.parser)
+    return u_response(resp)

@@ -18,10 +18,13 @@ async def search_torrents(site: str = "mikan", keywords: str = Query(None)):
     if not keywords:
         return []
     keywords = keywords.split(" ")
-    with SearchTorrent() as st:
-        return EventSourceResponse(
-            content=st.analyse_keyword(keywords=keywords, site=site),
-        )
+
+    async def event_generator():
+        async with SearchTorrent() as st:
+            async for item in st.analyse_keyword(keywords=keywords, site=site):
+                yield item
+
+    return EventSourceResponse(content=event_generator())
 
 
 @router.get(
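Note: the search endpoint now streams results as server-sent events from a locally defined async generator rather than handing the SearchTorrent generator to EventSourceResponse directly. Consuming the stream looks roughly like this (the URL path and query values are hypothetical):

import asyncio
import httpx

async def consume():
    url = "http://localhost:18080/api/v1/search/bangumi"  # hypothetical path
    params = {"site": "mikan", "keywords": "lycoris"}
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("GET", url, params=params) as resp:
            async for line in resp.aiter_lines():
                if line.startswith("data:"):
                    print(line.removeprefix("data:").strip())

asyncio.run(consume())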
@@ -1,14 +1,14 @@
-import logging
+import asyncio
+import logging
 
 from module.conf import VERSION, settings
 from module.models import ResponseModel
 from module.update import (
+    cache_image,
     data_migration,
     first_run,
     from_30_to_31,
     start_up,
-    cache_image,
 )
 
 from .sub_thread import RenameThread, RSSThread
@@ -51,11 +51,11 @@ class Program(RenameThread, RSSThread):
             data_migration()
         elif self.version_update:
             # Update database
-            from_30_to_31()
+            await from_30_to_31()
             logger.info("[Core] Database updated.")
         if not self.img_cache:
             logger.info("[Core] No image cache exists, create image cache.")
-            cache_image()
+            await cache_image()
         await self.start()
 
     async def start(self):
@@ -1,5 +1,4 @@
-import threading
-import time
+import asyncio
 
 from module.conf import settings
 from module.downloader import DownloadClient
@@ -13,69 +12,74 @@ from .status import ProgramStatus
 class RSSThread(ProgramStatus):
     def __init__(self):
         super().__init__()
-        self._rss_thread = threading.Thread(
-            target=self.rss_loop,
-        )
+        self._rss_task: asyncio.Task | None = None
         self.analyser = RSSAnalyser()
 
-    def rss_loop(self):
+    async def rss_loop(self):
         while not self.stop_event.is_set():
-            with DownloadClient() as client, RSSEngine() as engine:
-                # Analyse RSS
-                rss_list = engine.rss.search_aggregate()
-                for rss in rss_list:
-                    self.analyser.rss_to_data(rss, engine)
-                # Run RSS Engine
-                engine.refresh_rss(client)
+            async with DownloadClient() as client:
+                with RSSEngine() as engine:
+                    # Analyse RSS
+                    rss_list = engine.rss.search_aggregate()
+                    for rss in rss_list:
+                        await self.analyser.rss_to_data(rss, engine)
+                    # Run RSS Engine
+                    await engine.refresh_rss(client)
            if settings.bangumi_manage.eps_complete:
-                eps_complete()
-            self.stop_event.wait(settings.program.rss_time)
+                await eps_complete()
+            try:
+                await asyncio.wait_for(
+                    self.stop_event.wait(),
+                    timeout=settings.program.rss_time,
+                )
+            except asyncio.TimeoutError:
+                pass
 
     def rss_start(self):
-        self.rss_thread.start()
+        self._rss_task = asyncio.create_task(self.rss_loop())
 
-    def rss_stop(self):
-        if self._rss_thread.is_alive():
-            self._rss_thread.join()
-
-    @property
-    def rss_thread(self):
-        if not self._rss_thread.is_alive():
-            self._rss_thread = threading.Thread(
-                target=self.rss_loop,
-            )
-        return self._rss_thread
+    async def rss_stop(self):
+        if self._rss_task and not self._rss_task.done():
+            self.stop_event.set()
+            self._rss_task.cancel()
+            try:
+                await self._rss_task
+            except asyncio.CancelledError:
+                pass
+        self._rss_task = None
 
 
 class RenameThread(ProgramStatus):
     def __init__(self):
         super().__init__()
-        self._rename_thread = threading.Thread(
-            target=self.rename_loop,
-        )
+        self._rename_task: asyncio.Task | None = None
 
-    def rename_loop(self):
+    async def rename_loop(self):
         while not self.stop_event.is_set():
-            with Renamer() as renamer:
-                renamed_info = renamer.rename()
+            async with Renamer() as renamer:
+                renamed_info = await renamer.rename()
             if settings.notification.enable:
-                with PostNotification() as notifier:
+                async with PostNotification() as notifier:
                     for info in renamed_info:
-                        notifier.send_msg(info)
-                        time.sleep(2)
-            self.stop_event.wait(settings.program.rename_time)
+                        await notifier.send_msg(info)
+                        await asyncio.sleep(2)
+            try:
+                await asyncio.wait_for(
+                    self.stop_event.wait(),
+                    timeout=settings.program.rename_time,
+                )
+            except asyncio.TimeoutError:
+                pass
 
     def rename_start(self):
-        self.rename_thread.start()
+        self._rename_task = asyncio.create_task(self.rename_loop())
 
-    def rename_stop(self):
-        if self._rename_thread.is_alive():
-            self._rename_thread.join()
-
-    @property
-    def rename_thread(self):
-        if not self._rename_thread.is_alive():
-            self._rename_thread = threading.Thread(
-                target=self.rename_loop,
-            )
-        return self._rename_thread
+    async def rename_stop(self):
+        if self._rename_task and not self._rename_task.done():
+            self.stop_event.set()
+            self._rename_task.cancel()
+            try:
+                await self._rename_task
+            except asyncio.CancelledError:
+                pass
+        self._rename_task = None
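Note: both loops move from daemon threads to asyncio tasks, and the interval sleep becomes an awaitable wait on stop_event so shutdown is immediate. The pattern in isolation (a sketch with a stand-in work step):

import asyncio

class Worker:
    def __init__(self):
        self.stop_event = asyncio.Event()
        self._task: asyncio.Task | None = None

    async def _loop(self):
        while not self.stop_event.is_set():
            ...  # one unit of work
            try:
                # sleep for the interval, but wake immediately on stop
                await asyncio.wait_for(self.stop_event.wait(), timeout=30)
            except asyncio.TimeoutError:
                pass

    def start(self):
        self._task = asyncio.create_task(self._loop())

    async def stop(self):
        if self._task and not self._task.done():
            self.stop_event.set()
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
        self._task = None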
@@ -2,9 +2,8 @@ import logging
 import time
 from typing import Optional
 
-from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.sql import func
-from sqlmodel import and_, delete, false, or_, select
+from sqlmodel import Session, and_, delete, false, or_, select
 
 from module.models import Bangumi, BangumiUpdate
@@ -23,32 +22,32 @@ def _invalidate_bangumi_cache():
 
 
 class BangumiDatabase:
-    def __init__(self, session: AsyncSession):
+    def __init__(self, session: Session):
         self.session = session
 
-    async def add(self, data: Bangumi) -> bool:
+    def add(self, data: Bangumi) -> bool:
         statement = select(Bangumi).where(Bangumi.title_raw == data.title_raw)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         bangumi = result.scalar_one_or_none()
         if bangumi:
             return False
         self.session.add(data)
-        await self.session.commit()
+        self.session.commit()
         _invalidate_bangumi_cache()
         logger.debug(f"[Database] Insert {data.official_title} into database.")
         return True
 
-    async def add_all(self, datas: list[Bangumi]):
+    def add_all(self, datas: list[Bangumi]):
         self.session.add_all(datas)
-        await self.session.commit()
+        self.session.commit()
         _invalidate_bangumi_cache()
         logger.debug(f"[Database] Insert {len(datas)} bangumi into database.")
 
-    async def update(self, data: Bangumi | BangumiUpdate, _id: int = None) -> bool:
+    def update(self, data: Bangumi | BangumiUpdate, _id: int = None) -> bool:
         if _id and isinstance(data, BangumiUpdate):
-            db_data = await self.session.get(Bangumi, _id)
+            db_data = self.session.get(Bangumi, _id)
         elif isinstance(data, Bangumi):
-            db_data = await self.session.get(Bangumi, data.id)
+            db_data = self.session.get(Bangumi, data.id)
         else:
             return False
         if not db_data:
@@ -57,70 +56,70 @@ class BangumiDatabase:
         for key, value in bangumi_data.items():
             setattr(db_data, key, value)
         self.session.add(db_data)
-        await self.session.commit()
+        self.session.commit()
         _invalidate_bangumi_cache()
         logger.debug(f"[Database] Update {data.official_title}")
         return True
 
-    async def update_all(self, datas: list[Bangumi]):
+    def update_all(self, datas: list[Bangumi]):
         self.session.add_all(datas)
-        await self.session.commit()
+        self.session.commit()
         _invalidate_bangumi_cache()
         logger.debug(f"[Database] Update {len(datas)} bangumi.")
 
-    async def update_rss(self, title_raw: str, rss_set: str):
+    def update_rss(self, title_raw: str, rss_set: str):
         statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         bangumi = result.scalar_one_or_none()
         if bangumi:
             bangumi.rss_link = rss_set
             bangumi.added = False
             self.session.add(bangumi)
-            await self.session.commit()
+            self.session.commit()
             _invalidate_bangumi_cache()
         logger.debug(f"[Database] Update {title_raw} rss_link to {rss_set}.")
 
-    async def update_poster(self, title_raw: str, poster_link: str):
+    def update_poster(self, title_raw: str, poster_link: str):
         statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         bangumi = result.scalar_one_or_none()
         if bangumi:
             bangumi.poster_link = poster_link
             self.session.add(bangumi)
-            await self.session.commit()
+            self.session.commit()
             _invalidate_bangumi_cache()
         logger.debug(f"[Database] Update {title_raw} poster_link to {poster_link}.")
 
-    async def delete_one(self, _id: int):
+    def delete_one(self, _id: int):
         statement = select(Bangumi).where(Bangumi.id == _id)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         bangumi = result.scalar_one_or_none()
         if bangumi:
-            await self.session.delete(bangumi)
-            await self.session.commit()
+            self.session.delete(bangumi)
+            self.session.commit()
             _invalidate_bangumi_cache()
         logger.debug(f"[Database] Delete bangumi id: {_id}.")
 
-    async def delete_all(self):
+    def delete_all(self):
         statement = delete(Bangumi)
-        await self.session.execute(statement)
-        await self.session.commit()
+        self.session.execute(statement)
+        self.session.commit()
         _invalidate_bangumi_cache()
 
-    async def search_all(self) -> list[Bangumi]:
+    def search_all(self) -> list[Bangumi]:
         global _bangumi_cache, _bangumi_cache_time
         now = time.time()
         if _bangumi_cache is not None and (now - _bangumi_cache_time) < _BANGUMI_CACHE_TTL:
             return _bangumi_cache
         statement = select(Bangumi)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         _bangumi_cache = list(result.scalars().all())
         _bangumi_cache_time = now
         return _bangumi_cache
 
-    async def search_id(self, _id: int) -> Optional[Bangumi]:
+    def search_id(self, _id: int) -> Optional[Bangumi]:
         statement = select(Bangumi).where(Bangumi.id == _id)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         bangumi = result.scalar_one_or_none()
         if bangumi is None:
             logger.warning(f"[Database] Cannot find bangumi id: {_id}.")
@@ -129,19 +128,19 @@ class BangumiDatabase:
         logger.debug(f"[Database] Find bangumi id: {_id}.")
         return bangumi
 
-    async def match_poster(self, bangumi_name: str) -> str:
+    def match_poster(self, bangumi_name: str) -> str:
         statement = select(Bangumi).where(
             func.instr(bangumi_name, Bangumi.official_title) > 0
         )
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         data = result.scalar_one_or_none()
         if data:
             return data.poster_link
         else:
             return ""
 
-    async def match_list(self, torrent_list: list, rss_link: str) -> list:
-        match_datas = await self.search_all()
+    def match_list(self, torrent_list: list, rss_link: str) -> list:
+        match_datas = self.search_all()
         if not match_datas:
             return torrent_list
         # Build index for faster lookup
@@ -162,29 +161,29 @@ class BangumiDatabase:
             unmatched.append(torrent)
         # Batch commit all rss_link updates
         if rss_updated:
-            await self.session.commit()
+            self.session.commit()
             _invalidate_bangumi_cache()
             logger.debug(f"[Database] Batch updated rss_link for {len(rss_updated)} bangumi.")
         return unmatched
 
-    async def match_torrent(self, torrent_name: str) -> Optional[Bangumi]:
+    def match_torrent(self, torrent_name: str) -> Optional[Bangumi]:
         statement = select(Bangumi).where(
             and_(
                 func.instr(torrent_name, Bangumi.title_raw) > 0,
                 Bangumi.deleted == false(),
             )
         )
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         return result.scalar_one_or_none()
 
-    async def not_complete(self) -> list[Bangumi]:
+    def not_complete(self) -> list[Bangumi]:
         condition = select(Bangumi).where(
             and_(Bangumi.eps_collect == false(), Bangumi.deleted == false())
         )
-        result = await self.session.execute(condition)
+        result = self.session.execute(condition)
         return list(result.scalars().all())
 
-    async def not_added(self) -> list[Bangumi]:
+    def not_added(self) -> list[Bangumi]:
         conditions = select(Bangumi).where(
             or_(
                 Bangumi.added == 0,
@@ -192,20 +191,20 @@ class BangumiDatabase:
                 Bangumi.save_path is None,
             )
         )
-        result = await self.session.execute(conditions)
+        result = self.session.execute(conditions)
         return list(result.scalars().all())
 
-    async def disable_rule(self, _id: int):
+    def disable_rule(self, _id: int):
         statement = select(Bangumi).where(Bangumi.id == _id)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         bangumi = result.scalar_one_or_none()
         if bangumi:
             bangumi.deleted = True
             self.session.add(bangumi)
-            await self.session.commit()
+            self.session.commit()
             logger.debug(f"[Database] Disable rule {bangumi.title_raw}.")
 
-    async def search_rss(self, rss_link: str) -> list[Bangumi]:
+    def search_rss(self, rss_link: str) -> list[Bangumi]:
         statement = select(Bangumi).where(func.instr(rss_link, Bangumi.rss_link) > 0)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         return list(result.scalars().all())
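Note: the database layer moves the opposite way from the API layer here: AsyncSession goes back to a synchronous sqlmodel Session, which callers wrap in plain `with` blocks inside async endpoints. A minimal usage sketch (the engine URL is hypothetical; the project wires up its own engine and session):

from sqlmodel import Session, create_engine

engine = create_engine("sqlite:///data/data.db")  # hypothetical DB path
with Session(engine) as session:
    db = BangumiDatabase(session)
    for bangumi in db.search_all():  # served from the TTL cache when warm
        print(bangumi.official_title)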
@@ -1,7 +1,6 @@
 import logging
 
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlmodel import and_, delete, select
+from sqlmodel import Session, and_, delete, select
 
 from module.models import RSSItem, RSSUpdate
@@ -9,12 +8,12 @@ logger = logging.getLogger(__name__)
 
 
 class RSSDatabase:
-    def __init__(self, session: AsyncSession):
+    def __init__(self, session: Session):
         self.session = session
 
-    async def add(self, data: RSSItem) -> bool:
+    def add(self, data: RSSItem) -> bool:
         statement = select(RSSItem).where(RSSItem.url == data.url)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         db_data = result.scalar_one_or_none()
         if db_data:
             logger.debug(f"RSS Item {data.url} already exists.")
@@ -22,26 +21,26 @@ class RSSDatabase:
         else:
             logger.debug(f"RSS Item {data.url} not exists, adding...")
             self.session.add(data)
-            await self.session.commit()
-            await self.session.refresh(data)
+            self.session.commit()
+            self.session.refresh(data)
             return True
 
-    async def add_all(self, data: list[RSSItem]):
+    def add_all(self, data: list[RSSItem]):
         if not data:
             return
         urls = [item.url for item in data]
         statement = select(RSSItem.url).where(RSSItem.url.in_(urls))
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         existing_urls = set(result.scalars().all())
         new_items = [item for item in data if item.url not in existing_urls]
         if new_items:
             self.session.add_all(new_items)
-            await self.session.commit()
+            self.session.commit()
             logger.debug(f"Batch inserted {len(new_items)} RSS items.")
 
-    async def update(self, _id: int, data: RSSUpdate) -> bool:
+    def update(self, _id: int, data: RSSUpdate) -> bool:
         statement = select(RSSItem).where(RSSItem.id == _id)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         db_data = result.scalar_one_or_none()
         if not db_data:
             return False
@@ -49,61 +48,61 @@ class RSSDatabase:
         for key, value in dict_data.items():
             setattr(db_data, key, value)
         self.session.add(db_data)
-        await self.session.commit()
+        self.session.commit()
         return True
 
-    async def enable(self, _id: int) -> bool:
+    def enable(self, _id: int) -> bool:
         statement = select(RSSItem).where(RSSItem.id == _id)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         db_data = result.scalar_one_or_none()
         if not db_data:
             return False
         db_data.enabled = True
         self.session.add(db_data)
-        await self.session.commit()
+        self.session.commit()
         return True
 
-    async def disable(self, _id: int) -> bool:
+    def disable(self, _id: int) -> bool:
         statement = select(RSSItem).where(RSSItem.id == _id)
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         db_data = result.scalar_one_or_none()
         if not db_data:
             return False
         db_data.enabled = False
         self.session.add(db_data)
-        await self.session.commit()
+        self.session.commit()
         return True
 
-    async def search_id(self, _id: int) -> RSSItem | None:
-        return await self.session.get(RSSItem, _id)
+    def search_id(self, _id: int) -> RSSItem | None:
+        return self.session.get(RSSItem, _id)
 
-    async def search_all(self) -> list[RSSItem]:
-        result = await self.session.execute(select(RSSItem))
+    def search_all(self) -> list[RSSItem]:
+        result = self.session.execute(select(RSSItem))
         return list(result.scalars().all())
 
-    async def search_active(self) -> list[RSSItem]:
-        result = await self.session.execute(
+    def search_active(self) -> list[RSSItem]:
+        result = self.session.execute(
             select(RSSItem).where(RSSItem.enabled)
         )
         return list(result.scalars().all())
 
-    async def search_aggregate(self) -> list[RSSItem]:
-        result = await self.session.execute(
+    def search_aggregate(self) -> list[RSSItem]:
+        result = self.session.execute(
             select(RSSItem).where(and_(RSSItem.aggregate, RSSItem.enabled))
         )
         return list(result.scalars().all())
 
-    async def delete(self, _id: int) -> bool:
+    def delete(self, _id: int) -> bool:
         condition = delete(RSSItem).where(RSSItem.id == _id)
         try:
-            await self.session.execute(condition)
-            await self.session.commit()
+            self.session.execute(condition)
+            self.session.commit()
             return True
         except Exception as e:
             logger.error(f"Delete RSS Item failed. Because: {e}")
             return False
 
-    async def delete_all(self):
+    def delete_all(self):
         condition = delete(RSSItem)
-        await self.session.execute(condition)
-        await self.session.commit()
+        self.session.execute(condition)
+        self.session.commit()
@@ -1,7 +1,6 @@
 import logging
 
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlmodel import select
+from sqlmodel import Session, select
 
 from module.models import Torrent
@@ -9,54 +8,54 @@ logger = logging.getLogger(__name__)
 
 
 class TorrentDatabase:
-    def __init__(self, session: AsyncSession):
+    def __init__(self, session: Session):
         self.session = session
 
-    async def add(self, data: Torrent):
+    def add(self, data: Torrent):
         self.session.add(data)
-        await self.session.commit()
+        self.session.commit()
         logger.debug(f"Insert {data.name} in database.")
 
-    async def add_all(self, datas: list[Torrent]):
+    def add_all(self, datas: list[Torrent]):
         self.session.add_all(datas)
-        await self.session.commit()
+        self.session.commit()
         logger.debug(f"Insert {len(datas)} torrents in database.")
 
-    async def update(self, data: Torrent):
+    def update(self, data: Torrent):
         self.session.add(data)
-        await self.session.commit()
+        self.session.commit()
         logger.debug(f"Update {data.name} in database.")
 
-    async def update_all(self, datas: list[Torrent]):
+    def update_all(self, datas: list[Torrent]):
         self.session.add_all(datas)
-        await self.session.commit()
+        self.session.commit()
 
-    async def update_one_user(self, data: Torrent):
+    def update_one_user(self, data: Torrent):
         self.session.add(data)
-        await self.session.commit()
+        self.session.commit()
         logger.debug(f"Update {data.name} in database.")
 
-    async def search(self, _id: int) -> Torrent | None:
-        result = await self.session.execute(
+    def search(self, _id: int) -> Torrent | None:
+        result = self.session.execute(
             select(Torrent).where(Torrent.id == _id)
         )
         return result.scalar_one_or_none()
 
-    async def search_all(self) -> list[Torrent]:
-        result = await self.session.execute(select(Torrent))
+    def search_all(self) -> list[Torrent]:
+        result = self.session.execute(select(Torrent))
         return list(result.scalars().all())
 
-    async def search_rss(self, rss_id: int) -> list[Torrent]:
-        result = await self.session.execute(
+    def search_rss(self, rss_id: int) -> list[Torrent]:
+        result = self.session.execute(
             select(Torrent).where(Torrent.rss_id == rss_id)
         )
         return list(result.scalars().all())
 
-    async def check_new(self, torrents_list: list[Torrent]) -> list[Torrent]:
+    def check_new(self, torrents_list: list[Torrent]) -> list[Torrent]:
         if not torrents_list:
             return []
         urls = [t.url for t in torrents_list]
         statement = select(Torrent.url).where(Torrent.url.in_(urls))
-        result = await self.session.execute(statement)
+        result = self.session.execute(statement)
         existing_urls = set(result.scalars().all())
         return [t for t in torrents_list if t.url not in existing_urls]
@@ -1,8 +1,7 @@
 import logging
 
 from fastapi import HTTPException
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlmodel import select
+from sqlmodel import Session, select
 
 from module.models import ResponseModel
 from module.models.user import User, UserUpdate
@@ -12,21 +11,21 @@ logger = logging.getLogger(__name__)
 
 
 class UserDatabase:
-    def __init__(self, session: AsyncSession):
+    def __init__(self, session: Session):
         self.session = session
 
-    async def get_user(self, username: str) -> User:
+    def get_user(self, username: str) -> User:
         statement = select(User).where(User.username == username)
-        result = await self.session.execute(statement)
-        user = result.scalar_one_or_none()
+        result = self.session.exec(statement)
+        user = result.first()
         if not user:
             raise HTTPException(status_code=404, detail="User not found")
         return user
 
-    async def auth_user(self, user: User) -> ResponseModel:
+    def auth_user(self, user: User) -> ResponseModel:
         statement = select(User).where(User.username == user.username)
-        result = await self.session.execute(statement)
-        db_user = result.scalar_one_or_none()
+        result = self.session.exec(statement)
+        db_user = result.first()
         if not user.password:
             return ResponseModel(
                 status_code=401,
@@ -55,10 +54,10 @@ class UserDatabase:
             msg_zh="登录成功",
         )
 
-    async def update_user(self, username: str, update_user: UserUpdate) -> User:
+    def update_user(self, username: str, update_user: UserUpdate) -> User:
         statement = select(User).where(User.username == username)
-        result = await self.session.execute(statement)
-        db_user = result.scalar_one_or_none()
+        result = self.session.exec(statement)
+        db_user = result.first()
         if not db_user:
             raise HTTPException(status_code=404, detail="User not found")
         if update_user.username:
@@ -66,18 +65,18 @@ class UserDatabase:
         if update_user.password:
             db_user.password = get_password_hash(update_user.password)
         self.session.add(db_user)
-        await self.session.commit()
+        self.session.commit()
         return db_user
 
-    async def add_default_user(self):
+    def add_default_user(self):
         statement = select(User)
         try:
-            result = await self.session.execute(statement)
-            users = list(result.scalars().all())
+            result = self.session.exec(statement)
+            users = list(result.all())
         except Exception:
             users = []
         if len(users) != 0:
             return
         user = User(username="admin", password=get_password_hash("adminadmin"))
         self.session.add(user)
-        await self.session.commit()
+        self.session.commit()
@@ -1,29 +1,69 @@
+import asyncio
 import logging
-import time
 
-from aria2p import API, Client, ClientException
+import httpx
 
 from module.conf import settings
 
 logger = logging.getLogger(__name__)
 
 
-class QbDownloader:
-    def __init__(self, host, username, password):
-        while True:
-            try:
-                self._client = API(Client(host=host, port=6800, secret=password))
-                break
-            except ClientException:
-                logger.warning(
-                    f"Can't login Aria2 Server {host} by {username}, retry in {settings.connect_retry_interval}"
-                )
-                time.sleep(settings.connect_retry_interval)
+class Aria2Downloader:
+    def __init__(self, host: str, username: str, password: str):
+        self.host = host
+        self.secret = password
+        self._client: httpx.AsyncClient | None = None
+        self._rpc_url = f"{host}/jsonrpc"
+        self._id = 0
 
-    def torrents_add(self, urls, save_path, category):
-        return self._client.add_torrent(
-            is_paused=settings.dev_debug,
-            torrent_file_path=urls,
-            save_path=save_path,
-            category=category,
-        )
+    async def _call(self, method: str, params: list = None):
+        self._id += 1
+        if params is None:
+            params = []
+        # Prepend token
+        full_params = [f"token:{self.secret}"] + params
+        payload = {
+            "jsonrpc": "2.0",
+            "id": self._id,
+            "method": f"aria2.{method}",
+            "params": full_params,
+        }
+        resp = await self._client.post(self._rpc_url, json=payload)
+        result = resp.json()
+        if "error" in result:
+            raise Exception(f"Aria2 RPC error: {result['error']}")
+        return result.get("result")
+
+    async def auth(self, retry=3):
+        self._client = httpx.AsyncClient(timeout=httpx.Timeout(connect=3.1, read=10.0, write=10.0, pool=10.0))
+        times = 0
+        while times < retry:
+            try:
+                await self._call("getVersion")
+                return True
+            except Exception as e:
+                logger.warning(
+                    f"Can't login Aria2 Server {self.host}, retry in 5 seconds. Error: {e}"
+                )
+                await asyncio.sleep(5)
+                times += 1
+        return False
+
+    async def logout(self):
+        if self._client:
+            await self._client.aclose()
+            self._client = None
+
+    async def add_torrents(self, torrent_urls, torrent_files, save_path, category):
+        import base64
+        options = {"dir": save_path}
+        if torrent_urls:
+            urls = torrent_urls if isinstance(torrent_urls, list) else [torrent_urls]
+            for url in urls:
+                await self._call("addUri", [[url], options])
+        if torrent_files:
+            files = torrent_files if isinstance(torrent_files, list) else [torrent_files]
+            for f in files:
+                b64 = base64.b64encode(f).decode()
+                await self._call("addTorrent", [b64, [], options])
+        return True
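Note: _call wraps aria2's JSON-RPC convention, prepending the token to the positional params. For example, `await self._call("addUri", [[url], {"dir": save_path}])` posts a payload like this (secret and values are hypothetical):

payload = {
    "jsonrpc": "2.0",
    "id": 1,
    "method": "aria2.addUri",
    "params": [
        "token:<secret>",                   # RPC secret, prepended by _call
        ["https://example.com/a.torrent"],  # hypothetical URI list
        {"dir": "/downloads/Bangumi"},      # hypothetical save path
    ],
}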
@@ -1,12 +1,7 @@
+import asyncio
 import logging
-import time
 
-from qbittorrentapi import Client, LoginFailed
-from qbittorrentapi.exceptions import (
-    APIConnectionError,
-    Conflict409Error,
-    Forbidden403Error,
-)
+import httpx
 
 from module.ab_decorator import qb_connect_failed_wait
@@ -15,138 +10,233 @@ logger = logging.getLogger(__name__)
 
 class QbDownloader:
     def __init__(self, host: str, username: str, password: str, ssl: bool):
-        self._client: Client = Client(
-            host=host,
-            username=username,
-            password=password,
-            VERIFY_WEBUI_CERTIFICATE=ssl,
-            DISABLE_LOGGING_DEBUG_OUTPUT=True,
-            REQUESTS_ARGS={"timeout": (3.1, 10)},
-        )
-        self.host = host
+        if "://" not in host:
+            scheme = "https" if ssl else "http"
+            self.host = f"{scheme}://{host}"
+        else:
+            self.host = host
         self.username = username
+        self.password = password
+        self.ssl = ssl
+        self._client: httpx.AsyncClient | None = None
 
-    def auth(self, retry=3):
+    def _url(self, endpoint: str) -> str:
+        return f"{self.host}/api/v2/{endpoint}"
+
+    async def auth(self, retry=3):
         times = 0
+        timeout = httpx.Timeout(connect=3.1, read=10.0, write=10.0, pool=10.0)
+        self._client = httpx.AsyncClient(
+            timeout=timeout, verify=self.ssl
+        )
         while times < retry:
             try:
-                self._client.auth_log_in()
-                return True
-            except LoginFailed:
-                logger.error(
-                    f"Can't login qBittorrent Server {self.host} by {self.username}, retry in {5} seconds."
-                )
-                time.sleep(5)
-                times += 1
-            except Forbidden403Error:
-                logger.error("Login refused by qBittorrent Server")
-                logger.info("Please release the IP in qBittorrent Server")
-                break
-            except APIConnectionError:
+                resp = await self._client.post(
+                    self._url("auth/login"),
+                    data={"username": self.username, "password": self.password},
+                )
+                if resp.status_code == 200 and resp.text == "Ok.":
+                    return True
+                elif resp.status_code == 403:
+                    logger.error("Login refused by qBittorrent Server")
+                    logger.info("Please release the IP in qBittorrent Server")
+                    break
+                else:
+                    logger.error(
+                        f"Can't login qBittorrent Server {self.host} by {self.username}, retry in 5 seconds."
+                    )
+                    await asyncio.sleep(5)
+                    times += 1
+            except httpx.ConnectError:
                 logger.error("Cannot connect to qBittorrent Server")
                 logger.info("Please check the IP and port in WebUI settings")
-                time.sleep(10)
+                await asyncio.sleep(10)
                 times += 1
             except Exception as e:
                 logger.error(f"Unknown error: {e}")
                 break
         return False
 
-    def logout(self):
-        self._client.auth_log_out()
+    async def logout(self):
+        if self._client:
+            try:
+                await self._client.post(self._url("auth/logout"))
+            except Exception:
+                pass
+            await self._client.aclose()
+            self._client = None
 
-    def check_host(self):
+    async def check_host(self):
         try:
-            self._client.app_version()
-            return True
-        except APIConnectionError:
+            resp = await self._client.get(self._url("app/version"))
+            return resp.status_code == 200
+        except (httpx.ConnectError, httpx.RequestError):
             return False
 
     def check_rss(self, rss_link: str):
         pass
 
     @qb_connect_failed_wait
-    def prefs_init(self, prefs):
-        return self._client.app_set_preferences(prefs=prefs)
+    async def prefs_init(self, prefs):
+        resp = await self._client.post(
+            self._url("app/setPreferences"),
+            data={"json": __import__("json").dumps(prefs)},
+        )
+        return resp
 
     @qb_connect_failed_wait
-    def get_app_prefs(self):
-        return self._client.app_preferences()
+    async def get_app_prefs(self):
+        resp = await self._client.get(self._url("app/preferences"))
+        return resp.json()
 
-    def add_category(self, category):
-        return self._client.torrents_createCategory(name=category)
+    async def add_category(self, category):
+        await self._client.post(
+            self._url("torrents/createCategory"),
+            data={"category": category, "savePath": ""},
+        )
 
     @qb_connect_failed_wait
-    def torrents_info(self, status_filter, category, tag=None):
-        return self._client.torrents_info(
-            status_filter=status_filter, category=category, tag=tag
-        )
+    async def torrents_info(self, status_filter, category, tag=None):
+        params = {}
+        if status_filter:
+            params["filter"] = status_filter
+        if category:
+            params["category"] = category
+        if tag:
+            params["tag"] = tag
+        resp = await self._client.get(self._url("torrents/info"), params=params)
+        return resp.json()
 
-    def add_torrents(self, torrent_urls, torrent_files, save_path, category):
-        resp = self._client.torrents_add(
-            is_paused=False,
-            urls=torrent_urls,
-            torrent_files=torrent_files,
-            save_path=save_path,
-            category=category,
-            use_auto_torrent_management=False,
-            content_layout="NoSubFolder"
-        )
-        return resp == "Ok."
+    async def add_torrents(self, torrent_urls, torrent_files, save_path, category):
+        data = {
+            "savepath": save_path,
+            "category": category,
+            "paused": "false",
+            "autoTMM": "false",
+            "contentLayout": "NoSubfolder",
+        }
+        files = {}
+        if torrent_urls:
+            if isinstance(torrent_urls, list):
+                data["urls"] = "\n".join(torrent_urls)
+            else:
+                data["urls"] = torrent_urls
+        if torrent_files:
+            if isinstance(torrent_files, list):
+                for i, f in enumerate(torrent_files):
+                    files[f"torrents_{i}"] = (f"torrent_{i}.torrent", f, "application/x-bittorrent")
+            else:
+                files["torrents"] = ("torrent.torrent", torrent_files, "application/x-bittorrent")
+
+        resp = await self._client.post(
+            self._url("torrents/add"),
+            data=data,
+            files=files if files else None,
+        )
+        return resp.text == "Ok."
 
-    def torrents_delete(self, hash):
-        return self._client.torrents_delete(delete_files=True, torrent_hashes=hash)
+    async def torrents_delete(self, hash, delete_files: bool = True):
+        await self._client.post(
+            self._url("torrents/delete"),
+            data={"hashes": hash, "deleteFiles": str(delete_files).lower()},
+        )
+
+    async def torrents_pause(self, hashes: str):
+        await self._client.post(
+            self._url("torrents/pause"),
+            data={"hashes": hashes},
+        )
+
+    async def torrents_resume(self, hashes: str):
+        await self._client.post(
+            self._url("torrents/resume"),
+            data={"hashes": hashes},
+        )
 
-    def torrents_rename_file(self, torrent_hash, old_path, new_path) -> bool:
-        try:
-            self._client.torrents_rename_file(
-                torrent_hash=torrent_hash, old_path=old_path, new_path=new_path
-            )
-            return True
-        except Conflict409Error:
-            logger.debug(f"Conflict409Error: {old_path} >> {new_path}")
-            return False
+    async def torrents_rename_file(self, torrent_hash, old_path, new_path) -> bool:
+        try:
+            resp = await self._client.post(
+                self._url("torrents/renameFile"),
+                data={"hash": torrent_hash, "oldPath": old_path, "newPath": new_path},
+            )
+            if resp.status_code == 409:
+                logger.debug(f"Conflict409Error: {old_path} >> {new_path}")
+                return False
+            return resp.status_code == 200
+        except Exception:
+            return False
 
-    def rss_add_feed(self, url, item_path):
-        try:
-            self._client.rss_add_feed(url, item_path)
-        except Conflict409Error:
-            logger.warning(f"[Downloader] RSS feed {url} already exists")
+    async def rss_add_feed(self, url, item_path):
+        resp = await self._client.post(
+            self._url("rss/addFeed"),
+            data={"url": url, "path": item_path},
+        )
+        if resp.status_code == 409:
+            logger.warning(f"[Downloader] RSS feed {url} already exists")
 
-    def rss_remove_item(self, item_path):
-        try:
-            self._client.rss_remove_item(item_path)
-        except Conflict409Error:
-            logger.warning(f"[Downloader] RSS item {item_path} does not exist")
+    async def rss_remove_item(self, item_path):
+        resp = await self._client.post(
+            self._url("rss/removeItem"),
+            data={"path": item_path},
+        )
+        if resp.status_code == 409:
+            logger.warning(f"[Downloader] RSS item {item_path} does not exist")
 
-    def rss_get_feeds(self):
-        return self._client.rss_items()
+    async def rss_get_feeds(self):
+        resp = await self._client.get(self._url("rss/items"))
+        return resp.json()
 
-    def rss_set_rule(self, rule_name, rule_def):
-        self._client.rss_set_rule(rule_name, rule_def)
+    async def rss_set_rule(self, rule_name, rule_def):
+        import json
+        await self._client.post(
+            self._url("rss/setRule"),
+            data={"ruleName": rule_name, "ruleDef": json.dumps(rule_def)},
+        )
 
-    def move_torrent(self, hashes, new_location):
-        self._client.torrents_set_location(new_location, hashes)
+    async def move_torrent(self, hashes, new_location):
+        await self._client.post(
+            self._url("torrents/setLocation"),
+            data={"hashes": hashes, "location": new_location},
+        )
 
-    def get_download_rule(self):
-        return self._client.rss_rules()
+    async def get_download_rule(self):
+        resp = await self._client.get(self._url("rss/rules"))
+        return resp.json()
 
-    def get_torrent_path(self, _hash):
-        return self._client.torrents_info(hashes=_hash)[0].save_path
+    async def get_torrent_path(self, _hash):
+        resp = await self._client.get(
            self._url("torrents/info"), params={"hashes": _hash}
+        )
+        torrents = resp.json()
+        if torrents:
+            return torrents[0].get("save_path", "")
+        return ""
 
-    def set_category(self, _hash, category):
-        try:
-            self._client.torrents_set_category(category, hashes=_hash)
-        except Conflict409Error:
-            logger.warning(f"[Downloader] Category {category} does not exist")
-            self.add_category(category)
-            self._client.torrents_set_category(category, hashes=_hash)
+    async def set_category(self, _hash, category):
+        resp = await self._client.post(
+            self._url("torrents/setCategory"),
+            data={"hashes": _hash, "category": category},
+        )
+        if resp.status_code == 409:
+            logger.warning(f"[Downloader] Category {category} does not exist")
+            await self.add_category(category)
+            await self._client.post(
+                self._url("torrents/setCategory"),
+                data={"hashes": _hash, "category": category},
+            )
 
-    def check_connection(self):
-        return self._client.app_version()
+    async def check_connection(self):
+        resp = await self._client.get(self._url("app/version"))
+        return resp.text
 
-    def remove_rule(self, rule_name):
-        self._client.rss_remove_rule(rule_name)
+    async def remove_rule(self, rule_name):
+        await self._client.post(
+            self._url("rss/removeRule"),
+            data={"ruleName": rule_name},
+        )
 
-    def add_tag(self, _hash, tag):
-        self._client.torrents_add_tags(tags=tag, hashes=_hash)
+    async def add_tag(self, _hash, tag):
+        await self._client.post(
+            self._url("torrents/addTags"),
+            data={"hashes": _hash, "tags": tag},
+        )
@@ -17,7 +17,6 @@ class DownloadClient(TorrentPath):
|
||||
|
||||
@staticmethod
|
||||
def __getClient():
|
||||
# TODO 多下载器支持
|
||||
type = settings.downloader.type
|
||||
host = settings.downloader.host
|
||||
username = settings.downloader.username
|
||||
@@ -27,49 +26,53 @@ class DownloadClient(TorrentPath):
|
||||
from .client.qb_downloader import QbDownloader
|
||||
|
||||
return QbDownloader(host, username, password, ssl)
|
||||
elif type == "aria2":
|
||||
from .client.aria2_downloader import Aria2Downloader
|
||||
|
||||
return Aria2Downloader(host, username, password)
|
||||
else:
|
||||
logger.error(f"[Downloader] Unsupported downloader type: {type}")
|
||||
raise Exception(f"Unsupported downloader type: {type}")
|
||||
|
||||
def __enter__(self):
|
||||
async def __aenter__(self):
|
||||
if not self.authed:
|
||||
self.auth()
|
||||
await self.auth()
|
||||
else:
|
||||
logger.error("[Downloader] Already authed.")
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
if self.authed:
|
||||
self.client.logout()
|
||||
await self.client.logout()
|
||||
self.authed = False
|
||||
|
||||
def auth(self):
|
||||
self.authed = self.client.auth()
|
||||
async def auth(self):
|
||||
self.authed = await self.client.auth()
|
||||
if self.authed:
|
||||
logger.debug("[Downloader] Authed.")
|
||||
else:
|
||||
logger.error("[Downloader] Auth failed.")
|
||||
|
||||
    def check_host(self):
        return self.client.check_host()
    async def check_host(self):
        return await self.client.check_host()

    def init_downloader(self):
    async def init_downloader(self):
        prefs = {
            "rss_auto_downloading_enabled": True,
            "rss_max_articles_per_feed": 500,
            "rss_processing_enabled": True,
            "rss_refresh_interval": 30,
        }
        self.client.prefs_init(prefs=prefs)
        await self.client.prefs_init(prefs=prefs)
        try:
            self.client.add_category("BangumiCollection")
            await self.client.add_category("BangumiCollection")
        except Exception:
            logger.debug("[Downloader] Cannot add new category, maybe already exists.")
        if settings.downloader.path == "":
            prefs = self.client.get_app_prefs()
            prefs = await self.client.get_app_prefs()
            settings.downloader.path = self._join_path(prefs["save_path"], "Bangumi")
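Because the context-manager protocol is now async, callers switch from `with` to `async with`. A minimal usage sketch, assuming `DownloadClient` keeps its zero-argument constructor (the import path is taken from elsewhere in this diff):

import asyncio
from module.downloader import DownloadClient  # import path assumed

async def main():
    async with DownloadClient() as client:
        # Verify the downloader is reachable before touching its prefs.
        if await client.check_host():
            await client.init_downloader()

asyncio.run(main())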
    def set_rule(self, data: Bangumi):
    async def set_rule(self, data: Bangumi):
        data.rule_name = self._rule_name(data)
        data.save_path = self._gen_save_path(data)
        rule = {
@@ -87,37 +90,43 @@ class DownloadClient(TorrentPath):
            "assignedCategory": "Bangumi",
            "savePath": data.save_path,
        }
        self.client.rss_set_rule(rule_name=data.rule_name, rule_def=rule)
        await self.client.rss_set_rule(rule_name=data.rule_name, rule_def=rule)
        data.added = True
        logger.info(
            f"[Downloader] Add {data.official_title} Season {data.season} to auto download rules."
        )

    def set_rules(self, bangumi_info: list[Bangumi]):
    async def set_rules(self, bangumi_info: list[Bangumi]):
        logger.debug("[Downloader] Start adding rules.")
        for info in bangumi_info:
            self.set_rule(info)
            await self.set_rule(info)
        logger.debug("[Downloader] Finished.")

    def get_torrent_info(self, category="Bangumi", status_filter="completed", tag=None):
        return self.client.torrents_info(
    async def get_torrent_info(self, category="Bangumi", status_filter="completed", tag=None):
        return await self.client.torrents_info(
            status_filter=status_filter, category=category, tag=tag
        )

    def rename_torrent_file(self, _hash, old_path, new_path) -> bool:
    async def rename_torrent_file(self, _hash, old_path, new_path) -> bool:
        logger.info(f"{old_path} >> {new_path}")
        return self.client.torrents_rename_file(
        return await self.client.torrents_rename_file(
            torrent_hash=_hash, old_path=old_path, new_path=new_path
        )

    def delete_torrent(self, hashes):
        self.client.torrents_delete(hashes)
    async def delete_torrent(self, hashes, delete_files: bool = True):
        await self.client.torrents_delete(hashes, delete_files=delete_files)
        logger.info("[Downloader] Remove torrents.")

    def add_torrent(self, torrent: Torrent | list, bangumi: Bangumi) -> bool:
    async def pause_torrent(self, hashes: str):
        await self.client.torrents_pause(hashes)

    async def resume_torrent(self, hashes: str):
        await self.client.torrents_resume(hashes)

    async def add_torrent(self, torrent: Torrent | list, bangumi: Bangumi) -> bool:
        if not bangumi.save_path:
            bangumi.save_path = self._gen_save_path(bangumi)
        with RequestContent() as req:
        async with RequestContent() as req:
            if isinstance(torrent, list):
                if len(torrent) == 0:
                    logger.debug(f"[Downloader] No torrent found: {bangumi.official_title}")
@@ -126,16 +135,16 @@ class DownloadClient(TorrentPath):
                torrent_url = [t.url for t in torrent]
                torrent_file = None
            else:
                torrent_file = [req.get_content(t.url) for t in torrent]
                torrent_file = [await req.get_content(t.url) for t in torrent]
                torrent_url = None
        else:
            if "magnet" in torrent.url:
                torrent_url = torrent.url
                torrent_file = None
            else:
                torrent_file = req.get_content(torrent.url)
                torrent_file = await req.get_content(torrent.url)
                torrent_url = None
        if self.client.add_torrents(
        if await self.client.add_torrents(
            torrent_urls=torrent_url,
            torrent_files=torrent_file,
            save_path=bangumi.save_path,
@@ -147,28 +156,28 @@ class DownloadClient(TorrentPath):
            logger.debug(f"[Downloader] Torrent added before: {bangumi.official_title}")
            return False

    def move_torrent(self, hashes, location):
        self.client.move_torrent(hashes=hashes, new_location=location)
    async def move_torrent(self, hashes, location):
        await self.client.move_torrent(hashes=hashes, new_location=location)

    # RSS Parts
    def add_rss_feed(self, rss_link, item_path="Mikan_RSS"):
        self.client.rss_add_feed(url=rss_link, item_path=item_path)
    async def add_rss_feed(self, rss_link, item_path="Mikan_RSS"):
        await self.client.rss_add_feed(url=rss_link, item_path=item_path)

    def remove_rss_feed(self, item_path):
        self.client.rss_remove_item(item_path=item_path)
    async def remove_rss_feed(self, item_path):
        await self.client.rss_remove_item(item_path=item_path)

    def get_rss_feed(self):
        return self.client.rss_get_feeds()
    async def get_rss_feed(self):
        return await self.client.rss_get_feeds()

    def get_download_rules(self):
        return self.client.get_download_rule()
    async def get_download_rules(self):
        return await self.client.get_download_rule()

    def get_torrent_path(self, hashes):
        return self.client.get_torrent_path(hashes)
    async def get_torrent_path(self, hashes):
        return await self.client.get_torrent_path(hashes)

    def set_category(self, hashes, category):
        self.client.set_category(hashes, category)
    async def set_category(self, hashes, category):
        await self.client.set_category(hashes, category)

    def remove_rule(self, rule_name):
        self.client.remove_rule(rule_name)
    async def remove_rule(self, rule_name):
        await self.client.remove_rule(rule_name)
        logger.info(f"[Downloader] Delete rule: {rule_name}")
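The new `pause_torrent`/`resume_torrent` methods back the downloader page added in this commit. A hedged sketch of how a caller might drive them; the handler name and the `"|"`-joining of hashes (qBittorrent's multi-hash separator) are illustrative, not shown in the diff:

async def pause_hashes(hashes: list[str]) -> None:
    async with DownloadClient() as client:
        # qBittorrent accepts multiple hashes joined with "|"
        await client.pause_torrent("|".join(hashes))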
@@ -9,16 +9,17 @@ logger = logging.getLogger(__name__)


class SeasonCollector(DownloadClient):
    def collect_season(self, bangumi: Bangumi, link: str = None):
    async def collect_season(self, bangumi: Bangumi, link: str = None):
        logger.info(
            f"Start collecting {bangumi.official_title} Season {bangumi.season}..."
        )
        with SearchTorrent() as st, RSSEngine() as engine:
        async with SearchTorrent() as st:
            if not link:
                torrents = st.search_season(bangumi)
                torrents = await st.search_season(bangumi)
            else:
                torrents = st.get_torrents(link, bangumi.filter.replace(",", "|"))
            if self.add_torrent(torrents, bangumi):
                torrents = await st.get_torrents(link, bangumi.filter.replace(",", "|"))
        with RSSEngine() as engine:
            if await self.add_torrent(torrents, bangumi):
                logger.info(
                    f"Collections of {bangumi.official_title} Season {bangumi.season} completed."
                )
@@ -46,29 +47,29 @@ class SeasonCollector(DownloadClient):
        )

    @staticmethod
    def subscribe_season(data: Bangumi, parser: str = "mikan"):
    async def subscribe_season(data: Bangumi, parser: str = "mikan"):
        with RSSEngine() as engine:
            data.added = True
            data.eps_collect = True
            engine.add_rss(
            await engine.add_rss(
                rss_link=data.rss_link,
                name=data.official_title,
                aggregate=False,
                parser=parser,
            )
            result = engine.download_bangumi(data)
            result = await engine.download_bangumi(data)
            engine.bangumi.add(data)
            return result


def eps_complete():
async def eps_complete():
    with RSSEngine() as engine:
        datas = engine.bangumi.not_complete()
        if datas:
            logger.info("Start collecting full season...")
            for data in datas:
                if not data.eps_collect:
                    with SeasonCollector() as collector:
                        collector.collect_season(data)
                    async with SeasonCollector() as collector:
                        await collector.collect_season(data)
                    data.eps_collect = True
            engine.bangumi.update_all(datas)
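`eps_complete` is now a coroutine, so whatever scheduler invokes it must await it. A minimal sketch of an asyncio-based periodic task (the interval and wrapper function are illustrative, not part of the diff):

import asyncio

async def eps_complete_loop(interval_seconds: int = 1800):
    # Re-check incomplete bangumi every half hour.
    while True:
        await eps_complete()
        await asyncio.sleep(interval_seconds)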
@@ -48,7 +48,7 @@ class Renamer(DownloadClient):
            logger.error(f"[Renamer] Unknown rename method: {method}")
            return file_info.media_path

    def rename_file(
    async def rename_file(
        self,
        torrent_name: str,
        media_path: str,
@@ -67,7 +67,7 @@ class Renamer(DownloadClient):
            new_path = self.gen_path(ep, bangumi_name, method=method)
            if media_path != new_path:
                if new_path not in self.check_pool.keys():
                    if self.rename_torrent_file(
                    if await self.rename_torrent_file(
                        _hash=_hash, old_path=media_path, new_path=new_path
                    ):
                        return Notification(
@@ -78,10 +78,10 @@ class Renamer(DownloadClient):
        else:
            logger.warning(f"[Renamer] {media_path} parse failed")
            if settings.bangumi_manage.remove_bad_torrent:
                self.delete_torrent(hashes=_hash)
                await self.delete_torrent(hashes=_hash)
        return None

    def rename_collection(
    async def rename_collection(
        self,
        media_list: list[str],
        bangumi_name: str,
@@ -99,17 +99,17 @@ class Renamer(DownloadClient):
            if ep:
                new_path = self.gen_path(ep, bangumi_name, method=method)
                if media_path != new_path:
                    renamed = self.rename_torrent_file(
                    renamed = await self.rename_torrent_file(
                        _hash=_hash, old_path=media_path, new_path=new_path
                    )
                    if not renamed:
                        logger.warning(f"[Renamer] {media_path} rename failed")
                        # Delete bad torrent.
                        if settings.bangumi_manage.remove_bad_torrent:
                            self.delete_torrent(_hash)
                            await self.delete_torrent(_hash)
                            break

    def rename_subtitles(
    async def rename_subtitles(
        self,
        subtitle_list: list[str],
        torrent_name: str,
@@ -130,17 +130,17 @@ class Renamer(DownloadClient):
            if sub:
                new_path = self.gen_path(sub, bangumi_name, method=method)
                if subtitle_path != new_path:
                    renamed = self.rename_torrent_file(
                    renamed = await self.rename_torrent_file(
                        _hash=_hash, old_path=subtitle_path, new_path=new_path
                    )
                    if not renamed:
                        logger.warning(f"[Renamer] {subtitle_path} rename failed")

    def rename(self) -> list[Notification]:
    async def rename(self) -> list[Notification]:
        # Get torrent info
        logger.debug("[Renamer] Start rename process.")
        rename_method = settings.bangumi_manage.rename_method
        torrents_info = self.get_torrent_info()
        torrents_info = await self.get_torrent_info()
        renamed_info: list[Notification] = []
        for info in torrents_info:
            media_list, subtitle_list = self.check_files(info)
@@ -154,19 +154,19 @@ class Renamer(DownloadClient):
            }
            # Rename single media file
            if len(media_list) == 1:
                notify_info = self.rename_file(media_path=media_list[0], **kwargs)
                notify_info = await self.rename_file(media_path=media_list[0], **kwargs)
                if notify_info:
                    renamed_info.append(notify_info)
                # Rename subtitle file
                if len(subtitle_list) > 0:
                    self.rename_subtitles(subtitle_list=subtitle_list, **kwargs)
                    await self.rename_subtitles(subtitle_list=subtitle_list, **kwargs)
            # Rename collection
            elif len(media_list) > 1:
                logger.info("[Renamer] Start rename collection")
                self.rename_collection(media_list=media_list, **kwargs)
                await self.rename_collection(media_list=media_list, **kwargs)
                if len(subtitle_list) > 0:
                    self.rename_subtitles(subtitle_list=subtitle_list, **kwargs)
                self.set_category(info.hash, "BangumiCollection")
                    await self.rename_subtitles(subtitle_list=subtitle_list, **kwargs)
                await self.set_category(info.hash, "BangumiCollection")
            else:
                logger.warning(f"[Renamer] {info.name} has no media file")
        logger.debug("[Renamer] Rename process finished.")
@@ -177,12 +177,3 @@ class Renamer(DownloadClient):
            pass
        else:
            self.delete_torrent(hashes=torrent_hash)


if __name__ == "__main__":
    from module.conf import setup_logger

    settings.log.debug_enable = True
    setup_logger()
    with Renamer() as renamer:
        renamer.rename()
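Since `rename` is now a coroutine and `DownloadClient` only implements the async context-manager protocol, the removed `__main__` snippet above would no longer run; the equivalent under the new API would look roughly like this (a sketch, not from the diff):

import asyncio

async def run_rename():
    async with Renamer() as renamer:
        # Returns the list of Notification objects for renamed files.
        return await renamer.rename()

asyncio.run(run_rename())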
@@ -11,17 +11,19 @@ logger = logging.getLogger(__name__)


class TorrentManager(Database):
    @staticmethod
    def __match_torrents_list(data: Bangumi | BangumiUpdate) -> list:
        with DownloadClient() as client:
            torrents = client.get_torrent_info(status_filter=None)
    async def __match_torrents_list(data: Bangumi | BangumiUpdate) -> list:
        async with DownloadClient() as client:
            torrents = await client.get_torrent_info(status_filter=None)
        return [
            torrent.hash for torrent in torrents if torrent.save_path == data.save_path
            torrent.get("hash", torrent.get("infohash_v1", ""))
            for torrent in torrents
            if torrent.get("save_path") == data.save_path
        ]

    def delete_torrents(self, data: Bangumi, client: DownloadClient):
        hash_list = self.__match_torrents_list(data)
    async def delete_torrents(self, data: Bangumi, client: DownloadClient):
        hash_list = await self.__match_torrents_list(data)
        if hash_list:
            client.delete_torrent(hash_list)
            await client.delete_torrent(hash_list)
            logger.info(f"Delete rule and torrents for {data.official_title}")
            return ResponseModel(
                status_code=200,
@@ -37,20 +39,21 @@ class TorrentManager(Database):
                msg_zh=f"无法找到 {data.official_title} 的种子",
            )

    def delete_rule(self, _id: int | str, file: bool = False):
    async def delete_rule(self, _id: int | str, file: bool = False):
        data = self.bangumi.search_id(int(_id))
        if isinstance(data, Bangumi):
            with DownloadClient() as client:
            async with DownloadClient() as client:
                self.rss.delete(data.official_title)
                self.bangumi.delete_one(int(_id))
                torrent_message = None
                if file:
                    torrent_message = self.delete_torrents(data, client)
                    torrent_message = await self.delete_torrents(data, client)
                logger.info(f"[Manager] Delete rule for {data.official_title}")
                return ResponseModel(
                    status_code=200,
                    status=True,
                    msg_en=f"Delete rule for {data.official_title}. {torrent_message.msg_en if file else ''}",
                    msg_zh=f"删除 {data.official_title} 规则。{torrent_message.msg_zh if file else ''}",
                    msg_en=f"Delete rule for {data.official_title}. {torrent_message.msg_en if file and torrent_message else ''}",
                    msg_zh=f"删除 {data.official_title} 规则。{torrent_message.msg_zh if file and torrent_message else ''}",
                )
        else:
            return ResponseModel(
@@ -60,15 +63,14 @@ class TorrentManager(Database):
                msg_zh=f"无法找到 id {_id}",
            )

    def disable_rule(self, _id: str | int, file: bool = False):
    async def disable_rule(self, _id: str | int, file: bool = False):
        data = self.bangumi.search_id(int(_id))
        if isinstance(data, Bangumi):
            with DownloadClient() as client:
                # client.remove_rule(data.rule_name)
            async with DownloadClient() as client:
                data.deleted = True
                self.bangumi.update(data)
                if file:
                    torrent_message = self.delete_torrents(data, client)
                    torrent_message = await self.delete_torrents(data, client)
                    return torrent_message
                logger.info(f"[Manager] Disable rule for {data.official_title}")
                return ResponseModel(
@@ -105,15 +107,15 @@ class TorrentManager(Database):
                msg_zh=f"无法找到 id {_id}",
            )

    def update_rule(self, bangumi_id, data: BangumiUpdate):
    async def update_rule(self, bangumi_id, data: BangumiUpdate):
        old_data: Bangumi = self.bangumi.search_id(bangumi_id)
        if old_data:
            # Move torrent
            match_list = self.__match_torrents_list(old_data)
            with DownloadClient() as client:
            match_list = await self.__match_torrents_list(old_data)
            async with DownloadClient() as client:
                path = client._gen_save_path(data)
                if match_list:
                    client.move_torrent(match_list, path)
                    await client.move_torrent(match_list, path)
                data.save_path = path
            self.bangumi.update(data, bangumi_id)
            return ResponseModel(
@@ -131,11 +133,11 @@ class TorrentManager(Database):
                msg_zh=f"无法找到 id {bangumi_id} 的数据",
            )

    def refresh_poster(self):
    async def refresh_poster(self):
        bangumis = self.bangumi.search_all()
        for bangumi in bangumis:
            if not bangumi.poster_link:
                TitleParser().tmdb_poster_parser(bangumi)
                await TitleParser().tmdb_poster_parser(bangumi)
        self.bangumi.update_all(bangumis)
        return ResponseModel(
            status_code=200,
@@ -144,9 +146,9 @@ class TorrentManager(Database):
            msg_zh="刷新海报链接成功。",
        )

    def refind_poster(self, bangumi_id: int):
    async def refind_poster(self, bangumi_id: int):
        bangumi = self.bangumi.search_id(bangumi_id)
        TitleParser().tmdb_poster_parser(bangumi)
        await TitleParser().tmdb_poster_parser(bangumi)
        self.bangumi.update(bangumi)
        return ResponseModel(
            status_code=200,
@@ -155,9 +157,9 @@ class TorrentManager(Database):
            msg_zh="刷新海报链接成功。",
        )

    def refresh_calendar(self):
    async def refresh_calendar(self):
        """Fetch Bangumi.tv calendar and update air_weekday for all bangumi."""
        calendar_items = fetch_bgm_calendar()
        calendar_items = await fetch_bgm_calendar()
        if not calendar_items:
            return ResponseModel(
                status_code=500,
@@ -204,8 +206,3 @@ class TorrentManager(Database):
            )
        else:
            return data


if __name__ == "__main__":
    with TorrentManager() as manager:
        manager.refresh_poster()
@@ -12,14 +12,14 @@ logger = logging.getLogger(__name__)


class RequestContent(RequestURL):
    def get_torrents(
    async def get_torrents(
        self,
        _url: str,
        _filter: str = None,
        limit: int = None,
        retry: int = 3,
    ) -> list[Torrent]:
        soup = self.get_xml(_url, retry)
        soup = await self.get_xml(_url, retry)
        if soup:
            torrent_titles, torrent_urls, torrent_homepage = rss_parser(soup)
            torrents: list[Torrent] = []
@@ -40,38 +40,40 @@ class RequestContent(RequestURL):
            logger.warning(f"[Network] Failed to get torrents: {_url}")
            return []

    def get_xml(self, _url, retry: int = 3) -> xml.etree.ElementTree.Element:
        req = self.get_url(_url, retry)
    async def get_xml(self, _url, retry: int = 3) -> xml.etree.ElementTree.Element:
        req = await self.get_url(_url, retry)
        if req:
            return xml.etree.ElementTree.fromstring(req.text)

    # API JSON
    def get_json(self, _url) -> dict:
        req = self.get_url(_url)
    async def get_json(self, _url) -> dict:
        req = await self.get_url(_url)
        if req:
            return req.json()

    def post_json(self, _url, data: dict) -> dict:
        return self.post_url(_url, data).json()
    async def post_json(self, _url, data: dict) -> dict:
        resp = await self.post_url(_url, data)
        return resp.json()

    def post_data(self, _url, data: dict) -> dict:
        return self.post_url(_url, data)
    async def post_data(self, _url, data: dict):
        return await self.post_url(_url, data)

    def post_files(self, _url, data: dict, files: dict) -> dict:
        return self.post_form(_url, data, files)
    async def post_files(self, _url, data: dict, files: dict):
        return await self.post_form(_url, data, files)

    def get_html(self, _url):
        return self.get_url(_url).text
    async def get_html(self, _url):
        resp = await self.get_url(_url)
        return resp.text

    def get_content(self, _url):
        req = self.get_url(_url)
    async def get_content(self, _url):
        req = await self.get_url(_url)
        if req:
            return req.content

    def check_connection(self, _url):
        return self.check_url(_url)
    async def check_connection(self, _url):
        return await self.check_url(_url)

    def get_rss_title(self, _url):
        soup = self.get_xml(_url)
    async def get_rss_title(self, _url):
        soup = await self.get_xml(_url)
        if soup:
            return soup.find("./channel/title").text
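All `RequestContent` helpers are now coroutines and the class is entered with `async with`. A minimal usage sketch; the import path matches what other files in this diff use, the feed URL is illustrative:

import asyncio
from module.network import RequestContent

async def fetch_feed_title(url: str):
    # The HTTP client lives only inside the async context.
    async with RequestContent() as req:
        return await req.get_rss_title(url)

# asyncio.run(fetch_feed_title("https://mikanani.me/RSS/MyBangumi?token=..."))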
@@ -1,9 +1,8 @@
import asyncio
import logging
import socket
import time

import requests
import socks
import httpx
from httpx_socks import AsyncProxyTransport

from module.conf import settings

@@ -13,47 +12,50 @@ logger = logging.getLogger(__name__)
class RequestURL:
    def __init__(self):
        self.header = {"user-agent": "Mozilla/5.0", "Accept": "application/xml"}
        self._socks5_proxy = False
        self._client: httpx.AsyncClient | None = None

    def get_url(self, url, retry=3):
    async def get_url(self, url, retry=3):
        try_time = 0
        while True:
            try:
                req = self.session.get(url=url, headers=self.header, timeout=5)
                req = await self._client.get(url=url, headers=self.header)
                logger.debug(f"[Network] Successfully connected to {url}. Status: {req.status_code}")
                req.raise_for_status()
                return req
            except requests.RequestException:
            except httpx.HTTPStatusError:
                logger.debug(f"[Network] HTTP error from {url}.")
                break
            except httpx.RequestError:
                logger.debug(
                    f"[Network] Cannot connect to {url}. Wait for 5 seconds."
                )
                try_time += 1
                if try_time >= retry:
                    break
                time.sleep(5)
                await asyncio.sleep(5)
            except Exception as e:
                logger.debug(e)
                break
        logger.error(f"[Network] Unable to connect to {url}, Please check your network settings")
        return None

    def post_url(self, url: str, data: dict, retry=3):
    async def post_url(self, url: str, data: dict, retry=3):
        try_time = 0
        while True:
            try:
                req = self.session.post(
                    url=url, headers=self.header, data=data, timeout=5
                req = await self._client.post(
                    url=url, headers=self.header, data=data
                )
                req.raise_for_status()
                return req
            except requests.RequestException:
            except httpx.RequestError:
                logger.warning(
                    f"[Network] Cannot connect to {url}. Wait for 5 seconds."
                )
                try_time += 1
                if try_time >= retry:
                    break
                time.sleep(5)
                await asyncio.sleep(5)
            except Exception as e:
                logger.debug(e)
                break
@@ -61,64 +63,54 @@ class RequestURL:
        logger.warning("[Network] Please check DNS/Connection settings")
        return None

    def check_url(self, url: str):
    async def check_url(self, url: str):
        if "://" not in url:
            url = f"http://{url}"
        try:
            req = requests.head(url=url, headers=self.header, timeout=5)
            req = await self._client.head(url=url, headers=self.header)
            req.raise_for_status()
            return True
        except requests.RequestException:
        except (httpx.RequestError, httpx.HTTPStatusError):
            logger.debug(f"[Network] Cannot connect to {url}.")
            return False

    def post_form(self, url: str, data: dict, files):
    async def post_form(self, url: str, data: dict, files):
        try:
            req = self.session.post(
                url=url, headers=self.header, data=data, files=files, timeout=5
            req = await self._client.post(
                url=url, headers=self.header, data=data, files=files
            )
            req.raise_for_status()
            return req
        except requests.RequestException:
        except (httpx.RequestError, httpx.HTTPStatusError):
            logger.warning(f"[Network] Cannot connect to {url}.")
            return None

    def __enter__(self):
        self.session = requests.Session()
    async def __aenter__(self):
        timeout = httpx.Timeout(connect=5.0, read=10.0, write=10.0, pool=10.0)
        if settings.proxy.enable:
            if "http" in settings.proxy.type:
                if settings.proxy.username:
                    username=settings.proxy.username
                    password=settings.proxy.password
                    url = f"http://{username}:{password}@{settings.proxy.host}:{settings.proxy.port}"
                    self.session.proxies = {
                        "http": url,
                        "https": url,
                    }
                    username = settings.proxy.username
                    password = settings.proxy.password
                    proxy_url = f"http://{username}:{password}@{settings.proxy.host}:{settings.proxy.port}"
                else:
                    url = f"http://{settings.proxy.host}:{settings.proxy.port}"
                    self.session.proxies = {
                        "http": url,
                        "https": url,
                    }
                    proxy_url = f"http://{settings.proxy.host}:{settings.proxy.port}"
                self._client = httpx.AsyncClient(proxy=proxy_url, timeout=timeout)
            elif settings.proxy.type == "socks5":
                self._socks5_proxy = True
                socks.set_default_proxy(
                    socks.SOCKS5,
                    addr=settings.proxy.host,
                    port=settings.proxy.port,
                    rdns=True,
                    username=settings.proxy.username,
                    password=settings.proxy.password,
                )
                socket.socket = socks.socksocket
                if settings.proxy.username:
                    socks_url = f"socks5://{settings.proxy.username}:{settings.proxy.password}@{settings.proxy.host}:{settings.proxy.port}"
                else:
                    socks_url = f"socks5://{settings.proxy.host}:{settings.proxy.port}"
                transport = AsyncProxyTransport.from_url(socks_url, rdns=True)
                self._client = httpx.AsyncClient(transport=transport, timeout=timeout)
            else:
                logger.error(f"[Network] Unsupported proxy type: {settings.proxy.type}")
                self._client = httpx.AsyncClient(timeout=timeout)
        else:
            self._client = httpx.AsyncClient(timeout=timeout)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._socks5_proxy:
            socks.set_default_proxy()
            socket.socket = socks.socksocket
            self._socks5_proxy = False
        self.session.close()
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self._client:
            await self._client.aclose()
        self._client = None
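Worth noting the two proxy paths above construct the client differently: plain HTTP proxies go through httpx's own `proxy=` argument, while SOCKS5 is delegated to an `httpx_socks` transport (replacing the old global `socks.set_default_proxy` monkey-patch, which leaked across the whole process). A standalone sketch of the same setup, assuming `httpx-socks` is installed and with placeholder proxy addresses:

import httpx
from httpx_socks import AsyncProxyTransport

timeout = httpx.Timeout(connect=5.0, read=10.0, write=10.0, pool=10.0)

# HTTP proxy: httpx handles the CONNECT tunnelling itself.
http_client = httpx.AsyncClient(proxy="http://user:pass@127.0.0.1:7890", timeout=timeout)

# SOCKS5 proxy: the transport owns the socket handling, scoped to this client only.
transport = AsyncProxyTransport.from_url("socks5://127.0.0.1:1080", rdns=True)
socks_client = httpx.AsyncClient(transport=transport, timeout=timeout)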
@@ -40,18 +40,18 @@ class PostNotification:
        poster_path = db.bangumi.match_poster(notify.official_title)
        notify.poster_path = poster_path

    def send_msg(self, notify: Notification) -> bool:
    async def send_msg(self, notify: Notification) -> bool:
        self._get_poster(notify)
        try:
            self.notifier.post_msg(notify)
            await self.notifier.post_msg(notify)
            logger.debug(f"Send notification: {notify.official_title}")
        except Exception as e:
            logger.warning(f"Failed to send notification: {e}")
            return False

    def __enter__(self):
        self.notifier.__enter__()
    async def __aenter__(self):
        await self.notifier.__aenter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.notifier.__exit__(exc_type, exc_val, exc_tb)
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.notifier.__aexit__(exc_type, exc_val, exc_tb)

@@ -19,9 +19,9 @@ class BarkNotification(RequestContent):
        """
        return text.strip()

    def post_msg(self, notify: Notification) -> bool:
    async def post_msg(self, notify: Notification) -> bool:
        text = self.gen_message(notify)
        data = {"title": notify.official_title, "body": text, "icon": notify.poster_path, "device_key": self.token}
        resp = self.post_data(self.notification_url, data)
        resp = await self.post_data(self.notification_url, data)
        logger.debug(f"Bark notification: {resp.status_code}")
        return resp.status_code == 200

@@ -20,12 +20,12 @@ class ServerChanNotification(RequestContent):
        """
        return text.strip()

    def post_msg(self, notify: Notification) -> bool:
    async def post_msg(self, notify: Notification) -> bool:
        text = self.gen_message(notify)
        data = {
            "title": notify.official_title,
            "desp": text,
        }
        resp = self.post_data(self.notification_url, data)
        resp = await self.post_data(self.notification_url, data)
        logger.debug(f"ServerChan notification: {resp.status_code}")
        return resp.status_code == 200

@@ -19,9 +19,9 @@ class SlackNotification(RequestContent):
        """
        return text.strip()

    def post_msg(self, notify: Notification) -> bool:
    async def post_msg(self, notify: Notification) -> bool:
        text = self.gen_message(notify)
        data = {"title": notify.official_title, "body": text, "device_key": self.token}
        resp = self.post_data(self.notification_url, data)
        resp = await self.post_data(self.notification_url, data)
        logger.debug(f"Bark notification: {resp.status_code}")
        return resp.status_code == 200

@@ -21,7 +21,7 @@ class TelegramNotification(RequestContent):
        """
        return text.strip()

    def post_msg(self, notify: Notification) -> bool:
    async def post_msg(self, notify: Notification) -> bool:
        text = self.gen_message(notify)
        data = {
            "chat_id": self.chat_id,
@@ -31,8 +31,8 @@ class TelegramNotification(RequestContent):
        }
        photo = load_image(notify.poster_path)
        if photo:
            resp = self.post_files(self.photo_url, data, files={"photo": photo})
            resp = await self.post_files(self.photo_url, data, files={"photo": photo})
        else:
            resp = self.post_data(self.message_url, data)
            resp = await self.post_data(self.message_url, data)
        logger.debug(f"Telegram notification: {resp.status_code}")
        return resp.status_code == 200

@@ -22,7 +22,7 @@ class WecomNotification(RequestContent):
        """
        return text.strip()

    def post_msg(self, notify: Notification) -> bool:
    async def post_msg(self, notify: Notification) -> bool:
        ## Change message format to match Wecom push better
        title = "【番剧更新】" + notify.official_title
        msg = self.gen_message(notify)
@@ -37,6 +37,6 @@ class WecomNotification(RequestContent):
            "msg": msg,
            "picurl": picurl,
        }
        resp = self.post_data(self.notification_url, data)
        resp = await self.post_data(self.notification_url, data)
        logger.debug(f"Wecom notification: {resp.status_code}")
        return resp.status_code == 200
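Each concrete notifier now exposes an async `post_msg`, and `PostNotification` proxies the notifier's async context manager. A hedged usage sketch; `PostNotification`'s constructor arguments are not shown in this excerpt, so they are omitted here:

import asyncio

async def notify(notification) -> bool:
    # Entering PostNotification opens the underlying notifier's HTTP session.
    async with PostNotification() as poster:
        return await poster.send_msg(notification)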
@@ -7,13 +7,13 @@ logger = logging.getLogger(__name__)
BGM_CALENDAR_URL = "https://api.bgm.tv/calendar"


def fetch_bgm_calendar() -> list[dict]:
async def fetch_bgm_calendar() -> list[dict]:
    """Fetch the current season's broadcast calendar from Bangumi.tv API.

    Returns a flat list of anime items with their air_weekday (0=Mon, ..., 6=Sun).
    """
    with RequestContent() as req:
        data = req.get_json(BGM_CALENDAR_URL)
    async with RequestContent() as req:
        data = await req.get_json(BGM_CALENDAR_URL)

    if not data:
        logger.warning("[BGM Calendar] Failed to fetch calendar data.")

@@ -5,10 +5,10 @@ def search_url(e):
    return f"https://api.bgm.tv/search/subject/{e}?responseGroup=large"


def bgm_parser(title):
async def bgm_parser(title):
    url = search_url(title)
    with RequestContent() as req:
        contents = req.get_json(url)
    async with RequestContent() as req:
        contents = await req.get_json(url)
    if contents:
        return contents[0]
    else:

@@ -7,10 +7,10 @@ from module.network import RequestContent
from module.utils import save_image


def mikan_parser(homepage: str):
async def mikan_parser(homepage: str):
    root_path = parse_url(homepage).host
    with RequestContent() as req:
        content = req.get_html(homepage)
    async with RequestContent() as req:
        content = await req.get_html(homepage)
        soup = BeautifulSoup(content, "html.parser")
        poster_div = soup.find("div", {"class": "bangumi-poster"}).get("style")
        official_title = soup.select_one(
@@ -20,7 +20,7 @@ def mikan_parser(homepage: str):
        if poster_div:
            poster_path = poster_div.split("url('")[1].split("')")[0]
            poster_path = poster_path.split("?")[0]
            img = req.get_content(f"https://{root_path}{poster_path}")
            img = await req.get_content(f"https://{root_path}{poster_path}")
            suffix = poster_path.split(".")[-1]
            poster_link = save_image(img, suffix)
            return poster_link, official_title
@@ -28,5 +28,6 @@ def mikan_parser(homepage: str):


if __name__ == '__main__':
    import asyncio
    homepage = "https://mikanani.me/Home/Episode/c89b3c6f0c1c0567a618f5288b853823c87a9862"
    print(mikan_parser(homepage))
    print(asyncio.run(mikan_parser(homepage)))
@@ -31,11 +31,11 @@ def info_url(e, key):
    return f"{TMDB_URL}/3/tv/{e}?api_key={TMDB_API}&language={LANGUAGE[key]}"


def is_animation(tv_id, language) -> bool:
async def is_animation(tv_id, language, req: RequestContent) -> bool:
    url_info = info_url(tv_id, language)
    with RequestContent() as req:
        type_id = req.get_json(url_info)["genres"]
        for type in type_id:
    type_id = await req.get_json(url_info)
    if type_id:
        for type in type_id.get("genres", []):
            if type.get("id") == 16:
                return True
    return False
@@ -56,21 +56,27 @@ def get_season(seasons: list) -> tuple[int, str]:
    return len(ss), ss[-1].get("poster_path")


def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
    with RequestContent() as req:
async def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
    async with RequestContent() as req:
        url = search_url(title)
        contents = req.get_json(url).get("results")
        contents = await req.get_json(url)
        if not contents:
            return None
        contents = contents.get("results")
        if contents.__len__() == 0:
            url = search_url(title.replace(" ", ""))
            contents = req.get_json(url).get("results")
            contents_resp = await req.get_json(url)
            if not contents_resp:
                return None
            contents = contents_resp.get("results")
        # Check whether the result is an animation
        if contents:
            for content in contents:
                id = content["id"]
                if is_animation(id, language):
                if await is_animation(id, language, req):
                    break
        url_info = info_url(id, language)
        info_content = req.get_json(url_info)
        info_content = await req.get_json(url_info)
        season = [
            {
                "season": s.get("name"),
@@ -87,7 +93,7 @@ def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
        year_number = info_content.get("first_air_date").split("-")[0]
        if poster_path:
            if not test:
                img = req.get_content(f"https://image.tmdb.org/t/p/w780{poster_path}")
                img = await req.get_content(f"https://image.tmdb.org/t/p/w780{poster_path}")
                poster_link = save_image(img, "jpg")
            else:
                poster_link = "https://image.tmdb.org/t/p/w780" + poster_path
@@ -107,4 +113,5 @@ def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:


if __name__ == "__main__":
    print(tmdb_parser("魔法禁书目录", "zh"))
    import asyncio
    print(asyncio.run(tmdb_parser("魔法禁书目录", "zh")))
@@ -31,8 +31,8 @@ class TitleParser:
            logger.warning(f"Cannot parse {torrent_path} with error {e}")

    @staticmethod
    def tmdb_parser(title: str, season: int, language: str):
        tmdb_info = tmdb_parser(title, language)
    async def tmdb_parser(title: str, season: int, language: str):
        tmdb_info = await tmdb_parser(title, language)
        if tmdb_info:
            logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
            tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season
@@ -43,8 +43,8 @@ class TitleParser:
        return title, season, None, None

    @staticmethod
    def tmdb_poster_parser(bangumi: Bangumi):
        tmdb_info = tmdb_parser(bangumi.official_title, settings.rss_parser.language)
    async def tmdb_poster_parser(bangumi: Bangumi):
        tmdb_info = await tmdb_parser(bangumi.official_title, settings.rss_parser.language)
        if tmdb_info:
            logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
            bangumi.poster_link = tmdb_info.poster_link
@@ -104,5 +104,5 @@ class TitleParser:
        return None

    @staticmethod
    def mikan_parser(homepage: str) -> tuple[str, str]:
        return mikan_parser(homepage)
    async def mikan_parser(homepage: str) -> tuple[str, str]:
        return await mikan_parser(homepage)
@@ -12,17 +12,17 @@ logger = logging.getLogger(__name__)


class RSSAnalyser(TitleParser):
    def official_title_parser(self, bangumi: Bangumi, rss: RSSItem, torrent: Torrent):
    async def official_title_parser(self, bangumi: Bangumi, rss: RSSItem, torrent: Torrent):
        if rss.parser == "mikan":
            try:
                bangumi.poster_link, bangumi.official_title = self.mikan_parser(
                bangumi.poster_link, bangumi.official_title = await self.mikan_parser(
                    torrent.homepage
                )
            except AttributeError:
                logger.warning("[Parser] Mikan torrent has no homepage info.")
                pass
        elif rss.parser == "tmdb":
            tmdb_title, season, year, poster_link = self.tmdb_parser(
            tmdb_title, season, year, poster_link = await self.tmdb_parser(
                bangumi.official_title, bangumi.season, settings.rss_parser.language
            )
            bangumi.official_title = tmdb_title
@@ -34,45 +34,45 @@ class RSSAnalyser(TitleParser):
        bangumi.official_title = re.sub(r"[/:.\\]", " ", bangumi.official_title)

    @staticmethod
    def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[Torrent]:
        with RequestContent() as req:
    async def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[Torrent]:
        async with RequestContent() as req:
            if full_parse:
                rss_torrents = req.get_torrents(rss_link)
                rss_torrents = await req.get_torrents(rss_link)
            else:
                rss_torrents = req.get_torrents(rss_link, "\\d+-\\d+")
                rss_torrents = await req.get_torrents(rss_link, "\\d+-\\d+")
        return rss_torrents

    def torrents_to_data(
    async def torrents_to_data(
        self, torrents: list[Torrent], rss: RSSItem, full_parse: bool = True
    ) -> list:
        new_data = []
        for torrent in torrents:
            bangumi = self.raw_parser(raw=torrent.name)
            if bangumi and bangumi.title_raw not in [i.title_raw for i in new_data]:
                self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
                await self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
                if not full_parse:
                    return [bangumi]
                new_data.append(bangumi)
                logger.info(f"[RSS] New bangumi founded: {bangumi.official_title}")
        return new_data

    def torrent_to_data(self, torrent: Torrent, rss: RSSItem) -> Bangumi:
    async def torrent_to_data(self, torrent: Torrent, rss: RSSItem) -> Bangumi:
        bangumi = self.raw_parser(raw=torrent.name)
        if bangumi:
            self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
            await self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
            bangumi.rss_link = rss.url
            return bangumi

    def rss_to_data(
    async def rss_to_data(
        self, rss: RSSItem, engine: RSSEngine, full_parse: bool = True
    ) -> list[Bangumi]:
        rss_torrents = self.get_rss_torrents(rss.url, full_parse)
        rss_torrents = await self.get_rss_torrents(rss.url, full_parse)
        torrents_to_add = engine.bangumi.match_list(rss_torrents, rss.url)
        if not torrents_to_add:
            logger.debug("[RSS] No new title has been found.")
            return []
        # New List
        new_data = self.torrents_to_data(torrents_to_add, rss, full_parse)
        new_data = await self.torrents_to_data(torrents_to_add, rss, full_parse)
        if new_data:
            # Add to database
            engine.bangumi.add_all(new_data)
@@ -80,8 +80,8 @@ class RSSAnalyser(TitleParser):
        else:
            return []

    def link_to_data(self, rss: RSSItem) -> Bangumi | ResponseModel:
        torrents = self.get_rss_torrents(rss.url, False)
    async def link_to_data(self, rss: RSSItem) -> Bangumi | ResponseModel:
        torrents = await self.get_rss_torrents(rss.url, False)
        if not torrents:
            return ResponseModel(
                status=False,
@@ -90,7 +90,7 @@ class RSSAnalyser(TitleParser):
                msg_zh="无法找到种子。",
            )
        for torrent in torrents:
            data = self.torrent_to_data(torrent, rss)
            data = await self.torrent_to_data(torrent, rss)
            if data:
                return data
        return ResponseModel(
@@ -99,4 +99,3 @@ class RSSAnalyser(TitleParser):
            msg_en="Cannot parse this link.",
            msg_zh="无法解析此链接。",
        )
@@ -16,9 +16,9 @@ class RSSEngine(Database):
        self._to_refresh = False

    @staticmethod
    def _get_torrents(rss: RSSItem) -> list[Torrent]:
        with RequestContent() as req:
            torrents = req.get_torrents(rss.url)
    async def _get_torrents(rss: RSSItem) -> list[Torrent]:
        async with RequestContent() as req:
            torrents = await req.get_torrents(rss.url)
        # Add RSS ID
        for torrent in torrents:
            torrent.rss_id = rss.id
@@ -31,7 +31,7 @@ class RSSEngine(Database):
        else:
            return []

    def add_rss(
    async def add_rss(
        self,
        rss_link: str,
        name: str | None = None,
@@ -39,8 +39,8 @@ class RSSEngine(Database):
        parser: str = "mikan",
    ):
        if not name:
            with RequestContent() as req:
                name = req.get_rss_title(rss_link)
            async with RequestContent() as req:
                name = await req.get_rss_title(rss_link)
            if not name:
                return ResponseModel(
                    status=False,
@@ -94,8 +94,8 @@ class RSSEngine(Database):
            msg_zh="删除 RSS 成功。",
        )

    def pull_rss(self, rss_item: RSSItem) -> list[Torrent]:
        torrents = self._get_torrents(rss_item)
    async def pull_rss(self, rss_item: RSSItem) -> list[Torrent]:
        torrents = await self._get_torrents(rss_item)
        new_torrents = self.torrent.check_new(torrents)
        return new_torrents

@@ -110,7 +110,7 @@ class RSSEngine(Database):
            return matched
        return None

    def refresh_rss(self, client: DownloadClient, rss_id: Optional[int] = None):
    async def refresh_rss(self, client: DownloadClient, rss_id: Optional[int] = None):
        # Get All RSS Items
        if not rss_id:
            rss_items: list[RSSItem] = self.rss.search_active()
@@ -120,25 +120,25 @@ class RSSEngine(Database):
        # From RSS Items, get all torrents
        logger.debug(f"[Engine] Get {len(rss_items)} RSS items")
        for rss_item in rss_items:
            new_torrents = self.pull_rss(rss_item)
            new_torrents = await self.pull_rss(rss_item)
            # Get all enabled bangumi data
            for torrent in new_torrents:
                matched_data = self.match_torrent(torrent)
                if matched_data:
                    if client.add_torrent(torrent, matched_data):
                    if await client.add_torrent(torrent, matched_data):
                        logger.debug(f"[Engine] Add torrent {torrent.name} to client")
                        torrent.downloaded = True
            # Add all torrents to database
            self.torrent.add_all(new_torrents)

    def download_bangumi(self, bangumi: Bangumi):
        with RequestContent() as req:
            torrents = req.get_torrents(
    async def download_bangumi(self, bangumi: Bangumi):
        async with RequestContent() as req:
            torrents = await req.get_torrents(
                bangumi.rss_link, bangumi.filter.replace(",", "|")
            )
        if torrents:
            with DownloadClient() as client:
                client.add_torrent(torrents, bangumi)
            async with DownloadClient() as client:
                await client.add_torrent(torrents, bangumi)
            self.torrent.add_all(torrents)
            return ResponseModel(
                status=True,
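Putting the pieces together, one refresh cycle now nests a sync database context with two async ones. A minimal sketch, assuming `RSSEngine` itself keeps the sync `with` for its database session as shown elsewhere in this diff:

import asyncio

async def refresh_once():
    with RSSEngine() as engine:
        async with DownloadClient() as client:
            # Pull every active feed and hand new torrents to the client.
            await engine.refresh_rss(client)

asyncio.run(refresh_once())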
@@ -20,22 +20,20 @@ BangumiJSON: TypeAlias = str


class SearchTorrent(RequestContent, RSSAnalyser):
    def search_torrents(self, rss_item: RSSItem) -> list[Torrent]:
        return self.get_torrents(rss_item.url)
        # torrents = self.get_torrents(rss_item.url)
        # return torrents
    async def search_torrents(self, rss_item: RSSItem) -> list[Torrent]:
        return await self.get_torrents(rss_item.url)

    def analyse_keyword(
    async def analyse_keyword(
        self, keywords: list[str], site: str = "mikan", limit: int = 5
    ) -> BangumiJSON:
    ):
        rss_item = search_url(site, keywords)
        torrents = self.search_torrents(rss_item)
        torrents = await self.search_torrents(rss_item)
        # yield for EventSourceResponse (Server Send)
        exist_list = []
        for torrent in torrents:
            if len(exist_list) >= limit:
                break
            bangumi = self.torrent_to_data(torrent=torrent, rss=rss_item)
            bangumi = await self.torrent_to_data(torrent=torrent, rss=rss_item)
            if bangumi:
                special_link = self.special_url(bangumi, site).url
                if special_link not in exist_list:
@@ -49,7 +47,7 @@ class SearchTorrent(RequestContent, RSSAnalyser):
        url = search_url(site, keywords)
        return url

    def search_season(self, data: Bangumi, site: str = "mikan") -> list[Torrent]:
    async def search_season(self, data: Bangumi, site: str = "mikan") -> list[Torrent]:
        rss_item = self.special_url(data, site)
        torrents = self.search_torrents(rss_item)
        torrents = await self.search_torrents(rss_item)
        return [torrent for torrent in torrents if data.title_raw in torrent.name]
@@ -2,12 +2,12 @@ import re

from urllib3.util import parse_url

from module.network import RequestContent
from module.rss import RSSEngine
from module.utils import save_image
from module.network import RequestContent


def from_30_to_31():
async def from_30_to_31():
    with RSSEngine() as db:
        db.migrate()
        # Update poster link
@@ -29,18 +29,18 @@ def from_30_to_31():
            aggregate = True
        else:
            aggregate = False
        db.add_rss(rss_link=rss, aggregate=aggregate)
        await db.add_rss(rss_link=rss, aggregate=aggregate)


def cache_image():
    with RSSEngine() as db, RequestContent() as req:
async def cache_image():
    with RSSEngine() as db:
        bangumis = db.bangumi.search_all()
        for bangumi in bangumis:
            if bangumi.poster_link:
                # Hash local path
                img = req.get_content(bangumi.poster_link)
                suffix = bangumi.poster_link.split(".")[-1]
                img_path = save_image(img, suffix)
                bangumi.poster_link = img_path
        async with RequestContent() as req:
            for bangumi in bangumis:
                if bangumi.poster_link:
                    # Hash local path
                    img = await req.get_content(bangumi.poster_link)
                    suffix = bangumi.poster_link.split(".")[-1]
                    img_path = save_image(img, suffix)
                    bangumi.poster_link = img_path
        db.bangumi.update_all(bangumis)


@@ -1,6 +1,6 @@
import json

import requests
import httpx


def load(filename):
@@ -11,9 +11,9 @@ def load(filename):
def save(filename, obj):
    with open(filename, "w", encoding="utf-8") as f:
        json.dump(obj, f, indent=4, separators=(",", ": "), ensure_ascii=False)
    pass


def get(url):
    req = requests.get(url)
    return req.json()
async def get(url):
    async with httpx.AsyncClient() as client:
        req = await client.get(url)
        return req.json()

@@ -1,12 +1,12 @@
from module.parser.analyser.tmdb_parser import tmdb_parser


def test_tmdb_parser():
async def test_tmdb_parser():
    bangumi_title = "海盗战记"
    bangumi_year = "2019"
    bangumi_season = 2

    tmdb_info = tmdb_parser(bangumi_title, "zh", test=True)
    tmdb_info = await tmdb_parser(bangumi_title, "zh", test=True)

    assert tmdb_info.title == "冰海战记"
    assert tmdb_info.year == bangumi_year
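A coroutine test like `test_tmdb_parser` only runs under an async-aware pytest plugin; with `pytest-asyncio` in strict mode it would need a marker, roughly like the following (the plugin choice and marker are assumptions, not visible in this diff):

import pytest

@pytest.mark.asyncio
async def test_tmdb_parser_marked():
    # Same assertion as the repo's test, but explicitly marked for pytest-asyncio.
    tmdb_info = await tmdb_parser("海盗战记", "zh", test=True)
    assert tmdb_info.title == "冰海战记"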
backend/uv.lock (generated, 1811 lines) — file diff suppressed because it is too large
webui/src/api/downloader.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import type { QbTorrentInfo } from '#/downloader';
import type { ApiSuccess } from '#/api';

export const apiDownloader = {
  async getTorrents() {
    const { data } = await axios.get<QbTorrentInfo[]>(
      'api/v1/downloader/torrents'
    );
    return data!;
  },

  async pause(hashes: string[]) {
    const { data } = await axios.post<ApiSuccess>(
      'api/v1/downloader/torrents/pause',
      { hashes }
    );
    return data!;
  },

  async resume(hashes: string[]) {
    const { data } = await axios.post<ApiSuccess>(
      'api/v1/downloader/torrents/resume',
      { hashes }
    );
    return data!;
  },

  async deleteTorrents(hashes: string[], deleteFiles: boolean = false) {
    const { data } = await axios.post<ApiSuccess>(
      'api/v1/downloader/torrents/delete',
      { hashes, delete_files: deleteFiles }
    );
    return data!;
  },
};
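The frontend above calls `api/v1/downloader/torrents` plus `pause`/`resume`/`delete` actions, but the matching backend routes are not shown in this excerpt. A hypothetical FastAPI sketch of what they might look like; the paths mirror the frontend, everything else (router wiring, model names, the hash joining) is an assumption:

from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter(prefix="/api/v1/downloader", tags=["downloader"])

class HashesIn(BaseModel):
    hashes: list[str]
    delete_files: bool = False

@router.get("/torrents")
async def list_torrents():
    async with DownloadClient() as client:
        # status_filter=None returns torrents in every state, as the manager does.
        return await client.get_torrent_info(status_filter=None)

@router.post("/torrents/pause")
async def pause_torrents(body: HashesIn):
    async with DownloadClient() as client:
        await client.pause_torrent("|".join(body.hashes))
        return {"status": True}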
@@ -2,7 +2,7 @@
import { ErrorPicture, Write } from '@icon-park/vue-next';
import type { BangumiRule } from '#/bangumi';

withDefaults(
const props = withDefaults(
  defineProps<{
    type?: 'primary' | 'search' | 'mobile';
    bangumi: BangumiRule;
@@ -13,6 +13,8 @@ withDefaults(
);

defineEmits(['click']);

const posterSrc = computed(() => resolvePosterUrl(props.bangumi.poster_link));
</script>

<template>
@@ -28,7 +30,7 @@ defineEmits(['click']);
    >
      <div class="card-poster">
        <template v-if="bangumi.poster_link">
          <img :src="bangumi.poster_link" :alt="bangumi.official_title" class="card-img" />
          <img :src="posterSrc" :alt="bangumi.official_title" class="card-img" />
        </template>
        <template v-else>
          <div class="card-placeholder">
@@ -37,6 +39,14 @@ defineEmits(['click']);
        </template>

        <div class="card-overlay">
          <div class="card-overlay-tags">
            <ab-tag :title="`Season ${bangumi.season}`" type="primary" />
            <ab-tag
              v-if="bangumi.group_name"
              :title="bangumi.group_name"
              type="primary"
            />
          </div>
          <div class="card-edit-btn">
            <Write size="18" />
          </div>
@@ -45,14 +55,6 @@ defineEmits(['click']);

      <div class="card-info">
        <div class="card-title">{{ bangumi.official_title }}</div>
        <div class="card-tags">
          <ab-tag :title="`Season ${bangumi.season}`" type="primary" />
          <ab-tag
            v-if="bangumi.group_name"
            :title="bangumi.group_name"
            type="primary"
          />
        </div>
      </div>
    </div>

@@ -62,7 +64,7 @@ defineEmits(['click']);
    <div class="search-card-content">
      <div class="search-card-thumb">
        <template v-if="bangumi.poster_link">
          <img :src="bangumi.poster_link" :alt="bangumi.official_title" class="search-card-img" />
          <img :src="posterSrc" :alt="bangumi.official_title" class="search-card-img" />
        </template>
        <template v-else>
          <div class="card-placeholder card-placeholder--small">
@@ -136,6 +138,7 @@ defineEmits(['click']);
  position: absolute;
  inset: 0;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  opacity: 0;
@@ -152,6 +155,24 @@ defineEmits(['click']);
  }
}

.card-overlay-tags {
  position: absolute;
  bottom: 6px;
  left: 6px;
  right: 6px;
  display: flex;
  gap: 3px;
  flex-wrap: wrap;

  :deep(.tag) {
    background: rgba(0, 0, 0, 0.5);
    border-color: rgba(255, 255, 255, 0.4);
    color: #fff;
    font-size: 9px;
    padding: 1px 6px;
  }
}

.card-edit-btn {
  width: 40px;
  height: 40px;
@@ -180,7 +201,6 @@ defineEmits(['click']);
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
  margin-bottom: 4px;
  transition: color var(--transition-normal);
}
@@ -80,6 +80,32 @@
    "step2_desc": "Set your qBittorrent host address, username, and password.",
    "step3_title": "Access Downloader",
    "step3_desc": "Once configured, the downloader web UI will be embedded right here."
  },
  "empty_torrents": "No torrents in Bangumi category",
  "selected": "selected",
  "torrent": {
    "name": "Name",
    "progress": "Progress",
    "status": "Status",
    "size": "Size",
    "dlspeed": "DL Speed",
    "upspeed": "UP Speed",
    "peers": "Seeds/Peers"
  },
  "state": {
    "downloading": "Downloading",
    "seeding": "Seeding",
    "paused": "Paused",
    "stalled": "Stalled",
    "queued": "Queued",
    "checking": "Checking",
    "error": "Error",
    "metadata": "Metadata"
  },
  "action": {
    "pause": "Pause",
    "resume": "Resume",
    "delete": "Delete"
  }
},
"homepage": {
@@ -80,6 +80,32 @@
    "step2_desc": "设置 qBittorrent 的地址、用户名和密码。",
    "step3_title": "访问下载器",
    "step3_desc": "配置完成后,下载器界面将直接嵌入此处。"
  },
  "empty_torrents": "Bangumi 分类中暂无种子",
  "selected": "已选择",
  "torrent": {
    "name": "名称",
    "progress": "进度",
    "status": "状态",
    "size": "大小",
    "dlspeed": "下载速度",
    "upspeed": "上传速度",
    "peers": "做种/下载"
  },
  "state": {
    "downloading": "下载中",
    "seeding": "做种中",
    "paused": "已暂停",
    "stalled": "等待中",
    "queued": "排队中",
    "checking": "校验中",
    "error": "错误",
    "metadata": "获取元数据"
  },
  "action": {
    "pause": "暂停",
    "resume": "恢复",
    "delete": "删除"
  }
},
"homepage": {
@@ -7,6 +7,7 @@ definePage({
|
||||
});
|
||||
|
||||
const { t } = useMyI18n();
|
||||
const posterSrc = (link: string | null | undefined) => resolvePosterUrl(link);
|
||||
const { bangumi } = storeToRefs(useBangumiStore());
|
||||
const { getAll, openEditPopup } = useBangumiStore();
|
||||
const { isMobile } = useBreakpointQuery();
|
||||
@@ -136,23 +137,23 @@ function isToday(index: number): boolean {
        <div class="calendar-card-poster">
          <img
            v-if="item.poster_link"
-           :src="item.poster_link"
+           :src="posterSrc(item.poster_link)"
            :alt="item.official_title"
            class="calendar-card-img"
          />
          <div v-else class="calendar-card-placeholder">
            <ErrorPicture theme="outline" size="20" />
          </div>
        </div>
-       <div class="calendar-card-info">
-         <div class="calendar-card-title">{{ item.official_title }}</div>
-         <div class="calendar-card-meta">
-           <ab-tag :title="`S${item.season}`" type="primary" />
-           <ab-tag
-             v-if="item.group_name"
-             :title="item.group_name"
-             type="primary"
-           />
+         <div class="calendar-card-overlay">
+           <div class="calendar-card-overlay-tags">
+             <ab-tag :title="`S${item.season}`" type="primary" />
+             <ab-tag
+               v-if="item.group_name"
+               :title="item.group_name"
+               type="primary"
+             />
+           </div>
+           <div class="calendar-card-overlay-title">{{ item.official_title }}</div>
+         </div>
        </div>
      </div>
@@ -202,7 +203,7 @@ function isToday(index: number): boolean {
      <div class="calendar-row-poster">
        <img
          v-if="item.poster_link"
-         :src="item.poster_link"
+         :src="posterSrc(item.poster_link)"
          :alt="item.official_title"
          class="calendar-row-img"
        />
@@ -394,6 +395,7 @@ function isToday(index: number): boolean {
}

.calendar-card-poster {
+ position: relative;
  border-radius: var(--radius-sm);
  overflow: hidden;
  aspect-ratio: 2 / 3;
@@ -417,25 +419,49 @@ function isToday(index: number): boolean {
  transition: background-color var(--transition-normal);
}

-.calendar-card-info {
-  padding: 6px 2px 2px;
+.calendar-card-overlay {
+  position: absolute;
+  inset: 0;
+  opacity: 0;
+  background: rgba(0, 0, 0, 0.3);
+  backdrop-filter: blur(2px);
+  transition: opacity var(--transition-normal);
+
+  .calendar-card:hover & {
+    opacity: 1;
+  }
}

-.calendar-card-title {
-  font-size: 12px;
+.calendar-card-overlay-title {
+  position: absolute;
+  top: 6px;
+  left: 6px;
+  right: 6px;
+  font-size: 11px;
  font-weight: 500;
-  color: var(--color-text);
+  color: #fff;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
-  margin-bottom: 4px;
-  transition: color var(--transition-normal);
+  text-shadow: 0 1px 2px rgba(0, 0, 0, 0.5);
}

-.calendar-card-meta {
+.calendar-card-overlay-tags {
+  position: absolute;
+  bottom: 5px;
+  left: 5px;
+  right: 5px;
  display: flex;
  gap: 3px;
  flex-wrap: wrap;
+
+  :deep(.tag) {
+    background: rgba(0, 0, 0, 0.5);
+    border-color: rgba(255, 255, 255, 0.4);
+    color: #fff;
+    font-size: 9px;
+    padding: 1px 5px;
+  }
}

// Empty day
@@ -1,30 +1,197 @@
-<script lang="ts" setup>
+<script lang="tsx" setup>
+import { NDataTable, NProgress, type DataTableColumns } from 'naive-ui';
+import type { QbTorrentInfo, TorrentGroup } from '#/downloader';

definePage({
  name: 'Downloader',
});

const { t } = useMyI18n();
const { config } = storeToRefs(useConfigStore());
const { getConfig } = useConfigStore();
+const { groups, selectedHashes, loading } = storeToRefs(useDownloaderStore());
+const {
+  getAll,
+  pauseSelected,
+  resumeSelected,
+  deleteSelected,
+  toggleHash,
+  toggleGroup,
+  clearSelection,
+} = useDownloaderStore();

const isNull = computed(() => {
  return config.value.downloader.host === '';
});

const url = computed(() => {
  const downloader = config.value.downloader;
  const host = downloader.host.replace(/http(s?)\:\/\//, '');
  const protocol = downloader.ssl ? 'https' : 'http';

  return `${protocol}://${host}`;
});

+let timer: ReturnType<typeof setInterval> | null = null;

onActivated(() => {
  getConfig();
+  if (!isNull.value) {
+    getAll();
+    timer = setInterval(getAll, 5000);
+  }
});

+onDeactivated(() => {
+  if (timer) {
+    clearInterval(timer);
+    timer = null;
+  }
+  clearSelection();
+});
+
+function formatSize(bytes: number): string {
+  if (bytes === 0) return '0 B';
+  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
+  const i = Math.floor(Math.log(bytes) / Math.log(1024));
+  return (bytes / Math.pow(1024, i)).toFixed(1) + ' ' + units[i];
+}
+
+function formatSpeed(bytesPerSec: number): string {
+  if (bytesPerSec === 0) return '-';
+  return formatSize(bytesPerSec) + '/s';
+}
+
+function formatEta(seconds: number): string {
+  if (seconds <= 0 || seconds === 8640000) return '-';
+  if (seconds < 60) return `${seconds}s`;
+  if (seconds < 3600) return `${Math.floor(seconds / 60)}m`;
+  const h = Math.floor(seconds / 3600);
+  const m = Math.floor((seconds % 3600) / 60);
+  return `${h}h${m}m`;
+}
+
+function stateLabel(state: string): string {
+  const map: Record<string, string> = {
+    downloading: t('downloader.state.downloading'),
+    uploading: t('downloader.state.seeding'),
+    pausedDL: t('downloader.state.paused'),
+    pausedUP: t('downloader.state.paused'),
+    stalledDL: t('downloader.state.stalled'),
+    stalledUP: t('downloader.state.seeding'),
+    queuedDL: t('downloader.state.queued'),
+    queuedUP: t('downloader.state.queued'),
+    checkingDL: t('downloader.state.checking'),
+    checkingUP: t('downloader.state.checking'),
+    error: t('downloader.state.error'),
+    missingFiles: t('downloader.state.error'),
+    metaDL: t('downloader.state.metadata'),
+  };
+  return map[state] || state;
+}
+
+function stateType(state: string): string {
+  if (state.includes('paused')) return 'inactive';
+  if (state === 'downloading' || state === 'forcedDL') return 'active';
+  if (state.includes('UP') || state === 'uploading') return 'primary';
+  if (state === 'error' || state === 'missingFiles') return 'warn';
+  return 'primary';
+}
+
+function isGroupAllSelected(group: TorrentGroup): boolean {
+  return group.torrents.every((t) => selectedHashes.value.includes(t.hash));
+}
+
+function tableColumns(): DataTableColumns<QbTorrentInfo> {
+  return [
+    {
+      type: 'selection',
+    },
+    {
+      title: t('downloader.torrent.name'),
+      key: 'name',
+      ellipsis: { tooltip: true },
+      minWidth: 200,
+    },
+    {
+      title: t('downloader.torrent.progress'),
+      key: 'progress',
+      width: 160,
+      render(row: QbTorrentInfo) {
+        return (
+          <NProgress
+            type="line"
+            percentage={Math.round(row.progress * 100)}
+            indicator-placement="inside"
+            processing={row.state === 'downloading' || row.state === 'forcedDL'}
+          />
+        );
+      },
+    },
+    {
+      title: t('downloader.torrent.status'),
+      key: 'state',
+      width: 100,
+      render(row: QbTorrentInfo) {
+        return <ab-tag type={stateType(row.state)} title={stateLabel(row.state)} />;
+      },
+    },
+    {
+      title: t('downloader.torrent.size'),
+      key: 'size',
+      width: 100,
+      render(row: QbTorrentInfo) {
+        return formatSize(row.size);
+      },
+    },
+    {
+      title: t('downloader.torrent.dlspeed'),
+      key: 'dlspeed',
+      width: 110,
+      render(row: QbTorrentInfo) {
+        return formatSpeed(row.dlspeed);
+      },
+    },
+    {
+      title: t('downloader.torrent.upspeed'),
+      key: 'upspeed',
+      width: 110,
+      render(row: QbTorrentInfo) {
+        return formatSpeed(row.upspeed);
+      },
+    },
+    {
+      title: 'ETA',
+      key: 'eta',
+      width: 80,
+      render(row: QbTorrentInfo) {
+        return formatEta(row.eta);
+      },
+    },
+    {
+      title: t('downloader.torrent.peers'),
+      key: 'peers',
+      width: 90,
+      render(row: QbTorrentInfo) {
+        return `${row.num_seeds} / ${row.num_leechs}`;
+      },
+    },
+  ];
+}
+
+function tableRowKey(row: QbTorrentInfo) {
+  return row.hash;
+}
+
+function onCheckedChange(group: TorrentGroup, keys: string[]) {
+  const groupHashes = group.torrents.map((t) => t.hash);
+  const otherSelected = selectedHashes.value.filter(
+    (h) => !groupHashes.includes(h)
+  );
+  selectedHashes.value = [...otherSelected, ...keys];
+}
+
+function groupCheckedKeys(group: TorrentGroup): string[] {
+  return group.torrents
+    .filter((t) => selectedHashes.value.includes(t.hash))
+    .map((t) => t.hash);
+}
</script>

<template>
-  <div class="page-embed">
+  <div class="page-downloader">
    <div v-if="isNull" class="empty-guide">
      <div class="empty-guide-header anim-fade-in">
        <div class="empty-guide-title">{{ $t('downloader.empty.title') }}</div>
@@ -62,30 +229,114 @@ onActivated(() => {
        </RouterLink>
      </div>

-    <iframe
-      v-else
-      :src="url"
-      frameborder="0"
-      allowfullscreen="true"
-      class="embed-frame"
-    ></iframe>
+    <div v-else class="downloader-content">
+      <div v-if="groups.length === 0 && !loading" class="downloader-empty">
+        {{ $t('downloader.empty_torrents') }}
+      </div>
+
+      <div v-else class="downloader-groups">
+        <ab-fold-panel
+          v-for="group in groups"
+          :key="group.savePath"
+          :title="`${group.name} (${group.count})`"
+          :default-open="true"
+        >
+          <NDataTable
+            :columns="tableColumns()"
+            :data="group.torrents"
+            :row-key="tableRowKey"
+            :pagination="false"
+            :bordered="false"
+            :checked-row-keys="groupCheckedKeys(group)"
+            size="small"
+            @update:checked-row-keys="(keys: any) => onCheckedChange(group, keys as string[])"
+          />
+        </ab-fold-panel>
+      </div>
+
+      <Transition name="fade">
+        <div v-if="selectedHashes.length > 0" class="action-bar">
+          <span class="action-bar-count">
+            {{ selectedHashes.length }} {{ $t('downloader.selected') }}
+          </span>
+          <div class="action-bar-buttons">
+            <ab-button @click="resumeSelected">{{ $t('downloader.action.resume') }}</ab-button>
+            <ab-button @click="pauseSelected">{{ $t('downloader.action.pause') }}</ab-button>
+            <ab-button type="warn" @click="deleteSelected(false)">{{ $t('downloader.action.delete') }}</ab-button>
+          </div>
+        </div>
+      </Transition>
+    </div>
  </div>
</template>

<style lang="scss" scoped>
-.page-embed {
+.page-downloader {
  overflow: auto;
  flex-grow: 1;
  display: flex;
  flex-direction: column;
}

-.embed-frame {
-  width: 100%;
-  height: 100%;
+.downloader-content {
+  display: flex;
+  flex-direction: column;
+  flex: 1;
+  gap: 12px;
+  padding-bottom: 60px;
}

+.downloader-groups {
+  display: flex;
+  flex-direction: column;
+  gap: 12px;
+}
+
+.downloader-empty {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  flex: 1;
+  color: var(--color-text-secondary);
+  font-size: 14px;
+}
+
+.action-bar {
+  position: fixed;
+  bottom: 24px;
+  left: 50%;
+  transform: translateX(-50%);
+  display: flex;
+  align-items: center;
+  gap: 16px;
+  padding: 10px 20px;
+  border-radius: var(--radius-md);
+  background: var(--color-surface);
+  border: 1px solid var(--color-border);
+  box-shadow: 0 4px 16px rgba(0, 0, 0, 0.12);
+  z-index: 100;
+}
+
+.action-bar-count {
+  font-size: 13px;
+  color: var(--color-text-secondary);
+  white-space: nowrap;
+}
+
+.action-bar-buttons {
+  display: flex;
+  gap: 8px;
+}
+
+.fade-enter-active,
+.fade-leave-active {
+  transition: opacity 0.2s ease, transform 0.2s ease;
+}
+
+.fade-enter-from,
+.fade-leave-to {
+  opacity: 0;
+  transform: translateX(-50%) translateY(8px);
+}
+
.empty-guide {
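The formatting helpers in the page script above are pure functions, so their behavior is easy to spot-check; the expected outputs below follow directly from the code as written (8640000 is the sentinel the code itself treats as an unknown ETA):

// Spot checks for formatSize / formatSpeed / formatEta as defined above.
formatSize(0);        // "0 B"
formatSize(1536);     // "1.5 KB" — floor(log1024(1536)) = 1, then 1536 / 1024 = 1.5
formatSpeed(1048576); // "1.0 MB/s"
formatEta(59);        // "59s"
formatEta(5400);      // "1h30m"
formatEta(8640000);   // "-" (sentinel for unknown/infinite ETA)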
117 webui/src/store/downloader.ts Normal file
@@ -0,0 +1,117 @@
import type { QbTorrentInfo, TorrentGroup } from '#/downloader';

export const useDownloaderStore = defineStore('downloader', () => {
  const torrents = ref<QbTorrentInfo[]>([]);
  const selectedHashes = ref<string[]>([]);
  const loading = ref(false);

  const groups = computed<TorrentGroup[]>(() => {
    const map = new Map<string, QbTorrentInfo[]>();
    for (const t of torrents.value) {
      const key = t.save_path;
      if (!map.has(key)) {
        map.set(key, []);
      }
      map.get(key)!.push(t);
    }

    const result: TorrentGroup[] = [];
    for (const [savePath, items] of map) {
      const parts = savePath.replace(/\/$/, '').split('/');
      const name = parts[parts.length - 1] || savePath;
      const totalSize = items.reduce((sum, t) => sum + t.size, 0);
      const overallProgress =
        totalSize > 0
          ? items.reduce((sum, t) => sum + t.size * t.progress, 0) / totalSize
          : 0;
      result.push({
        name,
        savePath,
        totalSize,
        overallProgress,
        count: items.length,
        torrents: items.sort((a, b) => b.added_on - a.added_on),
      });
    }

    return result.sort((a, b) => a.name.localeCompare(b.name));
  });

  async function getAll() {
    loading.value = true;
    try {
      torrents.value = await apiDownloader.getTorrents();
    } catch {
      torrents.value = [];
    } finally {
      loading.value = false;
    }
  }

  const opts = {
    showMessage: true,
    onSuccess() {
      getAll();
      selectedHashes.value = [];
    },
  };

  const { execute: pauseSelected } = useApi(
    () => apiDownloader.pause(selectedHashes.value),
    opts
  );
  const { execute: resumeSelected } = useApi(
    () => apiDownloader.resume(selectedHashes.value),
    opts
  );
  const { execute: deleteSelected } = useApi(
    (deleteFiles: boolean = false) =>
      apiDownloader.deleteTorrents(selectedHashes.value, deleteFiles),
    opts
  );

  function toggleHash(hash: string) {
    const idx = selectedHashes.value.indexOf(hash);
    if (idx === -1) {
      selectedHashes.value.push(hash);
    } else {
      selectedHashes.value.splice(idx, 1);
    }
  }

  function toggleGroup(group: TorrentGroup) {
    const groupHashes = group.torrents.map((t) => t.hash);
    const allSelected = groupHashes.every((h) =>
      selectedHashes.value.includes(h)
    );
    if (allSelected) {
      selectedHashes.value = selectedHashes.value.filter(
        (h) => !groupHashes.includes(h)
      );
    } else {
      const toAdd = groupHashes.filter(
        (h) => !selectedHashes.value.includes(h)
      );
      selectedHashes.value.push(...toAdd);
    }
  }

  function clearSelection() {
    selectedHashes.value = [];
  }

  return {
    torrents,
    groups,
    selectedHashes,
    loading,

    getAll,
    pauseSelected,
    resumeSelected,
    deleteSelected,
    toggleHash,
    toggleGroup,
    clearSelection,
  };
});
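A minimal sketch of how a component consumes this store — the names come from the store's return value above, and the polling pattern mirrors the Downloader page earlier in this commit; the consumer itself is hypothetical:

// Hypothetical consumer of useDownloaderStore.
const store = useDownloaderStore();
const { groups, loading } = storeToRefs(store);

await store.getAll();                // fetch torrents, regrouped by save_path
groups.value.forEach((g) => console.log(`${g.name}: ${g.count} torrents`));

store.toggleGroup(groups.value[0]);  // select/deselect a whole group at once
await store.pauseSelected();         // acts on selectedHashes, then refetches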
@@ -14,6 +14,10 @@ export const useSearchStore = defineStore('search', () => {

  const loading = computed(() => status.value !== 'CLOSED');

+ const bangumiList = computed(() =>
+   searchData.value.map((item, index) => ({ order: index, value: item }))
+ );
+
  async function getProviders() {
    providers.value = await apiSearch.getProvider();
    provider.value = providers.value[0];
@@ -21,18 +25,20 @@

  function clearSearch() {
    keyword.value = '';
    searchData.value = [];
    closeSearch();
  }

  return {
    keyword,
+   inputValue: keyword,
    loading,
    provider,
    providers,
    searchData,
+   bangumiList,

    clearSearch,
    getProviders,
    openSearch,
+   onSearch: openSearch,
    closeSearch,
  };
});
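The added bangumiList computed only wraps each raw search result with a stable order index so components can key on it; illustrated with hypothetical data:

// If searchData.value is [{ official_title: 'A' }, { official_title: 'B' }],
// then bangumiList.value is:
//   [ { order: 0, value: { official_title: 'A' } },
//     { order: 1, value: { official_title: 'B' } } ]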
5 webui/src/utils/poster.ts Normal file
@@ -0,0 +1,5 @@
export function resolvePosterUrl(link: string | null | undefined): string {
  if (!link) return '';
  if (link.startsWith('http://') || link.startsWith('https://')) return link;
  return `/${link}`;
}
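The helper's three branches, illustrated (the paths are hypothetical):

resolvePosterUrl(undefined);                       // "" — nothing to render
resolvePosterUrl('https://cdn.example.com/p.jpg'); // external URL passed through unchanged
resolvePosterUrl('posters/123.jpg');               // "/posters/123.jpg" — rooted so the webui requests it from its own origin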
45 webui/types/downloader.ts Normal file
@@ -0,0 +1,45 @@
export type QbTorrentState =
  | 'error'
  | 'missingFiles'
  | 'uploading'
  | 'pausedUP'
  | 'queuedUP'
  | 'stalledUP'
  | 'checkingUP'
  | 'forcedUP'
  | 'allocating'
  | 'downloading'
  | 'metaDL'
  | 'pausedDL'
  | 'queuedDL'
  | 'stalledDL'
  | 'checkingDL'
  | 'forcedDL'
  | 'checkingResumeData'
  | 'moving'
  | 'unknown';

export interface QbTorrentInfo {
  hash: string;
  name: string;
  size: number;
  progress: number;
  dlspeed: number;
  upspeed: number;
  num_seeds: number;
  num_leechs: number;
  state: QbTorrentState;
  eta: number;
  category: string;
  save_path: string;
  added_on: number;
}

export interface TorrentGroup {
  name: string;
  savePath: string;
  totalSize: number;
  overallProgress: number;
  count: number;
  torrents: QbTorrentInfo[];
}
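overallProgress in TorrentGroup is the size-weighted mean the store computes above; a quick worked example with hypothetical numbers shows why it differs from a plain average:

// Two torrents: 1 GiB at 50% and 3 GiB at 100%.
const items = [
  { size: 1, progress: 0.5 },
  { size: 3, progress: 1.0 },
];
const totalSize = items.reduce((s, t) => s + t.size, 0); // 4
const overall =
  items.reduce((s, t) => s + t.size * t.progress, 0) / totalSize;
// (0.5 + 3.0) / 4 = 0.875 — not the unweighted mean 0.75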
3 webui/types/dts/auto-imports.d.ts vendored
@@ -10,6 +10,7 @@ declare global {
  const apiCheck: typeof import('../../src/api/check')['apiCheck']
  const apiConfig: typeof import('../../src/api/config')['apiConfig']
  const apiDownload: typeof import('../../src/api/download')['apiDownload']
+ const apiDownloader: typeof import('../../src/api/downloader')['apiDownloader']
  const apiLog: typeof import('../../src/api/log')['apiLog']
  const apiPasskey: typeof import('../../src/api/passkey')['apiPasskey']
  const apiProgram: typeof import('../../src/api/program')['apiProgram']
@@ -70,6 +71,7 @@ declare global {
  const readonly: typeof import('vue')['readonly']
  const ref: typeof import('vue')['ref']
  const resolveComponent: typeof import('vue')['resolveComponent']
+ const resolvePosterUrl: typeof import('../../src/utils/poster')['resolvePosterUrl']
  const setActivePinia: typeof import('pinia')['setActivePinia']
  const setMapStoreSuffix: typeof import('pinia')['setMapStoreSuffix']
  const shallowReactive: typeof import('vue')['shallowReactive']
@@ -95,6 +97,7 @@ declare global {
  const useCssModule: typeof import('vue')['useCssModule']
  const useCssVars: typeof import('vue')['useCssVars']
  const useDarkMode: typeof import('../../src/hooks/useDarkMode')['useDarkMode']
+ const useDownloaderStore: typeof import('../../src/store/downloader')['useDownloaderStore']
  const useI18n: typeof import('vue-i18n')['useI18n']
  const useIntervalFn: typeof import('@vueuse/core')['useIntervalFn']
  const useLocalStorage: typeof import('@vueuse/core')['useLocalStorage']