fix: improve rename reliability and add torrent tagging API

- Fix qBittorrent rename verification (verify file actually renamed)
- Add pending rename cooldown to prevent spam when rename delayed
- Add torrent tagging API for accurate offset lookup
- Add auto calendar refresh every 24 hours
- Fix frontend error handling (don't logout on server errors)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Estrella Pan
2026-01-27 20:54:04 +01:00
parent b0c00598a5
commit b33ec01363
9 changed files with 391 additions and 18 deletions

View File

@@ -1,6 +1,6 @@
[project]
name = "auto-bangumi"
version = "3.2.2"
version = "3.2.3"
description = "AutoBangumi - Automated anime download manager"
requires-python = ">=3.13"
dependencies = [

View File

@@ -1,9 +1,14 @@
import logging
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from module.database import Database
from module.downloader import DownloadClient
from module.security.api import get_current_user
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/downloader", tags=["downloader"])
@@ -16,6 +21,12 @@ class TorrentDeleteRequest(BaseModel):
delete_files: bool = False
class TorrentTagRequest(BaseModel):
    """Request to tag a torrent with a bangumi ID."""
    # Torrent hash identifying the target torrent in the download client.
    hash: str
    # Primary key of the bangumi database row to associate with the torrent
    # (used by the renamer to look up episode/season offsets via the "ab:ID" tag).
    bangumi_id: int
@router.get("/torrents", dependencies=[Depends(get_current_user)])
async def get_torrents():
async with DownloadClient() as client:
@@ -44,3 +55,91 @@ async def delete_torrents(req: TorrentDeleteRequest):
async with DownloadClient() as client:
await client.delete_torrent(hashes, delete_files=req.delete_files)
return {"msg_en": "Torrents deleted", "msg_zh": "种子已删除"}
@router.post("/torrents/tag", dependencies=[Depends(get_current_user)])
async def tag_torrent(req: TorrentTagRequest):
    """Tag a torrent with a bangumi ID for accurate offset lookup.

    This adds the 'ab:ID' tag to the torrent in qBittorrent, which allows
    the renamer to look up the correct episode/season offset.
    """
    # The bangumi row must exist before its ID is attached to a torrent.
    with Database() as db:
        record = db.bangumi.search_id(req.bangumi_id)

    if not record:
        return {
            "status": False,
            "msg_en": f"Bangumi {req.bangumi_id} not found",
            "msg_zh": f"未找到番剧 {req.bangumi_id}",
        }

    # Tag format "ab:<id>" is what the renamer's offset lookup expects.
    tag = f"ab:{req.bangumi_id}"
    async with DownloadClient() as client:
        await client.add_tag(req.hash, tag)

    return {
        "status": True,
        "msg_en": f"Tagged torrent with {tag}",
        "msg_zh": f"已为种子添加标签 {tag}",
    }
@router.post("/torrents/tag/auto", dependencies=[Depends(get_current_user)])
async def auto_tag_torrents():
    """Auto-tag all untagged Bangumi torrents based on name/path matching.

    This helps fix torrents that were added before tagging was implemented.
    Returns the number of torrents tagged and any that couldn't be matched.
    """
    tagged_count = 0
    unmatched = []
    async with DownloadClient() as client:
        # Get all Bangumi torrents
        torrents = await client.get_torrent_info(category="Bangumi", status_filter=None)
        with Database() as db:
            for torrent in torrents:
                torrent_hash = torrent["hash"]
                torrent_name = torrent["name"]
                save_path = torrent["save_path"]
                # qBittorrent reports tags as a comma-separated string; guard
                # against a missing or None value.
                tags = torrent.get("tags", "") or ""

                # Skip torrents that already carry an "ab:<id>" tag. Check each
                # tag's prefix rather than a raw substring search, so an
                # unrelated tag that merely contains "ab:" (e.g. "collab:x")
                # does not count as already tagged.
                if any(t.strip().startswith("ab:") for t in tags.split(",")):
                    continue

                # Match by torrent name first, then fall back to save path.
                bangumi = db.bangumi.match_torrent(torrent_name)
                if not bangumi:
                    bangumi = db.bangumi.match_by_save_path(save_path)

                if bangumi and not bangumi.deleted:
                    tag = f"ab:{bangumi.id}"
                    await client.add_tag(torrent_hash, tag)
                    tagged_count += 1
                    logger.info(
                        f"[AutoTag] Tagged '{torrent_name[:50]}...' with {tag} "
                        f"(matched: {bangumi.official_title})"
                    )
                else:
                    # Collected for the response so the caller can see which
                    # torrents still need a manual tag.
                    unmatched.append({
                        "hash": torrent_hash,
                        "name": torrent_name,
                        "save_path": save_path,
                    })
    return {
        "status": True,
        "tagged_count": tagged_count,
        "unmatched_count": len(unmatched),
        "unmatched": unmatched[:10],  # Return first 10 unmatched for debugging
        "msg_en": f"Tagged {tagged_count} torrents, {len(unmatched)} could not be matched",
        "msg_zh": f"已标记 {tagged_count} 个种子,{len(unmatched)} 个无法匹配",
    }

View File

@@ -13,7 +13,7 @@ from module.update import (
start_up,
)
from .sub_thread import OffsetScanThread, RenameThread, RSSThread
from .sub_thread import CalendarRefreshThread, OffsetScanThread, RenameThread, RSSThread
logger = logging.getLogger(__name__)
@@ -29,7 +29,7 @@ figlet = r"""
"""
class Program(RenameThread, RSSThread, OffsetScanThread):
class Program(RenameThread, RSSThread, OffsetScanThread, CalendarRefreshThread):
def __init__(self):
super().__init__()
self._startup_done = False
@@ -101,6 +101,8 @@ class Program(RenameThread, RSSThread, OffsetScanThread):
self.rss_start()
# Start offset scanner for background mismatch detection
self.scan_start()
# Start calendar refresh (every 24 hours)
self.calendar_start()
logger.info("Program running.")
return ResponseModel(
status=True,
@@ -115,6 +117,7 @@ class Program(RenameThread, RSSThread, OffsetScanThread):
await self.rename_stop()
await self.rss_stop()
await self.scan_stop()
await self.calendar_stop()
return ResponseModel(
status=True,
status_code=200,

View File

@@ -3,7 +3,7 @@ import logging
from module.conf import settings
from module.downloader import DownloadClient
from module.manager import Renamer, eps_complete
from module.manager import Renamer, TorrentManager, eps_complete
from module.notification import PostNotification
from module.rss import RSSAnalyser, RSSEngine
@@ -12,6 +12,9 @@ from .status import ProgramStatus
logger = logging.getLogger(__name__)
# Calendar refresh interval in seconds (24 hours)
CALENDAR_REFRESH_INTERVAL = 24 * 60 * 60
class RSSThread(ProgramStatus):
def __init__(self):
@@ -134,3 +137,51 @@ class OffsetScanThread(ProgramStatus):
pass
self._scan_task = None
logger.info("[OffsetScanThread] Stopped offset scanner")
class CalendarRefreshThread(ProgramStatus):
    """Background thread for refreshing bangumi calendar data every 24 hours."""

    def __init__(self):
        super().__init__()
        # Handle to the running refresh task; None when not started.
        self._calendar_task: asyncio.Task | None = None

    async def calendar_loop(self):
        """Refresh the calendar every CALENDAR_REFRESH_INTERVAL seconds until stopped."""
        # Initial delay to let the system stabilize. Wait on stop_event with a
        # timeout instead of a blind sleep so shutdown during the delay is
        # honored immediately rather than after the full 120 seconds.
        try:
            await asyncio.wait_for(self.stop_event.wait(), timeout=120)
        except asyncio.TimeoutError:
            pass
        while not self.stop_event.is_set():
            try:
                with TorrentManager() as manager:
                    resp = await manager.refresh_calendar()
                    if resp.status:
                        logger.info("[CalendarRefreshThread] Calendar refresh completed")
                    else:
                        logger.warning(
                            f"[CalendarRefreshThread] Calendar refresh failed: {resp.msg_en}"
                        )
            except Exception as e:
                # Keep the loop alive across transient failures; the next
                # interval will retry.
                logger.error(f"[CalendarRefreshThread] Error during refresh: {e}")
            try:
                # Sleep for the interval, but wake up early if a stop is requested.
                await asyncio.wait_for(
                    self.stop_event.wait(),
                    timeout=CALENDAR_REFRESH_INTERVAL,
                )
            except asyncio.TimeoutError:
                pass

    def calendar_start(self):
        """Start the background refresh task (no-op if already running)."""
        # Guard against double-start: a second create_task would orphan the
        # previous task and run two refresh loops concurrently.
        if self._calendar_task and not self._calendar_task.done():
            logger.warning("[CalendarRefreshThread] Calendar refresh already running")
            return
        self._calendar_task = asyncio.create_task(self.calendar_loop())
        logger.info("[CalendarRefreshThread] Started calendar refresh (every 24h)")

    async def calendar_stop(self):
        """Signal the loop to stop, cancel the task, and wait for it to finish."""
        if self._calendar_task and not self._calendar_task.done():
            # NOTE(review): stop_event appears shared with the other
            # ProgramStatus threads; setting it here matches the original
            # behavior since stops are issued together by Program.
            self.stop_event.set()
            self._calendar_task.cancel()
            try:
                await self._calendar_task
            except asyncio.CancelledError:
                pass
        self._calendar_task = None
        logger.info("[CalendarRefreshThread] Stopped calendar refresh")

View File

@@ -202,7 +202,23 @@ class QbDownloader:
if resp.status_code == 409:
logger.debug(f"Conflict409Error: {old_path} >> {new_path}")
return False
return resp.status_code == 200
if resp.status_code != 200:
return False
# Verify the rename actually happened by checking file list
# qBittorrent can return 200 but delay the actual rename (e.g., while seeding)
await asyncio.sleep(0.5) # Brief delay to allow qBittorrent to process
files = await self.torrents_files(torrent_hash)
for f in files:
if f.get("name") == new_path:
return True
if f.get("name") == old_path:
# File still has old name - rename didn't actually happen
logger.debug(
f"[Downloader] Rename API returned 200 but file unchanged: {old_path}"
)
return False
return True # new_path found or old_path not found
except (httpx.ConnectError, httpx.RequestError, httpx.TimeoutException) as e:
logger.warning(f"[Downloader] Failed to rename file {old_path}: {e}")
return False

View File

@@ -231,3 +231,8 @@ class DownloadClient(TorrentPath):
if hasattr(self.client, "get_torrents_by_tag"):
return await self.client.get_torrents_by_tag(tag)
return []
async def add_tag(self, torrent_hash: str, tag: str):
    """Attach the given tag to the torrent identified by torrent_hash."""
    await self.client.add_tag(torrent_hash, tag)
    # Log only an 8-char hash prefix to keep debug output readable.
    short_hash = torrent_hash[:8]
    logger.debug(f"[Downloader] Added tag '{tag}' to torrent {short_hash}...")

View File

@@ -1,6 +1,7 @@
import asyncio
import logging
import re
import time
from module.conf import settings
from module.database import Database
@@ -10,6 +11,12 @@ from module.parser import TitleParser
logger = logging.getLogger(__name__)
# Module-level cache to track pending renames that qBittorrent hasn't processed yet
# Key: (torrent_hash, old_path, new_path), Value: timestamp of last attempt
# This prevents spamming the same rename when qBittorrent returns 200 but doesn't actually rename
_pending_renames: dict[tuple[str, str, str], float] = {}
_PENDING_RENAME_COOLDOWN = 300 # 5 minutes cooldown before retrying same rename
class Renamer(DownloadClient):
def __init__(self):
@@ -100,9 +107,21 @@ class Renamer(DownloadClient):
)
if media_path != new_path:
if new_path not in self.check_pool.keys():
# Check if this rename was recently attempted but didn't take effect
# (qBittorrent can return 200 but delay actual rename while seeding)
pending_key = (_hash, media_path, new_path)
last_attempt = _pending_renames.get(pending_key)
if last_attempt and (time.time() - last_attempt) < _PENDING_RENAME_COOLDOWN:
logger.debug(
f"[Renamer] Skipping rename (pending cooldown): {media_path}"
)
return None
if await self.rename_torrent_file(
_hash=_hash, old_path=media_path, new_path=new_path
):
# Rename verified successful, remove from pending cache
_pending_renames.pop(pending_key, None)
# Season comes from folder which already has offset applied
# Only apply episode offset
original_ep = int(ep.episode)
@@ -114,6 +133,18 @@ class Renamer(DownloadClient):
season=ep.season,
episode=adjusted_episode,
)
else:
# Rename API returned success but file wasn't actually renamed
# Add to pending cache to avoid spamming
_pending_renames[pending_key] = time.time()
# Clean up old entries from cache
current_time = time.time()
expired_keys = [
k for k, v in _pending_renames.items()
if current_time - v > _PENDING_RENAME_COOLDOWN * 2
]
for k in expired_keys:
_pending_renames.pop(k, None)
else:
logger.warning(f"[Renamer] {media_path} parse failed")
if settings.bangumi_manage.remove_bad_torrent:
@@ -263,8 +294,9 @@ class Renamer(DownloadClient):
# Then try matching by torrent name
bangumi = db.bangumi.match_torrent(torrent_name)
if bangumi:
logger.debug(
f"[Renamer] Found offsets via torrent name match: ep={bangumi.episode_offset}, season={bangumi.season_offset}"
logger.info(
f"[Renamer] Matched bangumi '{bangumi.official_title}' (id={bangumi.id}) via name, "
f"offsets: ep={bangumi.episode_offset}, season={bangumi.season_offset}"
)
return bangumi.episode_offset, bangumi.season_offset
@@ -275,14 +307,15 @@ class Renamer(DownloadClient):
# Try with normalized path if exact match failed
bangumi = db.bangumi.match_by_save_path(normalized_save_path)
if bangumi:
logger.debug(
f"[Renamer] Found offsets via save_path match: ep={bangumi.episode_offset}, season={bangumi.season_offset}"
logger.info(
f"[Renamer] Matched bangumi '{bangumi.official_title}' (id={bangumi.id}) via save_path, "
f"offsets: ep={bangumi.episode_offset}, season={bangumi.season_offset}"
)
return bangumi.episode_offset, bangumi.season_offset
logger.debug(
f"[Renamer] No bangumi found for torrent: hash={torrent_hash[:8] if torrent_hash else 'N/A'}, "
f"name={torrent_name[:50] if torrent_name else 'N/A'}..., path={save_path}"
logger.info(
f"[Renamer] No bangumi match for torrent (using offset=0): "
f"name={torrent_name[:60] if torrent_name else 'N/A'}..."
)
except Exception as e:
logger.debug(f"[Renamer] Could not lookup offsets for {save_path}: {e}")

View File

@@ -284,3 +284,152 @@ class TestDeleteTorrents:
mock_download_client.delete_torrent.assert_called_once_with(
"abc123|def456", delete_files=False
)
# ---------------------------------------------------------------------------
# POST /downloader/torrents/tag
# ---------------------------------------------------------------------------
class TestTagTorrent:
    """Tests for POST /downloader/torrents/tag (manual single-torrent tagging)."""

    def test_tag_torrent_success(self, authed_client, mock_download_client):
        """POST /downloader/torrents/tag adds bangumi tag to torrent."""
        from module.models import Bangumi
        # Minimal bangumi row the endpoint will look up by ID.
        mock_bangumi = Bangumi(
            id=123,
            official_title="Test Anime",
            title_raw="Test",
            season=1,
            rss_link="",
            poster_link="",
            added=False,
            deleted=False,
        )
        # Patch the async download client context manager used by the endpoint.
        with patch("module.api.downloader.DownloadClient") as MockClient:
            MockClient.return_value.__aenter__ = AsyncMock(
                return_value=mock_download_client
            )
            MockClient.return_value.__aexit__ = AsyncMock(return_value=False)
            # Patch the Database context manager so search_id returns our row.
            with patch("module.api.downloader.Database") as MockDB:
                mock_db = MockDB.return_value.__enter__.return_value
                mock_db.bangumi.search_id.return_value = mock_bangumi
                response = authed_client.post(
                    "/api/v1/downloader/torrents/tag",
                    json={"hash": "abc123", "bangumi_id": 123},
                )
        assert response.status_code == 200
        data = response.json()
        assert data["status"] is True
        # The response message should name the "ab:<id>" tag that was applied.
        assert "ab:123" in data["msg_en"]
        # The client must have been asked to tag exactly this hash with this tag.
        mock_download_client.add_tag.assert_called_once_with("abc123", "ab:123")

    def test_tag_torrent_bangumi_not_found(self, authed_client, mock_download_client):
        """POST /downloader/torrents/tag fails if bangumi doesn't exist."""
        # No DownloadClient patch needed: the endpoint returns before opening one.
        with patch("module.api.downloader.Database") as MockDB:
            mock_db = MockDB.return_value.__enter__.return_value
            mock_db.bangumi.search_id.return_value = None
            response = authed_client.post(
                "/api/v1/downloader/torrents/tag",
                json={"hash": "abc123", "bangumi_id": 999},
            )
        # Endpoint reports the failure in the body, not via an HTTP error status.
        assert response.status_code == 200
        data = response.json()
        assert data["status"] is False
        assert "not found" in data["msg_en"]
# ---------------------------------------------------------------------------
# POST /downloader/torrents/tag/auto
# ---------------------------------------------------------------------------
class TestAutoTagTorrents:
    """Tests for POST /downloader/torrents/tag/auto (bulk retroactive tagging)."""

    def test_auto_tag_success(self, authed_client, mock_download_client):
        """POST /downloader/torrents/tag/auto tags untagged torrents."""
        from module.models import Bangumi
        # Bangumi row that name matching will resolve to.
        mock_bangumi = Bangumi(
            id=123,
            official_title="Test Anime",
            title_raw="Test Anime",
            season=1,
            rss_link="",
            poster_link="",
            added=False,
            deleted=False,
        )
        # Mock torrents - one untagged, one already tagged
        mock_download_client.get_torrent_info.return_value = [
            {
                "hash": "abc123",
                "name": "[TestGroup] Test Anime - 01.mkv",
                "save_path": "/downloads/Test Anime/Season 1",
                "tags": "",
            },
            {
                "hash": "def456",
                "name": "[TestGroup] Other Anime - 01.mkv",
                "save_path": "/downloads/Other Anime/Season 1",
                "tags": "ab:456",  # Already tagged
            },
        ]
        with patch("module.api.downloader.DownloadClient") as MockClient:
            MockClient.return_value.__aenter__ = AsyncMock(
                return_value=mock_download_client
            )
            MockClient.return_value.__aexit__ = AsyncMock(return_value=False)
            with patch("module.api.downloader.Database") as MockDB:
                mock_db = MockDB.return_value.__enter__.return_value
                # Name match succeeds, so the save_path fallback is never needed.
                mock_db.bangumi.match_torrent.return_value = mock_bangumi
                mock_db.bangumi.match_by_save_path.return_value = None
                response = authed_client.post("/api/v1/downloader/torrents/tag/auto")
        assert response.status_code == 200
        data = response.json()
        assert data["status"] is True
        assert data["tagged_count"] == 1
        # Only the untagged torrent should be tagged
        mock_download_client.add_tag.assert_called_once_with("abc123", "ab:123")

    def test_auto_tag_no_matches(self, authed_client, mock_download_client):
        """POST /downloader/torrents/tag/auto handles unmatched torrents."""
        # Single untagged torrent that will not match any bangumi row.
        mock_download_client.get_torrent_info.return_value = [
            {
                "hash": "abc123",
                "name": "[TestGroup] Unknown Anime - 01.mkv",
                "save_path": "/downloads/Unknown/Season 1",
                "tags": "",
            },
        ]
        with patch("module.api.downloader.DownloadClient") as MockClient:
            MockClient.return_value.__aenter__ = AsyncMock(
                return_value=mock_download_client
            )
            MockClient.return_value.__aexit__ = AsyncMock(return_value=False)
            with patch("module.api.downloader.Database") as MockDB:
                mock_db = MockDB.return_value.__enter__.return_value
                # Both lookup strategies fail -> torrent lands in "unmatched".
                mock_db.bangumi.match_torrent.return_value = None
                mock_db.bangumi.match_by_save_path.return_value = None
                response = authed_client.post("/api/v1/downloader/torrents/tag/auto")
        assert response.status_code == 200
        data = response.json()
        assert data["status"] is True
        assert data["tagged_count"] == 0
        assert data["unmatched_count"] == 1
        # Response exposes the unmatched torrents (truncated list) for debugging.
        assert len(data["unmatched"]) == 1
        mock_download_client.add_tag.assert_not_called()

View File

@@ -23,8 +23,24 @@ axios.interceptors.response.use(
const { isLoggedIn } = useAuth();
// Handle network errors (no response from server)
if (!err.response) {
message.error(
returnUserLangText({
en: 'Network error. Please check your connection.',
'zh-CN': '网络错误,请检查连接。',
})
);
const error = {
status: 0,
msg_en: 'Network error',
msg_zh: '网络错误',
};
return Promise.reject(error);
}
switch (status) {
/** token 过期 */
/** token 过期 - only logout on auth errors */
case 401:
isLoggedIn.value = false;
if (errorMsg) message.error(errorMsg);
@@ -33,13 +49,14 @@ axios.interceptors.response.use(
case 406:
if (errorMsg) message.error(errorMsg);
break;
/** 服务器错误 - don't logout, just show error */
case 500:
isLoggedIn.value = false;
message.error(
returnUserLangText({
en: 'Server error!',
'zh-CN': '服务器错误!',
})
errorMsg ||
returnUserLangText({
en: 'Server error. Please try again later.',
'zh-CN': '服务器错误,请稍后重试。',
})
);
break;
}