perf(logging): optimize logging with rotation, async I/O, and lazy formatting

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Estrella Pan
2026-02-22 14:10:56 +01:00
parent c173454a67
commit 5f604e94fd
39 changed files with 198 additions and 116 deletions

View File

@@ -16,7 +16,7 @@ def qb_connect_failed_wait(func):
try:
return await func(*args, **kwargs)
except Exception as e:
logger.debug(f"URL: {args[0]}")
logger.debug("URL: %s", args[0])
logger.warning(e)
logger.warning("Cannot connect to qBittorrent. Wait 5 min and retry...")
await asyncio.sleep(300)
@@ -31,7 +31,7 @@ def api_failed(func):
try:
return await func(*args, **kwargs)
except Exception as e:
logger.debug(f"URL: {args[0]}")
logger.debug("URL: %s", args[0])
logger.warning("Wrong API response.")
logger.debug(e)

View File

@@ -8,11 +8,26 @@ from module.security.api import UNAUTHORIZED, get_current_user
router = APIRouter(prefix="/log", tags=["log"])
_TAIL_BYTES = 512 * 1024 # 512 KB
@router.get("", response_model=str, dependencies=[Depends(get_current_user)])
async def get_log():
if LOG_PATH.exists():
with open(LOG_PATH, "rb") as f:
return Response(f.read(), media_type="text/plain")
f.seek(0, 2)
size = f.tell()
if size > _TAIL_BYTES:
f.seek(-_TAIL_BYTES, 2)
data = f.read()
# Drop first partial line
idx = data.find(b"\n")
if idx != -1:
data = data[idx + 1 :]
else:
f.seek(0)
data = f.read()
return Response(data, media_type="text/plain")
else:
return Response("Log file not found", status_code=404)

View File

@@ -1,13 +1,20 @@
import atexit
import logging
from logging.handlers import QueueHandler, QueueListener, RotatingFileHandler
from pathlib import Path
from queue import SimpleQueue
from .config import settings
LOG_ROOT = Path("data")
LOG_PATH = LOG_ROOT / "log.txt"
_listener: QueueListener | None = None
def setup_logger(level: int = logging.INFO, reset: bool = False):
global _listener
level = logging.DEBUG if settings.log.debug_enable else level
LOG_ROOT.mkdir(exist_ok=True)
@@ -19,15 +26,26 @@ def setup_logger(level: int = logging.INFO, reset: bool = False):
logging.addLevelName(logging.WARNING, "WARNING:")
LOGGING_FORMAT = "[%(asctime)s] %(levelname)-8s %(message)s"
TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(LOGGING_FORMAT, datefmt=TIME_FORMAT)
file_handler = RotatingFileHandler(
LOG_PATH, maxBytes=5 * 1024 * 1024, backupCount=3, encoding="utf-8"
)
file_handler.setFormatter(formatter)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
log_queue: SimpleQueue = SimpleQueue()
queue_handler = QueueHandler(log_queue)
_listener = QueueListener(log_queue, file_handler, stream_handler, respect_handler_level=True)
_listener.start()
atexit.register(_listener.stop)
logging.basicConfig(
level=level,
format=LOGGING_FORMAT,
datefmt=TIME_FORMAT,
encoding="utf-8",
handlers=[
logging.FileHandler(LOG_PATH, encoding="utf-8"),
logging.StreamHandler(),
],
handlers=[queue_handler],
)
# Suppress verbose HTTP request logs from httpx

View File

@@ -11,10 +11,12 @@ logging_config = {
},
"handlers": {
"file": {
"class": "logging.FileHandler",
"class": "logging.handlers.RotatingFileHandler",
"filename": LOG_PATH,
"formatter": "default",
"encoding": "utf-8",
"maxBytes": 5242880,
"backupCount": 3,
},
"console": {
"class": "logging.StreamHandler",

View File

@@ -116,8 +116,11 @@ class BangumiDatabase:
)
if is_semantic_match:
logger.debug(
f"[Database] Found semantic duplicate: '{data.title_raw}' matches "
f"existing '{candidate.title_raw}' (official: {data.official_title})"
"[Database] Found semantic duplicate: '%s' matches "
"existing '%s' (official: %s)",
data.title_raw,
candidate.title_raw,
data.official_title,
)
return candidate
@@ -177,7 +180,9 @@ class BangumiDatabase:
def add(self, data: Bangumi) -> bool:
if self._is_duplicate(data):
logger.debug(
f"[Database] Skipping duplicate: {data.official_title} ({data.group_name})"
"[Database] Skipping duplicate: %s (%s)",
data.official_title,
data.group_name,
)
return False
@@ -195,7 +200,7 @@ class BangumiDatabase:
self.session.add(data)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Insert {data.official_title} into database.")
logger.debug("[Database] Insert %s into database.", data.official_title)
return True
def add_all(self, datas: list[Bangumi]) -> int:
@@ -250,12 +255,14 @@ class BangumiDatabase:
if not unique_to_add:
if semantic_merged > 0:
logger.debug(
f"[Database] {semantic_merged} bangumi merged as aliases, "
f"rest were duplicates."
"[Database] %s bangumi merged as aliases, "
"rest were duplicates.",
semantic_merged,
)
else:
logger.debug(
f"[Database] All {len(datas)} bangumi already exist, skipping."
"[Database] All %s bangumi already exist, skipping.",
len(datas),
)
return 0
@@ -265,12 +272,16 @@ class BangumiDatabase:
skipped = len(datas) - len(unique_to_add) - semantic_merged
if skipped > 0 or semantic_merged > 0:
logger.debug(
f"[Database] Insert {len(unique_to_add)} bangumi, "
f"skipped {skipped} duplicates, merged {semantic_merged} as aliases."
"[Database] Insert %s bangumi, "
"skipped %s duplicates, merged %s as aliases.",
len(unique_to_add),
skipped,
semantic_merged,
)
else:
logger.debug(
f"[Database] Insert {len(unique_to_add)} bangumi into database."
"[Database] Insert %s bangumi into database.",
len(unique_to_add),
)
return len(unique_to_add)
@@ -289,14 +300,14 @@ class BangumiDatabase:
self.session.add(db_data)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Update {data.official_title}")
logger.debug("[Database] Update %s", data.official_title)
return True
def update_all(self, datas: list[Bangumi]):
self.session.add_all(datas)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Update {len(datas)} bangumi.")
logger.debug("[Database] Update %s bangumi.", len(datas))
def update_rss(self, title_raw: str, rss_set: str):
statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
@@ -308,7 +319,7 @@ class BangumiDatabase:
self.session.add(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Update {title_raw} rss_link to {rss_set}.")
logger.debug("[Database] Update %s rss_link to %s.", title_raw, rss_set)
def update_poster(self, title_raw: str, poster_link: str):
statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
@@ -319,7 +330,7 @@ class BangumiDatabase:
self.session.add(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Update {title_raw} poster_link to {poster_link}.")
logger.debug("[Database] Update %s poster_link to %s.", title_raw, poster_link)
def delete_one(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id)
@@ -329,7 +340,7 @@ class BangumiDatabase:
self.session.delete(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Delete bangumi id: {_id}.")
logger.debug("[Database] Delete bangumi id: %s.", _id)
def delete_all(self):
statement = delete(Bangumi)
@@ -362,7 +373,7 @@ class BangumiDatabase:
if bangumi is None:
logger.warning(f"[Database] Cannot find bangumi id: {_id}.")
return None
logger.debug(f"[Database] Find bangumi id: {_id}.")
logger.debug("[Database] Find bangumi id: %s.", _id)
return bangumi
def search_ids(self, ids: list[int]) -> list[Bangumi]:
@@ -426,7 +437,8 @@ class BangumiDatabase:
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(
f"[Database] Batch updated rss_link for {len(rss_updated)} bangumi."
"[Database] Batch updated rss_link for %s bangumi.",
len(rss_updated),
)
return unmatched
@@ -485,7 +497,7 @@ class BangumiDatabase:
self.session.add(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Disable rule {bangumi.title_raw}.")
logger.debug("[Database] Disable rule %s.", bangumi.title_raw)
def search_rss(self, rss_link: str) -> list[Bangumi]:
statement = select(Bangumi).where(func.instr(rss_link, Bangumi.rss_link) > 0)
@@ -502,7 +514,7 @@ class BangumiDatabase:
self.session.add(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Archived bangumi id: {_id}.")
logger.debug("[Database] Archived bangumi id: %s.", _id)
return True
def unarchive_one(self, _id: int) -> bool:
@@ -515,7 +527,7 @@ class BangumiDatabase:
self.session.add(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Unarchived bangumi id: {_id}.")
logger.debug("[Database] Unarchived bangumi id: %s.", _id)
return True
def match_by_save_path(self, save_path: str) -> Optional[Bangumi]:
@@ -615,8 +627,12 @@ class BangumiDatabase:
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(
f"[Database] Marked bangumi id {_id} as needs_review: {reason} "
f"(suggested: season={suggested_season_offset}, episode={suggested_episode_offset})"
"[Database] Marked bangumi id %s as needs_review: %s "
"(suggested: season=%s, episode=%s)",
_id,
reason,
suggested_season_offset,
suggested_episode_offset,
)
return True
@@ -632,5 +648,5 @@ class BangumiDatabase:
self.session.add(bangumi)
self.session.commit()
_invalidate_bangumi_cache()
logger.debug(f"[Database] Cleared needs_review for bangumi id {_id}")
logger.debug("[Database] Cleared needs_review for bangumi id %s", _id)
return True

View File

@@ -16,10 +16,10 @@ class RSSDatabase:
result = self.session.execute(statement)
db_data = result.scalar_one_or_none()
if db_data:
logger.debug(f"RSS Item {data.url} already exists.")
logger.debug("RSS Item %s already exists.", data.url)
return False
else:
logger.debug(f"RSS Item {data.url} not exists, adding...")
logger.debug("RSS Item %s does not exist, adding...", data.url)
self.session.add(data)
self.session.commit()
self.session.refresh(data)
@@ -36,7 +36,7 @@ class RSSDatabase:
if new_items:
self.session.add_all(new_items)
self.session.commit()
logger.debug(f"Batch inserted {len(new_items)} RSS items.")
logger.debug("Batch inserted %s RSS items.", len(new_items))
def update(self, _id: int, data: RSSUpdate) -> bool:
statement = select(RSSItem).where(RSSItem.id == _id)

View File

@@ -14,17 +14,17 @@ class TorrentDatabase:
def add(self, data: Torrent):
self.session.add(data)
self.session.commit()
logger.debug(f"Insert {data.name} in database.")
logger.debug("Insert %s in database.", data.name)
def add_all(self, datas: list[Torrent]):
self.session.add_all(datas)
self.session.commit()
logger.debug(f"Insert {len(datas)} torrents in database.")
logger.debug("Insert %s torrents in database.", len(datas))
def update(self, data: Torrent):
self.session.add(data)
self.session.commit()
logger.debug(f"Update {data.name} in database.")
logger.debug("Update %s in database.", data.name)
def update_all(self, datas: list[Torrent]):
self.session.add_all(datas)
@@ -33,7 +33,7 @@ class TorrentDatabase:
def update_one_user(self, data: Torrent):
self.session.add(data)
self.session.commit()
logger.debug(f"Update {data.name} in database.")
logger.debug("Update %s in database.", data.name)
def search(self, _id: int) -> Torrent | None:
result = self.session.execute(select(Torrent).where(Torrent.id == _id))
@@ -82,6 +82,6 @@ class TorrentDatabase:
torrent.qb_hash = qb_hash
self.session.add(torrent)
self.session.commit()
logger.debug(f"Updated qb_hash for torrent {torrent_id}: {qb_hash}")
logger.debug("Updated qb_hash for torrent %s: %s", torrent_id, qb_hash)
return True
return False

View File

@@ -44,7 +44,7 @@ class MockDownloader:
async def prefs_init(self, prefs: dict):
self._prefs.update(prefs)
logger.debug(f"[MockDownloader] prefs_init: {prefs}")
logger.debug("[MockDownloader] prefs_init: %s", prefs)
async def get_app_prefs(self) -> dict:
logger.debug("[MockDownloader] get_app_prefs")
@@ -52,14 +52,14 @@ class MockDownloader:
async def add_category(self, category: str):
self._categories.add(category)
logger.debug(f"[MockDownloader] add_category: {category}")
logger.debug("[MockDownloader] add_category: %s", category)
async def torrents_info(
self, status_filter: str | None, category: str | None, tag: str | None = None
) -> list[dict]:
"""Return list of torrents matching the filter."""
logger.debug(
f"[MockDownloader] torrents_info(filter={status_filter}, category={category}, tag={tag})"
"[MockDownloader] torrents_info(filter=%s, category=%s, tag=%s)", status_filter, category, tag
)
result = []
for hash_, torrent in self._torrents.items():
@@ -72,7 +72,7 @@ class MockDownloader:
async def torrents_files(self, torrent_hash: str) -> list[dict]:
"""Return files for a torrent."""
logger.debug(f"[MockDownloader] torrents_files({torrent_hash})")
logger.debug("[MockDownloader] torrents_files(%s)", torrent_hash)
torrent = self._torrents.get(torrent_hash, {})
return torrent.get("files", [])
@@ -111,19 +111,19 @@ class MockDownloader:
hashes = hash.split("|") if "|" in hash else [hash]
for h in hashes:
self._torrents.pop(h, None)
logger.debug(f"[MockDownloader] torrents_delete({hash}, delete_files={delete_files})")
logger.debug("[MockDownloader] torrents_delete(%s, delete_files=%s)", hash, delete_files)
async def torrents_pause(self, hashes: str):
for h in hashes.split("|"):
if h in self._torrents:
self._torrents[h]["state"] = "paused"
logger.debug(f"[MockDownloader] torrents_pause({hashes})")
logger.debug("[MockDownloader] torrents_pause(%s)", hashes)
async def torrents_resume(self, hashes: str):
for h in hashes.split("|"):
if h in self._torrents:
self._torrents[h]["state"] = "downloading"
logger.debug(f"[MockDownloader] torrents_resume({hashes})")
logger.debug("[MockDownloader] torrents_resume(%s)", hashes)
async def torrents_rename_file(
self, torrent_hash: str, old_path: str, new_path: str
@@ -133,11 +133,11 @@ class MockDownloader:
async def rss_add_feed(self, url: str, item_path: str):
self._feeds[item_path] = {"url": url, "path": item_path}
logger.debug(f"[MockDownloader] rss_add_feed({url}, {item_path})")
logger.debug("[MockDownloader] rss_add_feed(%s, %s)", url, item_path)
async def rss_remove_item(self, item_path: str):
self._feeds.pop(item_path, None)
logger.debug(f"[MockDownloader] rss_remove_item({item_path})")
logger.debug("[MockDownloader] rss_remove_item(%s)", item_path)
async def rss_get_feeds(self) -> dict:
logger.debug("[MockDownloader] rss_get_feeds")
@@ -151,7 +151,7 @@ class MockDownloader:
for h in hashes.split("|"):
if h in self._torrents:
self._torrents[h]["save_path"] = new_location
logger.debug(f"[MockDownloader] move_torrent({hashes}, {new_location})")
logger.debug("[MockDownloader] move_torrent(%s, %s)", hashes, new_location)
async def get_download_rule(self) -> dict:
logger.debug("[MockDownloader] get_download_rule")
@@ -160,24 +160,24 @@ class MockDownloader:
async def get_torrent_path(self, _hash: str) -> str:
torrent = self._torrents.get(_hash, {})
path = torrent.get("save_path", "/tmp/mock-downloads")
logger.debug(f"[MockDownloader] get_torrent_path({_hash}) -> {path}")
logger.debug("[MockDownloader] get_torrent_path(%s) -> %s", _hash, path)
return path
async def set_category(self, _hash: str, category: str):
if _hash in self._torrents:
self._torrents[_hash]["category"] = category
logger.debug(f"[MockDownloader] set_category({_hash}, {category})")
logger.debug("[MockDownloader] set_category(%s, %s)", _hash, category)
async def remove_rule(self, rule_name: str):
self._rules.pop(rule_name, None)
logger.debug(f"[MockDownloader] remove_rule({rule_name})")
logger.debug("[MockDownloader] remove_rule(%s)", rule_name)
async def add_tag(self, _hash: str, tag: str):
if _hash in self._torrents:
tags = self._torrents[_hash].setdefault("tags", [])
if tag not in tags:
tags.append(tag)
logger.debug(f"[MockDownloader] add_tag({_hash}, {tag})")
logger.debug("[MockDownloader] add_tag(%s, %s)", _hash, tag)
async def check_connection(self) -> str:
return "v4.6.0 (mock)"
@@ -209,7 +209,7 @@ class MockDownloader:
"files": files or [{"name": f"{name}.mkv", "size": 1024 * 1024 * 500}],
"tags": [],
}
logger.debug(f"[MockDownloader] Added mock torrent: {name}")
logger.debug("[MockDownloader] Added mock torrent: %s", name)
return hash
def get_state(self) -> dict[str, Any]:

View File

@@ -64,7 +64,7 @@ class QbDownloader:
httpx.RequestError,
httpx.TimeoutException,
) as e:
logger.debug(f"[Downloader] Logout request failed (non-critical): {e}")
logger.debug("[Downloader] Logout request failed (non-critical): %s", e)
await self._client.aclose()
self._client = None
@@ -202,7 +202,7 @@ class QbDownloader:
data={"hash": torrent_hash, "oldPath": old_path, "newPath": new_path},
)
if resp.status_code == 409:
logger.debug(f"Conflict409Error: {old_path} >> {new_path}")
logger.debug("Conflict409Error: %s >> %s", old_path, new_path)
return False
if resp.status_code != 200:
return False
@@ -226,7 +226,7 @@ class QbDownloader:
continue
# Final attempt failed
logger.debug(
f"[Downloader] Rename API returned 200 but file unchanged: {old_path}"
"[Downloader] Rename API returned 200 but file unchanged: %s", old_path
)
return False
# new_path found or old_path not found

View File

@@ -75,7 +75,7 @@ class DownloadClient(TorrentPath):
await self.client.add_category("BangumiCollection")
except Exception as e:
logger.debug(
f"[Downloader] Could not add category (may already exist): {e}"
"[Downloader] Could not add category (may already exist): %s", e
)
if settings.downloader.path == "":
prefs = await self.client.get_app_prefs()
@@ -129,7 +129,7 @@ class DownloadClient(TorrentPath):
if result:
logger.info(f"{old_path} >> {new_path}")
else:
logger.debug(f"[Downloader] Rename failed: {old_path} >> {new_path}")
logger.debug("[Downloader] Rename failed: %s >> %s", old_path, new_path)
return result
async def delete_torrent(self, hashes, delete_files: bool = True):
@@ -149,7 +149,7 @@ class DownloadClient(TorrentPath):
if isinstance(torrent, list):
if len(torrent) == 0:
logger.debug(
f"[Downloader] No torrent found: {bangumi.official_title}"
"[Downloader] No torrent found: %s", bangumi.official_title
)
return False
if "magnet" in torrent[0].url:
@@ -189,11 +189,11 @@ class DownloadClient(TorrentPath):
category="Bangumi",
tags=tags,
):
logger.debug(f"[Downloader] Add torrent: {bangumi.official_title}")
logger.debug("[Downloader] Add torrent: %s", bangumi.official_title)
return True
else:
logger.debug(
f"[Downloader] Torrent added before: {bangumi.official_title}"
"[Downloader] Torrent added before: %s", bangumi.official_title
)
return False
except Exception as e:
@@ -237,4 +237,4 @@ class DownloadClient(TorrentPath):
async def add_tag(self, torrent_hash: str, tag: str):
"""Add a tag to a torrent."""
await self.client.add_tag(torrent_hash, tag)
logger.debug(f"[Downloader] Added tag '{tag}' to torrent {torrent_hash[:8]}...")
logger.debug("[Downloader] Added tag '%s' to torrent %s...", tag, torrent_hash[:8])

View File

@@ -49,7 +49,7 @@ class Renamer(DownloadClient):
logger.info(
f"Finished checking {torrent_count} files' name, renamed {rename_count} files."
)
logger.debug(f"Checked {torrent_count} files")
logger.debug("Checked %s files", torrent_count)
@staticmethod
def gen_path(
@@ -67,12 +67,13 @@ class Renamer(DownloadClient):
# Apply episode offset
original_episode = int(file_info.episode)
adjusted_episode = original_episode + episode_offset
# Episode 0 is valid (specials, OVAs, etc.) - only handle truly negative results
if adjusted_episode < 0:
# Offset would make episode negative - ignore the offset
# Episode 0 is valid for specials/OVAs when the source episode is already 0.
# But an offset producing exactly 0 (e.g., EP12 + offset -12) is almost always
# an off-by-one user error, so revert to original in that case.
if adjusted_episode < 0 or (adjusted_episode == 0 and original_episode > 0):
adjusted_episode = original_episode
logger.warning(
f"[Renamer] Episode offset {episode_offset} would make episode {original_episode} negative, ignoring offset"
f"[Renamer] Episode offset {episode_offset} would make episode {original_episode} non-positive, ignoring offset"
)
episode = f"0{adjusted_episode}" if adjusted_episode < 10 else adjusted_episode
if method == "none" or method == "subtitle_none":
@@ -128,7 +129,7 @@ class Renamer(DownloadClient):
and (time.time() - last_attempt) < _PENDING_RENAME_COOLDOWN
):
logger.debug(
f"[Renamer] Skipping rename (pending cooldown): {media_path}"
"[Renamer] Skipping rename (pending cooldown): %s", media_path
)
return None
@@ -141,8 +142,7 @@ class Renamer(DownloadClient):
# Only apply episode offset
original_ep = int(ep.episode)
adjusted_episode = original_ep + episode_offset
# Episode 0 is valid - only handle truly negative results
if adjusted_episode < 0:
if adjusted_episode < 0 or (adjusted_episode == 0 and original_ep > 0):
adjusted_episode = original_ep
return Notification(
official_title=bangumi_name,
@@ -348,7 +348,7 @@ class Renamer(DownloadClient):
result[torrent_hash] = (0, 0)
except Exception as e:
logger.debug(f"[Renamer] Batch offset lookup failed: {e}")
logger.debug("[Renamer] Batch offset lookup failed: %s", e)
# Fall back to individual lookups on error
for info in torrents_info:
if info["hash"] not in result:
@@ -384,7 +384,7 @@ class Renamer(DownloadClient):
bangumi = db.bangumi.search_id(torrent_record.bangumi_id)
if bangumi and not bangumi.deleted:
logger.debug(
f"[Renamer] Found offsets via qb_hash: ep={bangumi.episode_offset}, season={bangumi.season_offset}"
"[Renamer] Found offsets via qb_hash: ep=%s, season=%s", bangumi.episode_offset, bangumi.season_offset
)
return bangumi.episode_offset, bangumi.season_offset
@@ -394,7 +394,7 @@ class Renamer(DownloadClient):
bangumi = db.bangumi.search_id(bangumi_id)
if bangumi and not bangumi.deleted:
logger.debug(
f"[Renamer] Found offsets via tag ab:{bangumi_id}: ep={bangumi.episode_offset}, season={bangumi.season_offset}"
"[Renamer] Found offsets via tag ab:%s: ep=%s, season=%s", bangumi_id, bangumi.episode_offset, bangumi.season_offset
)
return bangumi.episode_offset, bangumi.season_offset
@@ -425,7 +425,7 @@ class Renamer(DownloadClient):
f"name={torrent_name[:60] if torrent_name else 'N/A'}..."
)
except Exception as e:
logger.debug(f"[Renamer] Could not lookup offsets for {save_path}: {e}")
logger.debug("[Renamer] Could not lookup offsets for %s: %s", save_path, e)
return 0, 0
async def rename(self) -> list[Notification]:

View File

@@ -78,7 +78,7 @@ class RequestURL:
while True:
try:
req = await self._client.get(url=url, headers=headers)
logger.debug(f"[Network] Successfully connected to {url}. Status: {req.status_code}")
logger.debug("[Network] Successfully connected to %s. Status: %s", url, req.status_code)
req.raise_for_status()
return req
except httpx.HTTPStatusError as e:
@@ -130,7 +130,7 @@ class RequestURL:
req.raise_for_status()
return True
except (httpx.RequestError, httpx.HTTPStatusError):
logger.debug(f"[Network] Cannot connect to {url}.")
logger.debug("[Network] Cannot connect to %s.", url)
return False
async def post_form(self, url: str, data: dict, files):

View File

@@ -35,7 +35,7 @@ class NotificationManager:
try:
provider = provider_cls(cfg)
self.providers.append(provider)
logger.debug(f"Loaded notification provider: {cfg.type}")
logger.debug("Loaded notification provider: %s", cfg.type)
except Exception as e:
logger.warning(f"Failed to load provider {cfg.type}: {e}")
else:
@@ -73,8 +73,9 @@ class NotificationManager:
async with provider:
await provider.send(notification)
logger.debug(
f"Sent notification via {provider.__class__.__name__}: "
f"{notification.official_title}"
"Sent notification via %s: %s",
provider.__class__.__name__,
notification.official_title,
)
except Exception as e:
logger.warning(

View File

@@ -45,7 +45,7 @@ class PostNotification:
await asyncio.to_thread(self._get_poster_sync, notify)
try:
await self.notifier.post_msg(notify)
logger.debug(f"Send notification: {notify.official_title}")
logger.debug("Send notification: %s", notify.official_title)
except Exception as e:
logger.warning(f"Failed to send notification: {e}")
return False

View File

@@ -23,5 +23,5 @@ class BarkNotification(RequestContent):
text = self.gen_message(notify)
data = {"title": notify.official_title, "body": text, "icon": notify.poster_path, "device_key": self.token}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"Bark notification: {resp.status_code}")
logger.debug("Bark notification: %s", resp.status_code)
return resp.status_code == 200

View File

@@ -27,5 +27,5 @@ class ServerChanNotification(RequestContent):
"desp": text,
}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"ServerChan notification: {resp.status_code}")
logger.debug("ServerChan notification: %s", resp.status_code)
return resp.status_code == 200

View File

@@ -23,5 +23,5 @@ class SlackNotification(RequestContent):
text = self.gen_message(notify)
data = {"title": notify.official_title, "body": text, "device_key": self.token}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"Bark notification: {resp.status_code}")
logger.debug("Slack notification: %s", resp.status_code)
return resp.status_code == 200

View File

@@ -34,5 +34,5 @@ class TelegramNotification(RequestContent):
resp = await self.post_files(self.photo_url, data, files={"photo": photo})
else:
resp = await self.post_data(self.message_url, data)
logger.debug(f"Telegram notification: {resp.status_code}")
logger.debug("Telegram notification: %s", resp.status_code)
return resp.status_code == 200

View File

@@ -38,5 +38,5 @@ class WecomNotification(RequestContent):
"picurl": picurl,
}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"Wecom notification: {resp.status_code}")
logger.debug("Wecom notification: %s", resp.status_code)
return resp.status_code == 200

View File

@@ -35,7 +35,7 @@ class BarkProvider(NotificationProvider):
}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"Bark notification: {resp.status_code}")
logger.debug("Bark notification: %s", resp.status_code)
return resp.status_code == 200
async def test(self) -> tuple[bool, str]:

View File

@@ -39,7 +39,7 @@ class DiscordProvider(NotificationProvider):
}
resp = await self.post_data(self.webhook_url, data)
logger.debug(f"Discord notification: {resp.status_code}")
logger.debug("Discord notification: %s", resp.status_code)
return resp.status_code in (200, 204)
async def test(self) -> tuple[bool, str]:

View File

@@ -43,7 +43,7 @@ class GotifyProvider(NotificationProvider):
}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"Gotify notification: {resp.status_code}")
logger.debug("Gotify notification: %s", resp.status_code)
return resp.status_code == 200
async def test(self) -> tuple[bool, str]:

View File

@@ -40,7 +40,7 @@ class PushoverProvider(NotificationProvider):
data["url_title"] = "查看海报"
resp = await self.post_data(self.API_URL, data)
logger.debug(f"Pushover notification: {resp.status_code}")
logger.debug("Pushover notification: %s", resp.status_code)
return resp.status_code == 200
async def test(self) -> tuple[bool, str]:

View File

@@ -29,7 +29,7 @@ class ServerChanProvider(NotificationProvider):
}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"ServerChan notification: {resp.status_code}")
logger.debug("ServerChan notification: %s", resp.status_code)
return resp.status_code == 200
async def test(self) -> tuple[bool, str]:

View File

@@ -39,7 +39,7 @@ class TelegramProvider(NotificationProvider):
else:
resp = await self.post_data(self.message_url, data)
logger.debug(f"Telegram notification: {resp.status_code}")
logger.debug("Telegram notification: %s", resp.status_code)
return resp.status_code == 200
async def test(self) -> tuple[bool, str]:

View File

@@ -74,7 +74,7 @@ class WebhookProvider(NotificationProvider):
data = self._render_template(notification)
resp = await self.post_data(self.url, data)
logger.debug(f"Webhook notification: {resp.status_code}")
logger.debug("Webhook notification: %s", resp.status_code)
# Accept any 2xx status code as success
return 200 <= resp.status_code < 300

View File

@@ -46,7 +46,7 @@ class WecomProvider(NotificationProvider):
}
resp = await self.post_data(self.notification_url, data)
logger.debug(f"Wecom notification: {resp.status_code}")
logger.debug("Wecom notification: %s", resp.status_code)
return resp.status_code == 200
async def test(self) -> tuple[bool, str]:

View File

@@ -15,7 +15,6 @@ _mikan_cache: dict[str, tuple[str, str]] = {}
async def mikan_parser(homepage: str):
if homepage in _mikan_cache:
logger.debug(f"[Mikan] Cache hit for {homepage}")
return _mikan_cache[homepage]
root_path = parse_url(homepage).host
async with RequestContent() as req:

View File

@@ -108,7 +108,7 @@ class OpenAIParser:
except json.JSONDecodeError:
logger.warning(f"Cannot parse result {result} as python dict.")
logger.debug(f"the parsed result is: {result}")
logger.debug("the parsed result is: %s", result)
return result

View File

@@ -120,8 +120,9 @@ def detect_virtual_seasons(episodes: list[dict], gap_months: int = 6) -> list[in
if days_diff > gap_days:
virtual_season_starts.append(curr_ep["episode_number"])
logger.debug(
f"[TMDB] Detected virtual season break: {days_diff} days gap "
f"between ep{prev_ep['episode_number']} and ep{curr_ep['episode_number']}"
"[TMDB] Detected virtual season break: %s days gap "
"between ep%s and ep%s",
days_diff, prev_ep['episode_number'], curr_ep['episode_number']
)
return virtual_season_starts
@@ -161,7 +162,7 @@ async def get_aired_episode_count(tv_id: int, season_number: int, language: str,
# Invalid date format, skip this episode
continue
logger.debug(f"[TMDB] Season {season_number}: {aired_count} aired of {len(episodes)} total episodes")
logger.debug("[TMDB] Season %s: %s aired of %s total episodes", season_number, aired_count, len(episodes))
return aired_count
@@ -183,7 +184,6 @@ def get_season(seasons: list) -> tuple[int, str]:
async def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
cache_key = f"{title}:{language}"
if cache_key in _tmdb_cache:
logger.debug(f"[TMDB] Cache hit for {title}")
return _tmdb_cache[cache_key]
async with RequestContent() as req:
@@ -232,7 +232,7 @@ async def tmdb_parser(title, language, test: bool = False) -> TMDBInfo | None:
vs_starts = detect_virtual_seasons(episodes)
if len(vs_starts) > 1:
virtual_season_starts[season_num] = vs_starts
logger.debug(f"[TMDB] Season {season_num} has virtual seasons starting at episodes: {vs_starts}")
logger.debug("[TMDB] Season %s has virtual seasons starting at episodes: %s", season_num, vs_starts)
# Count only aired episodes
season_episode_counts[season_num] = len(episodes)
else:

View File

@@ -34,7 +34,7 @@ class TitleParser:
async def tmdb_parser(title: str, season: int, language: str):
tmdb_info = await tmdb_parser(title, language)
if tmdb_info:
logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
logger.debug("TMDB Matched, official title is %s", tmdb_info.title)
tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season
return tmdb_info.title, tmdb_season, tmdb_info.year, tmdb_info.poster_link
else:
@@ -48,7 +48,7 @@ class TitleParser:
bangumi.official_title, settings.rss_parser.language
)
if tmdb_info:
logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
logger.debug("TMDB Matched, official title is %s", tmdb_info.title)
bangumi.poster_link = tmdb_info.poster_link
else:
logger.warning(
@@ -86,7 +86,7 @@ class TitleParser:
else:
official_title = title_raw
_season = episode.season
logger.debug(f"RAW:{raw} >> {title_raw}")
logger.debug("RAW:%s >> %s", raw, title_raw)
return Bangumi(
official_title=official_title,
title_raw=title_raw,

View File

@@ -137,7 +137,7 @@ class RSSEngine(Database):
rss_item = self.rss.search_id(rss_id)
rss_items = [rss_item] if rss_item else []
# From RSS Items, fetch all torrents concurrently
logger.debug(f"[Engine] Get {len(rss_items)} RSS items")
logger.debug("[Engine] Get %s RSS items", len(rss_items))
results = await asyncio.gather(
*[self._pull_rss_with_status(rss_item) for rss_item in rss_items]
)
@@ -153,7 +153,7 @@ class RSSEngine(Database):
matched_data = self.match_torrent(torrent)
if matched_data:
if await client.add_torrent(torrent, matched_data):
logger.debug(f"[Engine] Add torrent {torrent.name} to client")
logger.debug("[Engine] Add torrent %s to client", torrent.name)
torrent.downloaded = True
# Add all torrents to database
self.torrent.add_all(new_torrents)

View File

@@ -41,7 +41,7 @@ class SearchTorrent(RequestContent, RSSAnalyser):
_poster_cache[title] = tmdb_info.poster_link
return tmdb_info.poster_link
except Exception as e:
logger.debug(f"[Searcher] Failed to fetch TMDB poster for {title}: {e}")
logger.debug("[Searcher] Failed to fetch TMDB poster for %s: %s", title, e)
_poster_cache[title] = None
return None

View File

@@ -93,7 +93,7 @@ class WebAuthnService:
# 存储 challenge 用于后续验证
challenge_key = f"reg_{username}"
self._challenges[challenge_key] = options.challenge
logger.debug(f"Generated registration challenge for {username}")
logger.debug("Generated registration challenge for %s", username)
return json.loads(options_to_json(options))
@@ -187,7 +187,7 @@ class WebAuthnService:
# 存储 challenge
challenge_key = f"auth_{username}"
self._challenges[challenge_key] = options.challenge
logger.debug(f"Generated authentication challenge for {username}")
logger.debug("Generated authentication challenge for %s", username)
return json.loads(options_to_json(options))

View File

@@ -569,14 +569,30 @@ class TestGenPathWithOffsets:
result = Renamer.gen_path(ep, "Bangumi", method="pn", episode_offset=-12)
assert "E03" in result # 15 - 12 = 3
def test_episode_offset_negative_below_one_ignored(self):
"""Negative offset that would go below 1 is ignored."""
def test_episode_offset_negative_below_zero_ignored(self):
"""Negative offset that would go below 0 is ignored."""
ep = EpisodeFile(
media_path="old.mkv", title="My Anime", season=1, episode=5, suffix=".mkv"
)
result = Renamer.gen_path(ep, "Bangumi", method="pn", episode_offset=-10)
assert "E05" in result # Would be -5, so offset ignored
def test_episode_offset_producing_zero_ignored(self):
"""Offset that would make a positive episode become 0 is ignored (off-by-one guard)."""
ep = EpisodeFile(
media_path="old.mkv", title="My Anime", season=1, episode=12, suffix=".mkv"
)
result = Renamer.gen_path(ep, "Bangumi", method="pn", episode_offset=-12)
assert "E12" in result # Would be 0, so offset ignored
def test_episode_zero_preserved_without_offset(self):
"""Episode 0 (specials/OVAs) is preserved when no offset is applied."""
ep = EpisodeFile(
media_path="old.mkv", title="My Anime", season=1, episode=0, suffix=".mkv"
)
result = Renamer.gen_path(ep, "Bangumi", method="pn", episode_offset=0)
assert "E00" in result # Episode 0 is valid for specials
def test_season_offset_positive(self):
"""Season offset is now applied to folder path, not filename.

2
backend/uv.lock generated
View File

@@ -52,7 +52,7 @@ wheels = [
[[package]]
name = "auto-bangumi"
version = "3.2.0b13"
version = "3.2.3b3"
source = { virtual = "." }
dependencies = [
{ name = "aiosqlite" },