mirror of
https://github.com/EstrellaXD/Auto_Bangumi.git
synced 2026-04-13 18:11:03 +08:00
test: add comprehensive test suite for core business logic
Cover RSS engine, downloader, renamer, auth, notifications, search, config, API endpoints, and end-to-end integration flows. When all 210 tests pass, the program's key behavioral contracts are verified. Generated with [Claude Code](https://claude.ai/code) via [Happy](https://happy.engineering) Co-Authored-By: Claude <noreply@anthropic.com> Co-Authored-By: Happy <yesreply@happy.engineering>
This commit is contained in:
17
CHANGELOG.md
17
CHANGELOG.md
@@ -14,6 +14,23 @@
|
||||
- 数据库迁移机制重构:使用 `schema_version` 表替代仅依赖应用版本号的迁移策略
|
||||
- 启动时始终检查并执行未完成的迁移,防止迁移中断后无法恢复
|
||||
|
||||
### Tests
|
||||
|
||||
- 新增全面的测试套件,覆盖核心业务逻辑:
|
||||
- RSS 引擎测试:pull_rss、match_torrent、refresh_rss、add_rss 全流程
|
||||
- 下载客户端测试:init_downloader、set_rule、add_torrent(磁力/文件)、rename
|
||||
- 路径工具测试:save_path 生成、文件分类、is_ep 深度检查
|
||||
- 重命名器测试:gen_path 命名方法(pn/advance/none/subtitle)、单文件/集合重命名
|
||||
- 认证测试:JWT 创建/解码/验证、密码哈希、get_current_user
|
||||
- 通知测试:getClient 工厂、send_msg 成功/失败、poster 查询
|
||||
- 搜索测试:URL 构建、关键词清洗、special_url
|
||||
- 配置测试:默认值、序列化、迁移、环境变量覆盖
|
||||
- Bangumi API 测试:CRUD 端点 + 认证要求
|
||||
- RSS API 测试:CRUD/批量端点 + 刷新
|
||||
- 集成测试:RSS→下载全流程、重命名全流程、数据库一致性
|
||||
- 新增 `pytest-mock` 开发依赖
|
||||
- 新增共享测试 fixtures(`conftest.py`)和数据工厂(`factories.py`)
|
||||
|
||||
---
|
||||
|
||||
# [3.1] - 2023-08
|
||||
|
||||
@@ -30,6 +30,7 @@ dependencies = [
|
||||
dev = [
|
||||
"pytest>=8.0.0",
|
||||
"pytest-asyncio>=0.23.0",
|
||||
"pytest-mock>=3.12.0",
|
||||
"ruff>=0.1.0",
|
||||
"black>=24.0.0",
|
||||
"pre-commit>=3.0.0",
|
||||
|
||||
82
backend/src/test/conftest.py
Normal file
82
backend/src/test/conftest.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""Shared test fixtures for AutoBangumi test suite."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from sqlmodel import Session, SQLModel, create_engine
|
||||
|
||||
from module.models.config import Config
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Database Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def db_engine():
|
||||
"""Create an in-memory SQLite engine for testing."""
|
||||
engine = create_engine("sqlite://", echo=False)
|
||||
SQLModel.metadata.create_all(engine)
|
||||
yield engine
|
||||
SQLModel.metadata.drop_all(engine)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def db_session(db_engine):
|
||||
"""Provide a fresh database session per test."""
|
||||
with Session(db_engine) as session:
|
||||
yield session
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Settings Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_settings():
|
||||
"""Provide a Config object with predictable test defaults."""
|
||||
return Config()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_settings(test_settings):
|
||||
"""Patch module.conf.settings globally with test defaults."""
|
||||
with patch("module.conf.settings", test_settings):
|
||||
with patch("module.conf.config.settings", test_settings):
|
||||
yield test_settings
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Download Client Mock
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_qb_client():
|
||||
"""Mock QbDownloader that simulates qBittorrent API responses."""
|
||||
client = AsyncMock()
|
||||
client.auth.return_value = True
|
||||
client.logout.return_value = None
|
||||
client.check_host.return_value = True
|
||||
client.torrents_info.return_value = []
|
||||
client.torrents_files.return_value = []
|
||||
client.torrents_rename_file.return_value = True
|
||||
client.add_torrents.return_value = True
|
||||
client.torrents_delete.return_value = None
|
||||
client.torrents_pause.return_value = None
|
||||
client.torrents_resume.return_value = None
|
||||
client.rss_set_rule.return_value = None
|
||||
client.prefs_init.return_value = None
|
||||
client.add_category.return_value = None
|
||||
client.get_app_prefs.return_value = {"save_path": "/downloads"}
|
||||
client.move_torrent.return_value = None
|
||||
client.rss_add_feed.return_value = None
|
||||
client.rss_remove_item.return_value = None
|
||||
client.rss_get_feeds.return_value = {}
|
||||
client.get_download_rule.return_value = {}
|
||||
client.get_torrent_path.return_value = "/downloads/Bangumi"
|
||||
client.set_category.return_value = None
|
||||
client.remove_rule.return_value = None
|
||||
return client
|
||||
54
backend/src/test/factories.py
Normal file
54
backend/src/test/factories.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""Test data factories for creating model instances with sensible defaults."""
|
||||
|
||||
from module.models import Bangumi, RSSItem, Torrent
|
||||
|
||||
|
||||
def make_bangumi(**overrides) -> Bangumi:
|
||||
"""Create a Bangumi instance with sensible test defaults."""
|
||||
defaults = dict(
|
||||
official_title="Test Anime",
|
||||
year="2024",
|
||||
title_raw="Test Anime Raw",
|
||||
season=1,
|
||||
season_raw="",
|
||||
group_name="TestGroup",
|
||||
dpi="1080p",
|
||||
source="Web",
|
||||
subtitle="CHT",
|
||||
eps_collect=False,
|
||||
offset=0,
|
||||
filter="720",
|
||||
rss_link="https://mikanani.me/RSS/test",
|
||||
poster_link="/test/poster.jpg",
|
||||
added=True,
|
||||
rule_name="[TestGroup] Test Anime S1",
|
||||
save_path="/downloads/Bangumi/Test Anime (2024)/Season 1",
|
||||
deleted=False,
|
||||
)
|
||||
defaults.update(overrides)
|
||||
return Bangumi(**defaults)
|
||||
|
||||
|
||||
def make_torrent(**overrides) -> Torrent:
|
||||
"""Create a Torrent instance with sensible test defaults."""
|
||||
defaults = dict(
|
||||
name="[TestGroup] Test Anime Raw - 01 [1080p].mkv",
|
||||
url="https://example.com/test.torrent",
|
||||
homepage="https://mikanani.me/Home/Episode/test",
|
||||
downloaded=False,
|
||||
)
|
||||
defaults.update(overrides)
|
||||
return Torrent(**defaults)
|
||||
|
||||
|
||||
def make_rss_item(**overrides) -> RSSItem:
|
||||
"""Create an RSSItem instance with sensible test defaults."""
|
||||
defaults = dict(
|
||||
name="Test RSS Feed",
|
||||
url="https://mikanani.me/RSS/MyBangumi?token=test",
|
||||
aggregate=True,
|
||||
parser="mikan",
|
||||
enabled=True,
|
||||
)
|
||||
defaults.update(overrides)
|
||||
return RSSItem(**defaults)
|
||||
220
backend/src/test/test_api_bangumi.py
Normal file
220
backend/src/test/test_api_bangumi.py
Normal file
@@ -0,0 +1,220 @@
|
||||
"""Tests for Bangumi API endpoints."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock, AsyncMock
|
||||
from datetime import timedelta
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from module.api import v1
|
||||
from module.models import Bangumi, BangumiUpdate, ResponseModel
|
||||
from module.security.api import get_current_user, active_user
|
||||
from module.security.jwt import create_access_token
|
||||
|
||||
from test.factories import make_bangumi
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app():
|
||||
"""Create a FastAPI app with v1 routes for testing."""
|
||||
app = FastAPI()
|
||||
app.include_router(v1, prefix="/api")
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authed_client(app):
|
||||
"""TestClient with auth dependency overridden."""
|
||||
async def mock_user():
|
||||
return "testuser"
|
||||
|
||||
app.dependency_overrides[get_current_user] = mock_user
|
||||
client = TestClient(app)
|
||||
yield client
|
||||
app.dependency_overrides.clear()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def unauthed_client(app):
|
||||
"""TestClient without auth (no override)."""
|
||||
return TestClient(app)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Auth requirement
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestAuthRequired:
|
||||
def test_get_all_unauthorized(self, unauthed_client):
|
||||
"""GET /bangumi/get/all without auth returns 401."""
|
||||
response = unauthed_client.get("/api/v1/bangumi/get/all")
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_get_by_id_unauthorized(self, unauthed_client):
|
||||
"""GET /bangumi/get/1 without auth returns 401."""
|
||||
response = unauthed_client.get("/api/v1/bangumi/get/1")
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_delete_unauthorized(self, unauthed_client):
|
||||
"""DELETE /bangumi/delete/1 without auth returns 401."""
|
||||
response = unauthed_client.delete("/api/v1/bangumi/delete/1")
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# GET endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGetBangumi:
|
||||
def test_get_all(self, authed_client):
|
||||
"""GET /bangumi/get/all returns list of Bangumi."""
|
||||
mock_bangumi = [make_bangumi(id=1), make_bangumi(id=2, title_raw="Other")]
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.bangumi.search_all.return_value = mock_bangumi
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/bangumi/get/all")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
|
||||
def test_get_by_id(self, authed_client):
|
||||
"""GET /bangumi/get/{id} returns single Bangumi."""
|
||||
bangumi = make_bangumi(id=1, official_title="Found Anime")
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.search_one.return_value = bangumi
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/bangumi/get/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PATCH/UPDATE endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestUpdateBangumi:
|
||||
def test_update_success(self, authed_client):
|
||||
"""PATCH /bangumi/update/{id} updates and returns success."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Updated.", msg_zh="已更新。"
|
||||
)
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.update_rule = AsyncMock(return_value=resp_model)
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
# BangumiUpdate requires all fields
|
||||
update_data = {
|
||||
"official_title": "New Title",
|
||||
"title_raw": "new_raw",
|
||||
"season": 1,
|
||||
"year": "2024",
|
||||
"season_raw": "",
|
||||
"group_name": "Group",
|
||||
"dpi": "1080p",
|
||||
"source": "Web",
|
||||
"subtitle": "CHT",
|
||||
"eps_collect": False,
|
||||
"offset": 0,
|
||||
"filter": "720",
|
||||
"rss_link": "https://test.com/rss",
|
||||
"poster_link": None,
|
||||
"added": True,
|
||||
"rule_name": None,
|
||||
"save_path": None,
|
||||
"deleted": False,
|
||||
}
|
||||
response = authed_client.patch(
|
||||
"/api/v1/bangumi/update/1",
|
||||
json=update_data,
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# DELETE endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDeleteBangumi:
|
||||
def test_delete_success(self, authed_client):
|
||||
"""DELETE /bangumi/delete/{id} removes bangumi."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Deleted.", msg_zh="已删除。"
|
||||
)
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.delete_rule = AsyncMock(return_value=resp_model)
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.delete("/api/v1/bangumi/delete/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_disable_rule(self, authed_client):
|
||||
"""DELETE /bangumi/disable/{id} marks as deleted."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Disabled.", msg_zh="已禁用。"
|
||||
)
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.disable_rule = AsyncMock(return_value=resp_model)
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.delete("/api/v1/bangumi/disable/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_enable_rule(self, authed_client):
|
||||
"""GET /bangumi/enable/{id} re-enables rule."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Enabled.", msg_zh="已启用。"
|
||||
)
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.enable_rule.return_value = resp_model
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/bangumi/enable/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Reset
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestResetBangumi:
|
||||
def test_reset_all(self, authed_client):
|
||||
"""GET /bangumi/reset/all deletes all bangumi."""
|
||||
with patch("module.api.bangumi.TorrentManager") as MockManager:
|
||||
mock_mgr = MagicMock()
|
||||
mock_mgr.bangumi.delete_all.return_value = None
|
||||
MockManager.return_value.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
MockManager.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/bangumi/reset/all")
|
||||
|
||||
assert response.status_code == 200
|
||||
312
backend/src/test/test_api_rss.py
Normal file
312
backend/src/test/test_api_rss.py
Normal file
@@ -0,0 +1,312 @@
|
||||
"""Tests for RSS API endpoints."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock, AsyncMock
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from module.api import v1
|
||||
from module.models import RSSItem, RSSUpdate, ResponseModel, Torrent
|
||||
from module.security.api import get_current_user
|
||||
|
||||
from test.factories import make_rss_item, make_torrent
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app():
|
||||
app = FastAPI()
|
||||
app.include_router(v1, prefix="/api")
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authed_client(app):
|
||||
async def mock_user():
|
||||
return "testuser"
|
||||
|
||||
app.dependency_overrides[get_current_user] = mock_user
|
||||
client = TestClient(app)
|
||||
yield client
|
||||
app.dependency_overrides.clear()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def unauthed_client(app):
|
||||
return TestClient(app)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Auth requirement
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestAuthRequired:
|
||||
def test_get_rss_unauthorized(self, unauthed_client):
|
||||
"""GET /rss without auth returns 401."""
|
||||
response = unauthed_client.get("/api/v1/rss")
|
||||
assert response.status_code == 401
|
||||
|
||||
def test_add_rss_unauthorized(self, unauthed_client):
|
||||
"""POST /rss/add without auth returns 401."""
|
||||
response = unauthed_client.post(
|
||||
"/api/v1/rss/add", json={"url": "https://test.com"}
|
||||
)
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# GET /rss
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGetRss:
|
||||
def test_get_all(self, authed_client):
|
||||
"""GET /rss returns list of RSSItems."""
|
||||
items = [
|
||||
make_rss_item(id=1, name="Feed 1"),
|
||||
make_rss_item(id=2, name="Feed 2"),
|
||||
]
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.search_all.return_value = items
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/rss")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# POST /rss/add
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestAddRss:
|
||||
def test_add_success(self, authed_client):
|
||||
"""POST /rss/add creates a new RSS feed."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Added.", msg_zh="添加成功。"
|
||||
)
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.add_rss = AsyncMock(return_value=resp_model)
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.post(
|
||||
"/api/v1/rss/add",
|
||||
json={
|
||||
"url": "https://mikanani.me/RSS/test",
|
||||
"name": "Test Feed",
|
||||
"aggregate": True,
|
||||
"parser": "mikan",
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# DELETE /rss/delete/{id}
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDeleteRss:
|
||||
def test_delete_success(self, authed_client):
|
||||
"""DELETE /rss/delete/{id} removes the feed."""
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.delete.return_value = True
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.delete("/api/v1/rss/delete/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_delete_failure(self, authed_client):
|
||||
"""DELETE /rss/delete/{id} returns 406 when feed not found."""
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.delete.return_value = False
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.delete("/api/v1/rss/delete/999")
|
||||
|
||||
assert response.status_code == 406
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PATCH /rss/disable/{id}
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDisableRss:
|
||||
def test_disable_success(self, authed_client):
|
||||
"""PATCH /rss/disable/{id} disables the feed."""
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.disable.return_value = True
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.patch("/api/v1/rss/disable/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_disable_failure(self, authed_client):
|
||||
"""PATCH /rss/disable/{id} returns 406 when feed not found."""
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.disable.return_value = False
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.patch("/api/v1/rss/disable/999")
|
||||
|
||||
assert response.status_code == 406
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# POST /rss/enable/many, /rss/disable/many, /rss/delete/many
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestBatchOperations:
|
||||
def test_enable_many(self, authed_client):
|
||||
"""POST /rss/enable/many enables multiple feeds."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Enabled.", msg_zh="启用成功。"
|
||||
)
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.enable_list.return_value = resp_model
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.post("/api/v1/rss/enable/many", json=[1, 2, 3])
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_disable_many(self, authed_client):
|
||||
"""POST /rss/disable/many disables multiple feeds."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Disabled.", msg_zh="禁用成功。"
|
||||
)
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.disable_list.return_value = resp_model
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.post("/api/v1/rss/disable/many", json=[1, 2])
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_delete_many(self, authed_client):
|
||||
"""POST /rss/delete/many deletes multiple feeds."""
|
||||
resp_model = ResponseModel(
|
||||
status=True, status_code=200, msg_en="Deleted.", msg_zh="删除成功。"
|
||||
)
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.delete_list.return_value = resp_model
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.post("/api/v1/rss/delete/many", json=[1, 2])
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PATCH /rss/update/{id}
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestUpdateRss:
|
||||
def test_update_success(self, authed_client):
|
||||
"""PATCH /rss/update/{id} updates feed."""
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.update.return_value = True
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.patch(
|
||||
"/api/v1/rss/update/1",
|
||||
json={"name": "Updated Name", "aggregate": False},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# GET /rss/refresh/*
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRefreshRss:
|
||||
def test_refresh_all(self, authed_client):
|
||||
"""GET /rss/refresh/all triggers engine.refresh_rss."""
|
||||
with patch("module.api.rss.DownloadClient") as MockClient:
|
||||
mock_client = AsyncMock()
|
||||
MockClient.return_value.__aenter__ = AsyncMock(return_value=mock_client)
|
||||
MockClient.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.refresh_rss = AsyncMock()
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/rss/refresh/all")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_refresh_single(self, authed_client):
|
||||
"""GET /rss/refresh/{id} refreshes specific feed."""
|
||||
with patch("module.api.rss.DownloadClient") as MockClient:
|
||||
mock_client = AsyncMock()
|
||||
MockClient.return_value.__aenter__ = AsyncMock(return_value=mock_client)
|
||||
MockClient.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.refresh_rss = AsyncMock()
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/rss/refresh/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# GET /rss/torrent/{id}
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGetRssTorrents:
|
||||
def test_get_torrents(self, authed_client):
|
||||
"""GET /rss/torrent/{id} returns torrents for that feed."""
|
||||
torrents = [make_torrent(id=1, rss_id=1), make_torrent(id=2, rss_id=1)]
|
||||
with patch("module.api.rss.RSSEngine") as MockEngine:
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.get_rss_torrents.return_value = torrents
|
||||
MockEngine.return_value.__enter__ = MagicMock(return_value=mock_eng)
|
||||
MockEngine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
response = authed_client.get("/api/v1/rss/torrent/1")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
201
backend/src/test/test_auth.py
Normal file
201
backend/src/test/test_auth.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""Tests for authentication: JWT tokens, password hashing, login flow."""
|
||||
|
||||
import pytest
|
||||
from datetime import timedelta
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from jose import JWTError
|
||||
|
||||
from module.security.jwt import (
|
||||
create_access_token,
|
||||
decode_token,
|
||||
verify_token,
|
||||
verify_password,
|
||||
get_password_hash,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# JWT Token Creation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestCreateAccessToken:
|
||||
def test_creates_valid_token(self):
|
||||
"""create_access_token returns a decodable JWT with sub claim."""
|
||||
token = create_access_token(data={"sub": "testuser"})
|
||||
assert token is not None
|
||||
assert isinstance(token, str)
|
||||
assert len(token) > 0
|
||||
|
||||
def test_token_contains_sub_claim(self):
|
||||
"""Decoded token contains the 'sub' field."""
|
||||
token = create_access_token(data={"sub": "myuser"})
|
||||
payload = decode_token(token)
|
||||
assert payload is not None
|
||||
assert payload["sub"] == "myuser"
|
||||
|
||||
def test_token_contains_exp_claim(self):
|
||||
"""Decoded token contains 'exp' expiration field."""
|
||||
token = create_access_token(data={"sub": "user"})
|
||||
payload = decode_token(token)
|
||||
assert "exp" in payload
|
||||
|
||||
def test_custom_expiry(self):
|
||||
"""Custom expires_delta is respected."""
|
||||
token = create_access_token(
|
||||
data={"sub": "user"}, expires_delta=timedelta(hours=2)
|
||||
)
|
||||
payload = decode_token(token)
|
||||
assert payload is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Token Decoding
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDecodeToken:
|
||||
def test_valid_token(self):
|
||||
"""decode_token returns payload for valid token."""
|
||||
token = create_access_token(data={"sub": "testuser"})
|
||||
result = decode_token(token)
|
||||
assert result is not None
|
||||
assert result["sub"] == "testuser"
|
||||
|
||||
def test_invalid_token(self):
|
||||
"""decode_token returns None for invalid/garbage token."""
|
||||
result = decode_token("not.a.valid.jwt.token")
|
||||
assert result is None
|
||||
|
||||
def test_empty_token(self):
|
||||
"""decode_token returns None for empty string."""
|
||||
result = decode_token("")
|
||||
assert result is None
|
||||
|
||||
def test_missing_sub_claim(self):
|
||||
"""decode_token returns None when 'sub' claim is missing."""
|
||||
token = create_access_token(data={"other": "data"})
|
||||
result = decode_token(token)
|
||||
# sub is None so decode_token returns None
|
||||
assert result is None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Token Verification
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestVerifyToken:
|
||||
def test_valid_fresh_token(self):
|
||||
"""verify_token succeeds for a fresh token."""
|
||||
token = create_access_token(
|
||||
data={"sub": "user"}, expires_delta=timedelta(hours=1)
|
||||
)
|
||||
result = verify_token(token)
|
||||
assert result is not None
|
||||
assert result["sub"] == "user"
|
||||
|
||||
def test_expired_token_returns_none(self):
|
||||
"""verify_token returns None for expired token (caught by decode_token)."""
|
||||
token = create_access_token(
|
||||
data={"sub": "user"}, expires_delta=timedelta(seconds=-10)
|
||||
)
|
||||
# python-jose catches expired tokens during decode, so decode_token
|
||||
# returns None, and verify_token propagates that as None
|
||||
result = verify_token(token)
|
||||
assert result is None
|
||||
|
||||
def test_invalid_token_returns_none(self):
|
||||
"""verify_token returns None for invalid token (decode fails)."""
|
||||
result = verify_token("garbage.token.string")
|
||||
assert result is None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Password Hashing
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestPasswordHashing:
|
||||
def test_hash_and_verify_roundtrip(self):
|
||||
"""get_password_hash then verify_password returns True."""
|
||||
password = "my_secure_password"
|
||||
hashed = get_password_hash(password)
|
||||
assert verify_password(password, hashed) is True
|
||||
|
||||
def test_wrong_password(self):
|
||||
"""verify_password with wrong password returns False."""
|
||||
hashed = get_password_hash("correct_password")
|
||||
assert verify_password("wrong_password", hashed) is False
|
||||
|
||||
def test_hash_is_not_plaintext(self):
|
||||
"""Hash is not equal to the plaintext password."""
|
||||
password = "my_password"
|
||||
hashed = get_password_hash(password)
|
||||
assert hashed != password
|
||||
|
||||
def test_different_hashes_for_same_password(self):
|
||||
"""Bcrypt produces different hashes for the same password (salt)."""
|
||||
password = "same_password"
|
||||
hash1 = get_password_hash(password)
|
||||
hash2 = get_password_hash(password)
|
||||
assert hash1 != hash2
|
||||
# Both still verify correctly
|
||||
assert verify_password(password, hash1) is True
|
||||
assert verify_password(password, hash2) is True
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# API Auth Flow (get_current_user)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGetCurrentUser:
|
||||
async def test_no_cookie_raises_401(self):
|
||||
"""get_current_user raises 401 when no token cookie."""
|
||||
from module.security.api import get_current_user
|
||||
from fastapi import HTTPException
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await get_current_user(token=None)
|
||||
assert exc_info.value.status_code == 401
|
||||
|
||||
async def test_invalid_token_raises_401(self):
|
||||
"""get_current_user raises 401 for invalid token."""
|
||||
from module.security.api import get_current_user
|
||||
from fastapi import HTTPException
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await get_current_user(token="invalid.jwt.token")
|
||||
assert exc_info.value.status_code == 401
|
||||
|
||||
async def test_valid_token_user_not_active(self):
|
||||
"""get_current_user raises 401 when user not in active_user list."""
|
||||
from module.security.api import get_current_user, active_user
|
||||
from fastapi import HTTPException
|
||||
|
||||
token = create_access_token(
|
||||
data={"sub": "ghost_user"}, expires_delta=timedelta(hours=1)
|
||||
)
|
||||
active_user.clear()
|
||||
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await get_current_user(token=token)
|
||||
assert exc_info.value.status_code == 401
|
||||
|
||||
async def test_valid_token_active_user_succeeds(self):
|
||||
"""get_current_user returns username for valid token + active user."""
|
||||
from module.security.api import get_current_user, active_user
|
||||
|
||||
token = create_access_token(
|
||||
data={"sub": "active_user"}, expires_delta=timedelta(hours=1)
|
||||
)
|
||||
active_user.clear()
|
||||
active_user.append("active_user")
|
||||
|
||||
result = await get_current_user(token=token)
|
||||
assert result == "active_user"
|
||||
|
||||
# Cleanup
|
||||
active_user.clear()
|
||||
230
backend/src/test/test_config.py
Normal file
230
backend/src/test/test_config.py
Normal file
@@ -0,0 +1,230 @@
|
||||
"""Tests for configuration: loading, env overrides, defaults, migration."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from module.models.config import (
|
||||
Config,
|
||||
Program,
|
||||
Downloader,
|
||||
RSSParser,
|
||||
BangumiManage,
|
||||
Proxy,
|
||||
Notification as NotificationConfig,
|
||||
)
|
||||
from module.conf.config import Settings
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Config model defaults
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestConfigDefaults:
|
||||
def test_program_defaults(self):
|
||||
"""Program has correct default values."""
|
||||
config = Config()
|
||||
assert config.program.rss_time == 900
|
||||
assert config.program.rename_time == 60
|
||||
assert config.program.webui_port == 7892
|
||||
|
||||
def test_downloader_defaults(self):
|
||||
"""Downloader has correct default values."""
|
||||
config = Config()
|
||||
assert config.downloader.type == "qbittorrent"
|
||||
assert config.downloader.path == "/downloads/Bangumi"
|
||||
assert config.downloader.ssl is False
|
||||
|
||||
def test_rss_parser_defaults(self):
|
||||
"""RSSParser has correct default values."""
|
||||
config = Config()
|
||||
assert config.rss_parser.enable is True
|
||||
assert config.rss_parser.language == "zh"
|
||||
assert "720" in config.rss_parser.filter
|
||||
|
||||
def test_bangumi_manage_defaults(self):
|
||||
"""BangumiManage has correct default values."""
|
||||
config = Config()
|
||||
assert config.bangumi_manage.enable is True
|
||||
assert config.bangumi_manage.rename_method == "pn"
|
||||
assert config.bangumi_manage.group_tag is False
|
||||
assert config.bangumi_manage.remove_bad_torrent is False
|
||||
assert config.bangumi_manage.eps_complete is False
|
||||
|
||||
def test_proxy_defaults(self):
|
||||
"""Proxy is disabled by default."""
|
||||
config = Config()
|
||||
assert config.proxy.enable is False
|
||||
assert config.proxy.type == "http"
|
||||
|
||||
def test_notification_defaults(self):
|
||||
"""Notification is disabled by default."""
|
||||
config = Config()
|
||||
assert config.notification.enable is False
|
||||
assert config.notification.type == "telegram"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Config serialization
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestConfigSerialization:
|
||||
def test_dict_uses_alias(self):
|
||||
"""Config.dict() uses field aliases (by_alias=True)."""
|
||||
config = Config()
|
||||
d = config.dict()
|
||||
# Downloader uses alias 'host' not 'host_'
|
||||
assert "host" in d["downloader"]
|
||||
assert "host_" not in d["downloader"]
|
||||
|
||||
def test_roundtrip_json(self, tmp_path):
|
||||
"""Config can be serialized to JSON and loaded back."""
|
||||
config = Config()
|
||||
config_dict = config.dict()
|
||||
json_path = tmp_path / "config.json"
|
||||
with open(json_path, "w") as f:
|
||||
json.dump(config_dict, f)
|
||||
|
||||
with open(json_path, "r") as f:
|
||||
loaded = json.load(f)
|
||||
|
||||
loaded_config = Config.parse_obj(loaded)
|
||||
assert loaded_config.program.rss_time == config.program.rss_time
|
||||
assert loaded_config.downloader.type == config.downloader.type
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Settings._migrate_old_config
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestMigrateOldConfig:
|
||||
def test_sleep_time_to_rss_time(self):
|
||||
"""Migrates sleep_time → rss_time."""
|
||||
old_config = {
|
||||
"program": {"sleep_time": 1800},
|
||||
"rss_parser": {},
|
||||
}
|
||||
result = Settings._migrate_old_config(old_config)
|
||||
assert result["program"]["rss_time"] == 1800
|
||||
assert "sleep_time" not in result["program"]
|
||||
|
||||
def test_times_to_rename_time(self):
|
||||
"""Migrates times → rename_time."""
|
||||
old_config = {
|
||||
"program": {"times": 120},
|
||||
"rss_parser": {},
|
||||
}
|
||||
result = Settings._migrate_old_config(old_config)
|
||||
assert result["program"]["rename_time"] == 120
|
||||
assert "times" not in result["program"]
|
||||
|
||||
def test_removes_data_version(self):
|
||||
"""Removes deprecated data_version field."""
|
||||
old_config = {
|
||||
"program": {"data_version": 2},
|
||||
"rss_parser": {},
|
||||
}
|
||||
result = Settings._migrate_old_config(old_config)
|
||||
assert "data_version" not in result["program"]
|
||||
|
||||
def test_removes_deprecated_rss_parser_fields(self):
|
||||
"""Removes deprecated type, custom_url, token, enable_tmdb from rss_parser."""
|
||||
old_config = {
|
||||
"program": {},
|
||||
"rss_parser": {
|
||||
"type": "mikan",
|
||||
"custom_url": "https://custom.url",
|
||||
"token": "abc",
|
||||
"enable_tmdb": True,
|
||||
"enable": True,
|
||||
},
|
||||
}
|
||||
result = Settings._migrate_old_config(old_config)
|
||||
assert "type" not in result["rss_parser"]
|
||||
assert "custom_url" not in result["rss_parser"]
|
||||
assert "token" not in result["rss_parser"]
|
||||
assert "enable_tmdb" not in result["rss_parser"]
|
||||
assert result["rss_parser"]["enable"] is True
|
||||
|
||||
def test_no_migration_needed(self):
|
||||
"""Already-current config passes through unchanged."""
|
||||
current_config = {
|
||||
"program": {"rss_time": 900, "rename_time": 60},
|
||||
"rss_parser": {"enable": True},
|
||||
}
|
||||
result = Settings._migrate_old_config(current_config)
|
||||
assert result["program"]["rss_time"] == 900
|
||||
assert result["program"]["rename_time"] == 60
|
||||
|
||||
def test_both_old_and_new_fields(self):
|
||||
"""When both sleep_time and rss_time exist, removes sleep_time."""
|
||||
config = {
|
||||
"program": {"sleep_time": 1800, "rss_time": 900},
|
||||
"rss_parser": {},
|
||||
}
|
||||
result = Settings._migrate_old_config(config)
|
||||
assert result["program"]["rss_time"] == 900
|
||||
assert "sleep_time" not in result["program"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Settings.load from file
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestSettingsLoad:
|
||||
def test_load_from_json_file(self, tmp_path):
|
||||
"""Settings loads config from a JSON file when it exists."""
|
||||
config_data = Config().dict()
|
||||
config_data["program"]["rss_time"] = 1200 # Custom value
|
||||
config_file = tmp_path / "config.json"
|
||||
with open(config_file, "w") as f:
|
||||
json.dump(config_data, f)
|
||||
|
||||
with patch("module.conf.config.CONFIG_PATH", config_file):
|
||||
with patch("module.conf.config.VERSION", "3.2.0"):
|
||||
s = Settings.__new__(Settings)
|
||||
Config.__init__(s)
|
||||
s.load()
|
||||
|
||||
assert s.program.rss_time == 1200
|
||||
|
||||
def test_save_writes_json(self, tmp_path):
|
||||
"""settings.save() writes valid JSON to CONFIG_PATH."""
|
||||
config_file = tmp_path / "config_out.json"
|
||||
|
||||
with patch("module.conf.config.CONFIG_PATH", config_file):
|
||||
s = Settings.__new__(Settings)
|
||||
Config.__init__(s)
|
||||
s.save()
|
||||
|
||||
assert config_file.exists()
|
||||
with open(config_file) as f:
|
||||
data = json.load(f)
|
||||
assert "program" in data
|
||||
assert "downloader" in data
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Environment variable overrides
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestEnvOverrides:
|
||||
def test_downloader_host_from_env(self, tmp_path):
|
||||
"""AB_DOWNLOADER_HOST env var overrides downloader host."""
|
||||
config_file = tmp_path / "config.json"
|
||||
|
||||
env = {"AB_DOWNLOADER_HOST": "192.168.1.100:9090"}
|
||||
with patch.dict(os.environ, env, clear=False):
|
||||
with patch("module.conf.config.CONFIG_PATH", config_file):
|
||||
s = Settings.__new__(Settings)
|
||||
Config.__init__(s)
|
||||
s.init()
|
||||
|
||||
assert "192.168.1.100:9090" in s.downloader.host
|
||||
299
backend/src/test/test_download_client.py
Normal file
299
backend/src/test/test_download_client.py
Normal file
@@ -0,0 +1,299 @@
|
||||
"""Tests for DownloadClient: init, set_rule, add_torrent, rename, etc."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
|
||||
from module.models import Bangumi, Torrent
|
||||
from module.models.config import Config
|
||||
from module.downloader.download_client import DownloadClient
|
||||
|
||||
from test.factories import make_bangumi, make_torrent
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def download_client(mock_qb_client):
|
||||
"""Create a DownloadClient with mocked internal client."""
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost:8080"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
client = DownloadClient()
|
||||
client.client = mock_qb_client
|
||||
return client
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# auth
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestAuth:
|
||||
async def test_auth_success(self, download_client, mock_qb_client):
|
||||
"""auth() sets authed=True when client authenticates."""
|
||||
mock_qb_client.auth.return_value = True
|
||||
await download_client.auth()
|
||||
assert download_client.authed is True
|
||||
|
||||
async def test_auth_failure(self, download_client, mock_qb_client):
|
||||
"""auth() keeps authed=False when client fails."""
|
||||
mock_qb_client.auth.return_value = False
|
||||
await download_client.auth()
|
||||
assert download_client.authed is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# init_downloader
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestInitDownloader:
|
||||
async def test_sets_prefs_and_category(self, download_client, mock_qb_client):
|
||||
"""init_downloader calls prefs_init with RSS config and adds category."""
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
await download_client.init_downloader()
|
||||
|
||||
mock_qb_client.prefs_init.assert_called_once()
|
||||
prefs_arg = mock_qb_client.prefs_init.call_args[1]["prefs"]
|
||||
assert prefs_arg["rss_auto_downloading_enabled"] is True
|
||||
assert prefs_arg["rss_refresh_interval"] == 30
|
||||
mock_qb_client.add_category.assert_called_once_with("BangumiCollection")
|
||||
|
||||
async def test_detects_path_when_empty(self, download_client, mock_qb_client):
|
||||
"""When downloader.path is empty, fetches from app prefs."""
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.path = ""
|
||||
mock_qb_client.get_app_prefs.return_value = {"save_path": "/data"}
|
||||
await download_client.init_downloader()
|
||||
|
||||
assert mock_settings.downloader.path != ""
|
||||
assert "Bangumi" in mock_settings.downloader.path
|
||||
|
||||
async def test_category_already_exists_no_error(self, download_client, mock_qb_client):
|
||||
"""If category already exists, logs debug but doesn't crash."""
|
||||
mock_qb_client.add_category.side_effect = Exception("already exists")
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
# Should not raise
|
||||
await download_client.init_downloader()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# set_rule
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestSetRule:
|
||||
async def test_generates_correct_rule(self, download_client, mock_qb_client):
|
||||
"""set_rule creates a rule with correct mustContain and savePath."""
|
||||
bangumi = make_bangumi(
|
||||
title_raw="Mushoku Tensei",
|
||||
filter="720,480",
|
||||
official_title="Mushoku Tensei",
|
||||
season=2,
|
||||
year="2024",
|
||||
)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
await download_client.set_rule(bangumi)
|
||||
|
||||
mock_qb_client.rss_set_rule.assert_called_once()
|
||||
call_kwargs = mock_qb_client.rss_set_rule.call_args[1]
|
||||
rule = call_kwargs["rule_def"]
|
||||
assert rule["mustContain"] == "Mushoku Tensei"
|
||||
# filter string is joined char-by-char with "|" (this is how the code works)
|
||||
assert rule["mustNotContain"] == "|".join("720,480")
|
||||
assert rule["enable"] is True
|
||||
assert "Season 2" in rule["savePath"]
|
||||
|
||||
async def test_marks_bangumi_added(self, download_client, mock_qb_client):
|
||||
"""set_rule sets data.added=True after creating the rule."""
|
||||
bangumi = make_bangumi(added=False, filter="")
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
await download_client.set_rule(bangumi)
|
||||
|
||||
assert bangumi.added is True
|
||||
|
||||
async def test_rule_name_set(self, download_client, mock_qb_client):
|
||||
"""set_rule populates rule_name and save_path on the Bangumi."""
|
||||
bangumi = make_bangumi(
|
||||
official_title="My Anime",
|
||||
season=1,
|
||||
filter="",
|
||||
rule_name=None,
|
||||
save_path=None,
|
||||
)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
await download_client.set_rule(bangumi)
|
||||
|
||||
assert bangumi.rule_name is not None
|
||||
assert "My Anime" in bangumi.rule_name
|
||||
assert bangumi.save_path is not None
|
||||
|
||||
async def test_rule_name_with_group_tag(self, download_client, mock_qb_client):
|
||||
"""When group_tag=True, rule_name includes [group]."""
|
||||
bangumi = make_bangumi(
|
||||
official_title="My Anime",
|
||||
group_name="SubGroup",
|
||||
season=1,
|
||||
filter="",
|
||||
)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = True
|
||||
await download_client.set_rule(bangumi)
|
||||
|
||||
assert "[SubGroup]" in bangumi.rule_name
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# add_torrent
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestAddTorrent:
|
||||
async def test_magnet_url(self, download_client, mock_qb_client):
|
||||
"""Magnet URLs are passed as torrent_urls, no file download."""
|
||||
torrent = make_torrent(url="magnet:?xt=urn:btih:abc123")
|
||||
bangumi = make_bangumi()
|
||||
|
||||
with patch("module.downloader.download_client.RequestContent") as MockReq:
|
||||
mock_req = AsyncMock()
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_req)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
result = await download_client.add_torrent(torrent, bangumi)
|
||||
|
||||
assert result is True
|
||||
call_kwargs = mock_qb_client.add_torrents.call_args[1]
|
||||
assert call_kwargs["torrent_urls"] == "magnet:?xt=urn:btih:abc123"
|
||||
assert call_kwargs["torrent_files"] is None
|
||||
|
||||
async def test_file_url_downloads_content(self, download_client, mock_qb_client):
|
||||
"""Non-magnet URLs trigger file download and pass as torrent_files."""
|
||||
torrent = make_torrent(url="https://example.com/file.torrent")
|
||||
bangumi = make_bangumi()
|
||||
|
||||
with patch("module.downloader.download_client.RequestContent") as MockReq:
|
||||
mock_req = AsyncMock()
|
||||
mock_req.get_content = AsyncMock(return_value=b"torrent-file-data")
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_req)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
result = await download_client.add_torrent(torrent, bangumi)
|
||||
|
||||
assert result is True
|
||||
call_kwargs = mock_qb_client.add_torrents.call_args[1]
|
||||
assert call_kwargs["torrent_files"] == b"torrent-file-data"
|
||||
assert call_kwargs["torrent_urls"] is None
|
||||
|
||||
async def test_list_magnet_urls(self, download_client, mock_qb_client):
|
||||
"""List of magnet torrents are joined as list of URLs."""
|
||||
torrents = [
|
||||
make_torrent(url="magnet:?xt=urn:btih:aaa"),
|
||||
make_torrent(url="magnet:?xt=urn:btih:bbb"),
|
||||
make_torrent(url="magnet:?xt=urn:btih:ccc"),
|
||||
]
|
||||
bangumi = make_bangumi()
|
||||
|
||||
with patch("module.downloader.download_client.RequestContent") as MockReq:
|
||||
mock_req = AsyncMock()
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_req)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
result = await download_client.add_torrent(torrents, bangumi)
|
||||
|
||||
assert result is True
|
||||
call_kwargs = mock_qb_client.add_torrents.call_args[1]
|
||||
assert len(call_kwargs["torrent_urls"]) == 3
|
||||
|
||||
async def test_empty_list_returns_false(self, download_client, mock_qb_client):
|
||||
"""Empty torrent list returns False without calling client."""
|
||||
bangumi = make_bangumi()
|
||||
with patch("module.downloader.download_client.RequestContent") as MockReq:
|
||||
mock_req = AsyncMock()
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_req)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
result = await download_client.add_torrent([], bangumi)
|
||||
|
||||
assert result is False
|
||||
mock_qb_client.add_torrents.assert_not_called()
|
||||
|
||||
async def test_client_rejects_returns_false(self, download_client, mock_qb_client):
|
||||
"""When client.add_torrents returns False, returns False."""
|
||||
mock_qb_client.add_torrents.return_value = False
|
||||
torrent = make_torrent(url="magnet:?xt=urn:btih:abc")
|
||||
bangumi = make_bangumi()
|
||||
|
||||
with patch("module.downloader.download_client.RequestContent") as MockReq:
|
||||
mock_req = AsyncMock()
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_req)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
result = await download_client.add_torrent(torrent, bangumi)
|
||||
|
||||
assert result is False
|
||||
|
||||
async def test_generates_save_path_if_missing(self, download_client, mock_qb_client):
|
||||
"""When bangumi.save_path is empty, generates one."""
|
||||
torrent = make_torrent(url="magnet:?xt=urn:btih:abc")
|
||||
bangumi = make_bangumi(save_path=None)
|
||||
|
||||
with patch("module.downloader.download_client.RequestContent") as MockReq:
|
||||
mock_req = AsyncMock()
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_req)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
await download_client.add_torrent(torrent, bangumi)
|
||||
|
||||
assert bangumi.save_path is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# get_torrent_info / rename_torrent_file / delete_torrent
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestClientDelegation:
|
||||
async def test_get_torrent_info(self, download_client, mock_qb_client):
|
||||
"""get_torrent_info delegates to client.torrents_info."""
|
||||
mock_qb_client.torrents_info.return_value = [
|
||||
{"hash": "abc", "name": "test", "save_path": "/test"}
|
||||
]
|
||||
result = await download_client.get_torrent_info()
|
||||
mock_qb_client.torrents_info.assert_called_once_with(
|
||||
status_filter="completed", category="Bangumi", tag=None
|
||||
)
|
||||
assert len(result) == 1
|
||||
|
||||
async def test_rename_torrent_file_success(self, download_client, mock_qb_client):
|
||||
"""rename_torrent_file returns True on success."""
|
||||
mock_qb_client.torrents_rename_file.return_value = True
|
||||
result = await download_client.rename_torrent_file("hash1", "old.mkv", "new.mkv")
|
||||
assert result is True
|
||||
|
||||
async def test_rename_torrent_file_failure(self, download_client, mock_qb_client):
|
||||
"""rename_torrent_file returns False on failure."""
|
||||
mock_qb_client.torrents_rename_file.return_value = False
|
||||
result = await download_client.rename_torrent_file("hash1", "old.mkv", "new.mkv")
|
||||
assert result is False
|
||||
|
||||
async def test_delete_torrent(self, download_client, mock_qb_client):
|
||||
"""delete_torrent delegates to client.torrents_delete."""
|
||||
await download_client.delete_torrent("hash1", delete_files=True)
|
||||
mock_qb_client.torrents_delete.assert_called_once_with("hash1", delete_files=True)
|
||||
370
backend/src/test/test_integration.py
Normal file
370
backend/src/test/test_integration.py
Normal file
@@ -0,0 +1,370 @@
|
||||
"""Integration tests: end-to-end flows with real DB and mocked externals."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
|
||||
from sqlmodel import Session, SQLModel, create_engine
|
||||
|
||||
from module.database.bangumi import BangumiDatabase, _invalidate_bangumi_cache
|
||||
from module.database.rss import RSSDatabase
|
||||
from module.database.torrent import TorrentDatabase
|
||||
from module.models import Bangumi, EpisodeFile, Notification, RSSItem, Torrent
|
||||
from module.rss.engine import RSSEngine
|
||||
|
||||
from test.factories import make_bangumi, make_torrent, make_rss_item
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def clear_cache():
|
||||
_invalidate_bangumi_cache()
|
||||
yield
|
||||
_invalidate_bangumi_cache()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# RSS → Download Flow
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRssToDownloadFlow:
|
||||
"""End-to-end: RSS feed parsed → matched → downloaded → stored in DB."""
|
||||
|
||||
async def test_full_flow(self, db_engine):
|
||||
"""Complete RSS → match → download pipeline."""
|
||||
# 1. Setup: create engine with real in-memory DB
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
# 2. Add RSS feed and Bangumi to DB
|
||||
rss_item = make_rss_item(name="My Feed", url="https://mikanani.me/RSS/test")
|
||||
engine.rss.add(rss_item)
|
||||
|
||||
bangumi = make_bangumi(
|
||||
title_raw="Mushoku Tensei",
|
||||
official_title="Mushoku Tensei",
|
||||
filter="",
|
||||
added=True,
|
||||
)
|
||||
engine.bangumi.add(bangumi)
|
||||
|
||||
# 3. Mock the HTTP layer to return new torrents
|
||||
new_torrents = [
|
||||
Torrent(
|
||||
name="[Sub] Mushoku Tensei - 11 [1080p].mkv",
|
||||
url="https://example.com/ep11.torrent",
|
||||
),
|
||||
Torrent(
|
||||
name="[Sub] Mushoku Tensei - 12 [1080p].mkv",
|
||||
url="https://example.com/ep12.torrent",
|
||||
),
|
||||
Torrent(
|
||||
name="[Other] Unknown Anime - 01 [720p].mkv",
|
||||
url="https://example.com/unknown.torrent",
|
||||
),
|
||||
]
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = new_torrents
|
||||
|
||||
# 4. Mock download client
|
||||
mock_client = AsyncMock()
|
||||
mock_client.add_torrent = AsyncMock(return_value=True)
|
||||
|
||||
# 5. Execute refresh_rss
|
||||
await engine.refresh_rss(mock_client)
|
||||
|
||||
# 6. Verify: matched torrents were downloaded
|
||||
assert mock_client.add_torrent.call_count == 2
|
||||
|
||||
# 7. Verify: all torrents stored in DB
|
||||
all_torrents = engine.torrent.search_all()
|
||||
assert len(all_torrents) == 3
|
||||
|
||||
# 8. Verify: matched torrents are marked downloaded
|
||||
downloaded = [t for t in all_torrents if t.downloaded]
|
||||
assert len(downloaded) == 2
|
||||
# All downloaded torrents should contain "Mushoku Tensei"
|
||||
for t in downloaded:
|
||||
assert "Mushoku Tensei" in t.name
|
||||
|
||||
# 9. Verify: unmatched torrent is NOT downloaded
|
||||
not_downloaded = [t for t in all_torrents if not t.downloaded]
|
||||
assert len(not_downloaded) == 1
|
||||
assert "Unknown Anime" in not_downloaded[0].name
|
||||
|
||||
async def test_filtered_torrents_not_downloaded(self, db_engine):
|
||||
"""Torrents matching the filter regex are NOT downloaded."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
rss_item = make_rss_item()
|
||||
engine.rss.add(rss_item)
|
||||
|
||||
# Bangumi has filter="720" to exclude 720p
|
||||
bangumi = make_bangumi(
|
||||
title_raw="Mushoku Tensei",
|
||||
filter="720",
|
||||
)
|
||||
engine.bangumi.add(bangumi)
|
||||
|
||||
torrents = [
|
||||
Torrent(
|
||||
name="[Sub] Mushoku Tensei - 01 [720p].mkv",
|
||||
url="https://example.com/720.torrent",
|
||||
),
|
||||
Torrent(
|
||||
name="[Sub] Mushoku Tensei - 01 [1080p].mkv",
|
||||
url="https://example.com/1080.torrent",
|
||||
),
|
||||
]
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = torrents
|
||||
mock_client = AsyncMock()
|
||||
mock_client.add_torrent = AsyncMock(return_value=True)
|
||||
await engine.refresh_rss(mock_client)
|
||||
|
||||
# Only 1080p should be downloaded (720p is filtered)
|
||||
assert mock_client.add_torrent.call_count == 1
|
||||
|
||||
async def test_duplicate_torrents_not_reprocessed(self, db_engine):
|
||||
"""Torrents already in the DB are not processed again."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
rss_item = make_rss_item()
|
||||
engine.rss.add(rss_item)
|
||||
|
||||
bangumi = make_bangumi(title_raw="Anime", filter="")
|
||||
engine.bangumi.add(bangumi)
|
||||
|
||||
# Pre-insert a torrent
|
||||
existing = Torrent(
|
||||
name="[Sub] Anime - 01 [1080p].mkv",
|
||||
url="https://example.com/ep01.torrent",
|
||||
downloaded=True,
|
||||
)
|
||||
engine.torrent.add(existing)
|
||||
|
||||
# Mock returns same torrent + a new one
|
||||
torrents = [
|
||||
Torrent(
|
||||
name="[Sub] Anime - 01 [1080p].mkv",
|
||||
url="https://example.com/ep01.torrent",
|
||||
),
|
||||
Torrent(
|
||||
name="[Sub] Anime - 02 [1080p].mkv",
|
||||
url="https://example.com/ep02.torrent",
|
||||
),
|
||||
]
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = torrents
|
||||
mock_client = AsyncMock()
|
||||
mock_client.add_torrent = AsyncMock(return_value=True)
|
||||
await engine.refresh_rss(mock_client)
|
||||
|
||||
# Only ep02 should be downloaded (ep01 already exists)
|
||||
assert mock_client.add_torrent.call_count == 1
|
||||
all_torrents = engine.torrent.search_all()
|
||||
assert len(all_torrents) == 2 # original + new one
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Rename Flow
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRenameFlow:
|
||||
"""End-to-end: completed torrent → parse → rename → notification."""
|
||||
|
||||
async def test_single_file_rename(self, mock_qb_client):
|
||||
"""Single-file torrent is parsed and renamed correctly."""
|
||||
from module.manager.renamer import Renamer
|
||||
|
||||
# Setup renamer with mocked client
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
renamer = Renamer()
|
||||
renamer.client = mock_qb_client
|
||||
|
||||
# Mock completed torrent info
|
||||
mock_qb_client.torrents_info.return_value = [
|
||||
{
|
||||
"hash": "abc123",
|
||||
"name": "[Lilith-Raws] Mushoku Tensei - 11 [1080p].mkv",
|
||||
"save_path": "/downloads/Bangumi/Mushoku Tensei (2024)/Season 1",
|
||||
}
|
||||
]
|
||||
mock_qb_client.torrents_files.return_value = [
|
||||
{"name": "[Lilith-Raws] Mushoku Tensei - 11 [1080p].mkv"}
|
||||
]
|
||||
mock_qb_client.torrents_rename_file.return_value = True
|
||||
|
||||
ep = EpisodeFile(
|
||||
media_path="[Lilith-Raws] Mushoku Tensei - 11 [1080p].mkv",
|
||||
title="Mushoku Tensei",
|
||||
season=1,
|
||||
episode=11,
|
||||
suffix=".mkv",
|
||||
)
|
||||
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=ep):
|
||||
with patch("module.manager.renamer.settings") as mock_mgr_settings:
|
||||
mock_mgr_settings.bangumi_manage.rename_method = "pn"
|
||||
mock_mgr_settings.bangumi_manage.remove_bad_torrent = False
|
||||
with patch("module.downloader.path.settings") as mock_path_settings:
|
||||
mock_path_settings.downloader.path = "/downloads/Bangumi"
|
||||
result = await renamer.rename()
|
||||
|
||||
# Verify: file was renamed
|
||||
mock_qb_client.torrents_rename_file.assert_called_once()
|
||||
call_args = mock_qb_client.torrents_rename_file.call_args
|
||||
assert "S01E11" in str(call_args)
|
||||
|
||||
# Verify: notification returned
|
||||
assert len(result) == 1
|
||||
assert result[0].official_title == "Mushoku Tensei (2024)"
|
||||
assert result[0].episode == 11
|
||||
|
||||
async def test_collection_rename(self, mock_qb_client):
|
||||
"""Multi-file torrent is treated as collection and re-categorized."""
|
||||
from module.manager.renamer import Renamer
|
||||
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
renamer = Renamer()
|
||||
renamer.client = mock_qb_client
|
||||
|
||||
mock_qb_client.torrents_info.return_value = [
|
||||
{
|
||||
"hash": "batch_hash",
|
||||
"name": "Anime Batch",
|
||||
"save_path": "/downloads/Bangumi/Anime (2024)/Season 1",
|
||||
}
|
||||
]
|
||||
mock_qb_client.torrents_files.return_value = [
|
||||
{"name": "ep01.mkv"},
|
||||
{"name": "ep02.mkv"},
|
||||
{"name": "ep03.mkv"},
|
||||
]
|
||||
mock_qb_client.torrents_rename_file.return_value = True
|
||||
|
||||
def mock_parser(torrent_path, season, **kwargs):
|
||||
ep_num = int(torrent_path.replace("ep", "").replace(".mkv", ""))
|
||||
return EpisodeFile(
|
||||
media_path=torrent_path,
|
||||
title="Anime",
|
||||
season=season,
|
||||
episode=ep_num,
|
||||
suffix=".mkv",
|
||||
)
|
||||
|
||||
with patch.object(renamer._parser, "torrent_parser", side_effect=mock_parser):
|
||||
with patch("module.manager.renamer.settings") as mock_mgr_settings:
|
||||
mock_mgr_settings.bangumi_manage.rename_method = "pn"
|
||||
mock_mgr_settings.bangumi_manage.remove_bad_torrent = False
|
||||
with patch("module.downloader.path.settings") as mock_path_settings:
|
||||
mock_path_settings.downloader.path = "/downloads/Bangumi"
|
||||
await renamer.rename()
|
||||
|
||||
# Verify: all 3 files renamed
|
||||
assert mock_qb_client.torrents_rename_file.call_count == 3
|
||||
# Verify: category set to BangumiCollection
|
||||
mock_qb_client.set_category.assert_called_once_with(
|
||||
"batch_hash", "BangumiCollection"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Database Consistency
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDatabaseConsistency:
|
||||
"""Verify database operations maintain data integrity across operations."""
|
||||
|
||||
def test_bangumi_uniqueness_by_title_raw(self, db_engine):
|
||||
"""Cannot add two Bangumi with same title_raw."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
b1 = make_bangumi(title_raw="Same Title", official_title="First")
|
||||
b2 = make_bangumi(title_raw="Same Title", official_title="Second")
|
||||
|
||||
assert engine.bangumi.add(b1) is True
|
||||
assert engine.bangumi.add(b2) is False # Duplicate rejected
|
||||
|
||||
all_bangumi = engine.bangumi.search_all()
|
||||
assert len(all_bangumi) == 1
|
||||
assert all_bangumi[0].official_title == "First"
|
||||
|
||||
def test_rss_uniqueness_by_url(self, db_engine):
|
||||
"""Cannot add two RSSItems with same URL."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
r1 = make_rss_item(url="https://same.url/rss", name="First")
|
||||
r2 = make_rss_item(url="https://same.url/rss", name="Second")
|
||||
|
||||
assert engine.rss.add(r1) is True
|
||||
assert engine.rss.add(r2) is False
|
||||
|
||||
def test_torrent_check_new_filters_duplicates(self, db_engine):
|
||||
"""check_new only returns torrents not already in the database."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
existing = Torrent(name="existing", url="https://existing.com")
|
||||
engine.torrent.add(existing)
|
||||
|
||||
candidates = [
|
||||
Torrent(name="existing", url="https://existing.com"),
|
||||
Torrent(name="new1", url="https://new1.com"),
|
||||
Torrent(name="new2", url="https://new2.com"),
|
||||
]
|
||||
new_ones = engine.torrent.check_new(candidates)
|
||||
assert len(new_ones) == 2
|
||||
assert all(t.url != "https://existing.com" for t in new_ones)
|
||||
|
||||
def test_match_torrent_respects_deleted_flag(self, db_engine):
|
||||
"""Deleted bangumi are not matched by match_torrent."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
bangumi = make_bangumi(title_raw="Deleted Anime", filter="", deleted=True)
|
||||
engine.bangumi.add(bangumi)
|
||||
|
||||
torrent = Torrent(
|
||||
name="[Sub] Deleted Anime - 01 [1080p].mkv",
|
||||
url="https://test.com",
|
||||
)
|
||||
result = engine.match_torrent(torrent)
|
||||
assert result is None
|
||||
|
||||
def test_bangumi_disable_and_enable(self, db_engine):
|
||||
"""disable_rule and re-enabling preserves data."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
|
||||
bangumi = make_bangumi(title_raw="My Anime", deleted=False)
|
||||
engine.bangumi.add(bangumi)
|
||||
bangumi_id = engine.bangumi.search_all()[0].id
|
||||
|
||||
# Disable
|
||||
engine.bangumi.disable_rule(bangumi_id)
|
||||
disabled = engine.bangumi.search_id(bangumi_id)
|
||||
assert disabled.deleted is True
|
||||
|
||||
# Torrent matching should now fail
|
||||
torrent = Torrent(name="[Sub] My Anime - 01.mkv", url="https://test.com")
|
||||
assert engine.match_torrent(torrent) is None
|
||||
132
backend/src/test/test_notification.py
Normal file
132
backend/src/test/test_notification.py
Normal file
@@ -0,0 +1,132 @@
|
||||
"""Tests for notification: client factory, send_msg, poster lookup."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
|
||||
from module.models import Notification
|
||||
from module.notification.notification import getClient, PostNotification
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# getClient factory
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGetClient:
|
||||
def test_telegram(self):
|
||||
"""Returns TelegramNotification for 'telegram' type."""
|
||||
from module.notification.plugin import TelegramNotification
|
||||
|
||||
result = getClient("telegram")
|
||||
assert result is TelegramNotification
|
||||
|
||||
def test_bark(self):
|
||||
"""Returns BarkNotification for 'bark' type."""
|
||||
from module.notification.plugin import BarkNotification
|
||||
|
||||
result = getClient("bark")
|
||||
assert result is BarkNotification
|
||||
|
||||
def test_server_chan(self):
|
||||
"""Returns ServerChanNotification for 'server-chan' type."""
|
||||
from module.notification.plugin import ServerChanNotification
|
||||
|
||||
result = getClient("server-chan")
|
||||
assert result is ServerChanNotification
|
||||
|
||||
def test_wecom(self):
|
||||
"""Returns WecomNotification for 'wecom' type."""
|
||||
from module.notification.plugin import WecomNotification
|
||||
|
||||
result = getClient("wecom")
|
||||
assert result is WecomNotification
|
||||
|
||||
def test_unknown_type(self):
|
||||
"""Returns None for unknown notification type."""
|
||||
result = getClient("unknown_service")
|
||||
assert result is None
|
||||
|
||||
def test_case_insensitive(self):
|
||||
"""Type matching is case-insensitive."""
|
||||
from module.notification.plugin import TelegramNotification
|
||||
|
||||
assert getClient("Telegram") is TelegramNotification
|
||||
assert getClient("TELEGRAM") is TelegramNotification
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PostNotification
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestPostNotification:
|
||||
@pytest.fixture
|
||||
def mock_notifier(self):
|
||||
"""Create a mocked notifier instance."""
|
||||
notifier = AsyncMock()
|
||||
notifier.post_msg = AsyncMock()
|
||||
notifier.__aenter__ = AsyncMock(return_value=notifier)
|
||||
notifier.__aexit__ = AsyncMock(return_value=False)
|
||||
return notifier
|
||||
|
||||
@pytest.fixture
|
||||
def post_notification(self, mock_notifier):
|
||||
"""Create PostNotification with mocked notifier."""
|
||||
with patch("module.notification.notification.settings") as mock_settings:
|
||||
mock_settings.notification.type = "telegram"
|
||||
mock_settings.notification.token = "test_token"
|
||||
mock_settings.notification.chat_id = "12345"
|
||||
with patch(
|
||||
"module.notification.notification.getClient"
|
||||
) as mock_get_client:
|
||||
MockClass = MagicMock()
|
||||
MockClass.return_value = mock_notifier
|
||||
mock_get_client.return_value = MockClass
|
||||
pn = PostNotification()
|
||||
pn.notifier = mock_notifier
|
||||
return pn
|
||||
|
||||
async def test_send_msg_success(self, post_notification, mock_notifier):
|
||||
"""send_msg calls notifier.post_msg and succeeds."""
|
||||
notify = Notification(official_title="Test Anime", season=1, episode=5)
|
||||
|
||||
with patch.object(PostNotification, "_get_poster"):
|
||||
result = await post_notification.send_msg(notify)
|
||||
|
||||
mock_notifier.post_msg.assert_called_once_with(notify)
|
||||
|
||||
async def test_send_msg_failure_no_crash(self, post_notification, mock_notifier):
|
||||
"""send_msg catches exceptions and returns False."""
|
||||
mock_notifier.post_msg.side_effect = Exception("Network error")
|
||||
notify = Notification(official_title="Test Anime", season=1, episode=5)
|
||||
|
||||
with patch.object(PostNotification, "_get_poster"):
|
||||
result = await post_notification.send_msg(notify)
|
||||
|
||||
assert result is False
|
||||
|
||||
def test_get_poster_sets_path(self):
|
||||
"""_get_poster queries DB and sets poster_path on notification."""
|
||||
notify = Notification(official_title="My Anime", season=1, episode=1)
|
||||
|
||||
with patch("module.notification.notification.Database") as MockDB:
|
||||
mock_db = MagicMock()
|
||||
mock_db.bangumi.match_poster.return_value = "/posters/my_anime.jpg"
|
||||
MockDB.return_value.__enter__ = MagicMock(return_value=mock_db)
|
||||
MockDB.return_value.__exit__ = MagicMock(return_value=False)
|
||||
PostNotification._get_poster(notify)
|
||||
|
||||
assert notify.poster_path == "/posters/my_anime.jpg"
|
||||
|
||||
def test_get_poster_empty_when_not_found(self):
|
||||
"""_get_poster sets empty string when no poster found in DB."""
|
||||
notify = Notification(official_title="Unknown", season=1, episode=1)
|
||||
|
||||
with patch("module.notification.notification.Database") as MockDB:
|
||||
mock_db = MagicMock()
|
||||
mock_db.bangumi.match_poster.return_value = ""
|
||||
MockDB.return_value.__enter__ = MagicMock(return_value=mock_db)
|
||||
MockDB.return_value.__exit__ = MagicMock(return_value=False)
|
||||
PostNotification._get_poster(notify)
|
||||
|
||||
assert notify.poster_path == ""
|
||||
221
backend/src/test/test_path.py
Normal file
221
backend/src/test/test_path.py
Normal file
@@ -0,0 +1,221 @@
|
||||
"""Tests for TorrentPath: save path generation, file classification, parsing."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
|
||||
from module.downloader.path import TorrentPath
|
||||
from module.models import Bangumi, BangumiUpdate
|
||||
|
||||
from test.factories import make_bangumi
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def torrent_path():
|
||||
return TorrentPath()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _gen_save_path
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGenSavePath:
|
||||
def test_with_year(self):
|
||||
"""Save path includes (year) when year is set."""
|
||||
bangumi = make_bangumi(official_title="My Anime", year="2024", season=2)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
result = TorrentPath._gen_save_path(bangumi)
|
||||
|
||||
assert "My Anime (2024)" in result
|
||||
assert "Season 2" in result
|
||||
|
||||
def test_without_year(self):
|
||||
"""Save path omits year parentheses when year is None."""
|
||||
bangumi = make_bangumi(official_title="My Anime", year=None, season=1)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
result = TorrentPath._gen_save_path(bangumi)
|
||||
|
||||
assert "My Anime" in result
|
||||
assert "()" not in result
|
||||
assert "Season 1" in result
|
||||
|
||||
def test_season_formatting(self):
|
||||
"""Season is a plain integer, not zero-padded in path."""
|
||||
bangumi = make_bangumi(season=10)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
result = TorrentPath._gen_save_path(bangumi)
|
||||
|
||||
assert "Season 10" in result
|
||||
|
||||
def test_with_different_base_path(self):
|
||||
"""Works with different base download path."""
|
||||
bangumi = make_bangumi(official_title="Test", year="2025", season=3)
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/mnt/media/Bangumi"
|
||||
result = TorrentPath._gen_save_path(bangumi)
|
||||
|
||||
assert result.startswith("/mnt/media/Bangumi")
|
||||
assert "Test (2025)" in result
|
||||
assert "Season 3" in result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _rule_name
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRuleName:
|
||||
def test_without_group_tag(self):
|
||||
"""Rule name without group tag is just title and season."""
|
||||
bangumi = make_bangumi(official_title="My Anime", season=1, group_name="Sub")
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
result = TorrentPath._rule_name(bangumi)
|
||||
|
||||
assert result == "My Anime S1"
|
||||
|
||||
def test_with_group_tag(self):
|
||||
"""Rule name with group tag includes [group] prefix."""
|
||||
bangumi = make_bangumi(official_title="My Anime", season=2, group_name="SubGroup")
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.group_tag = True
|
||||
result = TorrentPath._rule_name(bangumi)
|
||||
|
||||
assert result == "[SubGroup] My Anime S2"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# check_files
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestCheckFiles:
|
||||
def test_separates_media_and_subtitles(self):
|
||||
"""Media files (.mp4/.mkv) and subtitle files (.ass/.srt) are separated."""
|
||||
files = [
|
||||
{"name": "episode01.mkv"},
|
||||
{"name": "episode01.ass"},
|
||||
{"name": "episode02.mp4"},
|
||||
{"name": "episode02.srt"},
|
||||
]
|
||||
media, subs = TorrentPath.check_files(files)
|
||||
|
||||
assert len(media) == 2
|
||||
assert "episode01.mkv" in media
|
||||
assert "episode02.mp4" in media
|
||||
assert len(subs) == 2
|
||||
assert "episode01.ass" in subs
|
||||
assert "episode02.srt" in subs
|
||||
|
||||
def test_ignores_other_extensions(self):
|
||||
"""Files with non-media, non-subtitle extensions are ignored."""
|
||||
files = [
|
||||
{"name": "episode.mkv"},
|
||||
{"name": "readme.txt"},
|
||||
{"name": "info.nfo"},
|
||||
{"name": "cover.jpg"},
|
||||
]
|
||||
media, subs = TorrentPath.check_files(files)
|
||||
|
||||
assert len(media) == 1
|
||||
assert len(subs) == 0
|
||||
|
||||
def test_case_insensitive_extensions(self):
|
||||
"""Extension matching is case-insensitive."""
|
||||
files = [
|
||||
{"name": "episode.MKV"},
|
||||
{"name": "episode.MP4"},
|
||||
{"name": "sub.ASS"},
|
||||
{"name": "sub.SRT"},
|
||||
]
|
||||
media, subs = TorrentPath.check_files(files)
|
||||
|
||||
assert len(media) == 2
|
||||
assert len(subs) == 2
|
||||
|
||||
def test_empty_file_list(self):
|
||||
"""Empty file list returns empty lists."""
|
||||
media, subs = TorrentPath.check_files([])
|
||||
assert media == []
|
||||
assert subs == []
|
||||
|
||||
def test_nested_paths(self):
|
||||
"""Files in subdirectories are handled correctly."""
|
||||
files = [
|
||||
{"name": "Season 1/episode01.mkv"},
|
||||
{"name": "Subs/episode01.ass"},
|
||||
]
|
||||
media, subs = TorrentPath.check_files(files)
|
||||
|
||||
assert len(media) == 1
|
||||
assert len(subs) == 1
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _path_to_bangumi
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestPathToBangumi:
|
||||
def test_extracts_name_and_season(self):
|
||||
"""Parses save_path to extract bangumi name and season number."""
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
tp = TorrentPath()
|
||||
name, season = tp._path_to_bangumi(
|
||||
"/downloads/Bangumi/My Anime (2024)/Season 2"
|
||||
)
|
||||
|
||||
assert name == "My Anime (2024)"
|
||||
assert season == 2
|
||||
|
||||
def test_season_1_default(self):
|
||||
"""When no Season pattern found, defaults to season 1."""
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
tp = TorrentPath()
|
||||
name, season = tp._path_to_bangumi("/downloads/Bangumi/My Anime (2024)")
|
||||
|
||||
assert name == "My Anime (2024)"
|
||||
assert season == 1
|
||||
|
||||
def test_s_prefix_pattern(self):
|
||||
"""Recognizes S01 style season naming."""
|
||||
with patch("module.downloader.path.settings") as mock_settings:
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
tp = TorrentPath()
|
||||
name, season = tp._path_to_bangumi("/downloads/Bangumi/Anime/S03")
|
||||
|
||||
assert season == 3
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# is_ep / _file_depth
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestIsEp:
|
||||
def test_shallow_file(self):
|
||||
"""File at depth 1 (just filename) is considered an episode."""
|
||||
tp = TorrentPath()
|
||||
assert tp.is_ep("episode.mkv") is True
|
||||
|
||||
def test_one_folder_deep(self):
|
||||
"""File at depth 2 (one folder) is still an episode."""
|
||||
tp = TorrentPath()
|
||||
assert tp.is_ep("Season 1/episode.mkv") is True
|
||||
|
||||
def test_too_deep(self):
|
||||
"""File at depth 3+ is NOT considered an episode."""
|
||||
tp = TorrentPath()
|
||||
assert tp.is_ep("a/b/episode.mkv") is False
|
||||
|
||||
def test_file_depth(self):
|
||||
"""_file_depth returns correct part count."""
|
||||
tp = TorrentPath()
|
||||
assert tp._file_depth("file.mkv") == 1
|
||||
assert tp._file_depth("a/file.mkv") == 2
|
||||
assert tp._file_depth("a/b/c/file.mkv") == 4
|
||||
472
backend/src/test/test_renamer.py
Normal file
472
backend/src/test/test_renamer.py
Normal file
@@ -0,0 +1,472 @@
|
||||
"""Tests for Renamer: gen_path, rename_file, rename_collection, rename flow."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
|
||||
from module.models import EpisodeFile, Notification, SubtitleFile
|
||||
from module.manager.renamer import Renamer
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# gen_path
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestGenPath:
|
||||
def test_pn_method(self):
|
||||
"""pn method: {title} S{ss}E{ee}{suffix}"""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mkv", title="My Anime", season=1, episode=5, suffix=".mkv"
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Bangumi Name", method="pn")
|
||||
assert result == "My Anime S01E05.mkv"
|
||||
|
||||
def test_advance_method(self):
|
||||
"""advance method: {bangumi_name} S{ss}E{ee}{suffix}"""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mkv", title="My Anime", season=2, episode=12, suffix=".mkv"
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Bangumi Name", method="advance")
|
||||
assert result == "Bangumi Name S02E12.mkv"
|
||||
|
||||
def test_none_method(self):
|
||||
"""none method: returns original media_path unchanged."""
|
||||
ep = EpisodeFile(
|
||||
media_path="original/path/file.mkv",
|
||||
title="Test",
|
||||
season=1,
|
||||
episode=1,
|
||||
suffix=".mkv",
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Bangumi", method="none")
|
||||
assert result == "original/path/file.mkv"
|
||||
|
||||
def test_subtitle_none_method(self):
|
||||
"""subtitle_none: returns original path unchanged."""
|
||||
sub = SubtitleFile(
|
||||
media_path="sub.ass",
|
||||
title="Test",
|
||||
season=1,
|
||||
episode=1,
|
||||
language="zh",
|
||||
suffix=".ass",
|
||||
)
|
||||
result = Renamer.gen_path(sub, "Bangumi", method="subtitle_none")
|
||||
assert result == "sub.ass"
|
||||
|
||||
def test_subtitle_pn_method(self):
|
||||
"""subtitle_pn: {title} S{ss}E{ee}.{language}{suffix}"""
|
||||
sub = SubtitleFile(
|
||||
media_path="sub.ass",
|
||||
title="My Anime",
|
||||
season=1,
|
||||
episode=3,
|
||||
language="zh",
|
||||
suffix=".ass",
|
||||
)
|
||||
result = Renamer.gen_path(sub, "Bangumi", method="subtitle_pn")
|
||||
assert result == "My Anime S01E03.zh.ass"
|
||||
|
||||
def test_subtitle_advance_method(self):
|
||||
"""subtitle_advance: {bangumi_name} S{ss}E{ee}.{language}{suffix}"""
|
||||
sub = SubtitleFile(
|
||||
media_path="sub.srt",
|
||||
title="My Anime",
|
||||
season=2,
|
||||
episode=7,
|
||||
language="zh-tw",
|
||||
suffix=".srt",
|
||||
)
|
||||
result = Renamer.gen_path(sub, "Bangumi Name", method="subtitle_advance")
|
||||
assert result == "Bangumi Name S02E07.zh-tw.srt"
|
||||
|
||||
def test_zero_padding_single_digit(self):
|
||||
"""Season and episode < 10 get zero-padded."""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mkv", title="Test", season=1, episode=9, suffix=".mkv"
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Test", method="pn")
|
||||
assert "S01E09" in result
|
||||
|
||||
def test_no_padding_double_digit(self):
|
||||
"""Season and episode >= 10 are NOT zero-padded."""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mkv", title="Test", season=10, episode=12, suffix=".mkv"
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Test", method="pn")
|
||||
assert "S10E12" in result
|
||||
|
||||
def test_unknown_method_returns_original(self):
|
||||
"""Unknown method returns original media_path."""
|
||||
ep = EpisodeFile(
|
||||
media_path="original.mkv", title="Test", season=1, episode=1, suffix=".mkv"
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Test", method="invalid_method")
|
||||
assert result == "original.mkv"
|
||||
|
||||
def test_mp4_suffix(self):
|
||||
"""Works with .mp4 suffix too."""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mp4", title="Test", season=1, episode=1, suffix=".mp4"
|
||||
)
|
||||
result = Renamer.gen_path(ep, "Test", method="pn")
|
||||
assert result.endswith(".mp4")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# rename_file
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRenameFile:
|
||||
@pytest.fixture
|
||||
def renamer(self, mock_qb_client):
|
||||
"""Create Renamer with mocked internals."""
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost:8080"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = False
|
||||
mock_settings.bangumi_manage.rename_method = "pn"
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
r = Renamer()
|
||||
r.client = mock_qb_client
|
||||
return r
|
||||
|
||||
async def test_successful_rename(self, renamer):
|
||||
"""rename_file parses, generates new path, renames, returns Notification."""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mkv", title="My Anime", season=1, episode=5, suffix=".mkv"
|
||||
)
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=ep):
|
||||
renamer.client.torrents_rename_file.return_value = True
|
||||
result = await renamer.rename_file(
|
||||
torrent_name="[Sub] My Anime - 05.mkv",
|
||||
media_path="old.mkv",
|
||||
bangumi_name="My Anime",
|
||||
method="pn",
|
||||
season=1,
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert isinstance(result, Notification)
|
||||
assert result.official_title == "My Anime"
|
||||
assert result.season == 1
|
||||
assert result.episode == 5
|
||||
|
||||
async def test_parse_fails_no_remove(self, renamer):
|
||||
"""When parser returns None and remove_bad_torrent=False, returns None."""
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=None):
|
||||
with patch("module.manager.renamer.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = False
|
||||
result = await renamer.rename_file(
|
||||
torrent_name="garbage",
|
||||
media_path="bad.mkv",
|
||||
bangumi_name="Test",
|
||||
method="pn",
|
||||
season=1,
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
assert result is None
|
||||
renamer.client.torrents_delete.assert_not_called()
|
||||
|
||||
async def test_parse_fails_remove_bad(self, renamer):
|
||||
"""When parser fails and remove_bad_torrent=True, deletes torrent."""
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=None):
|
||||
with patch("module.manager.renamer.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = True
|
||||
await renamer.rename_file(
|
||||
torrent_name="garbage",
|
||||
media_path="bad.mkv",
|
||||
bangumi_name="Test",
|
||||
method="pn",
|
||||
season=1,
|
||||
_hash="hash_bad",
|
||||
)
|
||||
|
||||
renamer.client.torrents_delete.assert_called_once_with(
|
||||
"hash_bad", delete_files=True
|
||||
)
|
||||
|
||||
async def test_same_path_skipped(self, renamer):
|
||||
"""When generated path equals current path, no rename occurs."""
|
||||
ep = EpisodeFile(
|
||||
media_path="My Anime S01E05.mkv",
|
||||
title="My Anime",
|
||||
season=1,
|
||||
episode=5,
|
||||
suffix=".mkv",
|
||||
)
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=ep):
|
||||
result = await renamer.rename_file(
|
||||
torrent_name="test",
|
||||
media_path="My Anime S01E05.mkv",
|
||||
bangumi_name="My Anime",
|
||||
method="pn",
|
||||
season=1,
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
assert result is None
|
||||
renamer.client.torrents_rename_file.assert_not_called()
|
||||
|
||||
async def test_duplicate_in_check_pool_skipped(self, renamer):
|
||||
"""When new_path is already in check_pool, skip rename."""
|
||||
ep = EpisodeFile(
|
||||
media_path="old.mkv", title="My Anime", season=1, episode=5, suffix=".mkv"
|
||||
)
|
||||
# Pre-populate check_pool with the expected new path
|
||||
renamer.check_pool["My Anime S01E05.mkv"] = True
|
||||
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=ep):
|
||||
result = await renamer.rename_file(
|
||||
torrent_name="test",
|
||||
media_path="old.mkv",
|
||||
bangumi_name="My Anime",
|
||||
method="pn",
|
||||
season=1,
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
assert result is None
|
||||
renamer.client.torrents_rename_file.assert_not_called()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# rename_collection
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRenameCollection:
|
||||
@pytest.fixture
|
||||
def renamer(self, mock_qb_client):
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost:8080"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = False
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
r = Renamer()
|
||||
r.client = mock_qb_client
|
||||
return r
|
||||
|
||||
async def test_renames_each_file(self, renamer):
|
||||
"""rename_collection iterates media_list and renames each valid file."""
|
||||
media_list = ["ep01.mkv", "ep02.mkv", "ep03.mkv"]
|
||||
|
||||
def mock_parser(torrent_path, season, **kwargs):
|
||||
ep_num = int(torrent_path.replace("ep", "").replace(".mkv", ""))
|
||||
return EpisodeFile(
|
||||
media_path=torrent_path,
|
||||
title="Anime",
|
||||
season=season,
|
||||
episode=ep_num,
|
||||
suffix=".mkv",
|
||||
)
|
||||
|
||||
with patch.object(renamer._parser, "torrent_parser", side_effect=mock_parser):
|
||||
renamer.client.torrents_rename_file.return_value = True
|
||||
await renamer.rename_collection(
|
||||
media_list=media_list,
|
||||
bangumi_name="Anime",
|
||||
season=1,
|
||||
method="pn",
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
assert renamer.client.torrents_rename_file.call_count == 3
|
||||
|
||||
async def test_skips_deep_files(self, renamer):
|
||||
"""Files deeper than 2 levels are skipped (not is_ep)."""
|
||||
media_list = ["ep01.mkv", "extras/bonus/ep_sp.mkv"]
|
||||
|
||||
ep = EpisodeFile(
|
||||
media_path="ep01.mkv",
|
||||
title="Anime",
|
||||
season=1,
|
||||
episode=1,
|
||||
suffix=".mkv",
|
||||
)
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=ep):
|
||||
renamer.client.torrents_rename_file.return_value = True
|
||||
await renamer.rename_collection(
|
||||
media_list=media_list,
|
||||
bangumi_name="Anime",
|
||||
season=1,
|
||||
method="pn",
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
# Only called once for ep01.mkv (depth 1)
|
||||
assert renamer.client.torrents_rename_file.call_count == 1
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# rename_subtitles
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRenameSubtitles:
|
||||
@pytest.fixture
|
||||
def renamer(self, mock_qb_client):
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost:8080"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
r = Renamer()
|
||||
r.client = mock_qb_client
|
||||
return r
|
||||
|
||||
async def test_renames_subtitles_with_language(self, renamer):
|
||||
"""rename_subtitles prepends subtitle_ to method and renames files."""
|
||||
sub = SubtitleFile(
|
||||
media_path="sub.ass",
|
||||
title="Anime",
|
||||
season=1,
|
||||
episode=1,
|
||||
language="zh",
|
||||
suffix=".ass",
|
||||
)
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=sub):
|
||||
renamer.client.torrents_rename_file.return_value = True
|
||||
await renamer.rename_subtitles(
|
||||
subtitle_list=["sub.ass"],
|
||||
torrent_name="[Sub] Anime - 01.mkv",
|
||||
bangumi_name="Anime",
|
||||
season=1,
|
||||
method="pn",
|
||||
_hash="hash123",
|
||||
)
|
||||
|
||||
renamer.client.torrents_rename_file.assert_called_once()
|
||||
call_args = renamer.client.torrents_rename_file.call_args
|
||||
new_path = call_args[1]["new_path"] if "new_path" in (call_args[1] or {}) else call_args[0][2]
|
||||
assert ".zh." in new_path
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# rename (full flow)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRenameFlow:
|
||||
@pytest.fixture
|
||||
def renamer(self, mock_qb_client):
|
||||
with patch("module.downloader.download_client.settings") as mock_settings:
|
||||
mock_settings.downloader.type = "qbittorrent"
|
||||
mock_settings.downloader.host = "localhost:8080"
|
||||
mock_settings.downloader.username = "admin"
|
||||
mock_settings.downloader.password = "admin"
|
||||
mock_settings.downloader.ssl = False
|
||||
mock_settings.downloader.path = "/downloads/Bangumi"
|
||||
mock_settings.bangumi_manage.group_tag = False
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = False
|
||||
with patch(
|
||||
"module.downloader.download_client.DownloadClient._DownloadClient__getClient",
|
||||
return_value=mock_qb_client,
|
||||
):
|
||||
r = Renamer()
|
||||
r.client = mock_qb_client
|
||||
return r
|
||||
|
||||
async def test_single_file_rename(self, renamer):
|
||||
"""Full rename flow for a single-file torrent."""
|
||||
renamer.client.torrents_info.return_value = [
|
||||
{"hash": "h1", "name": "[Sub] Anime - 01.mkv", "save_path": "/downloads/Bangumi/Anime (2024)/Season 1"}
|
||||
]
|
||||
renamer.client.torrents_files.return_value = [
|
||||
{"name": "[Sub] Anime - 01.mkv"}
|
||||
]
|
||||
renamer.client.torrents_rename_file.return_value = True
|
||||
|
||||
ep = EpisodeFile(
|
||||
media_path="[Sub] Anime - 01.mkv",
|
||||
title="Anime",
|
||||
season=1,
|
||||
episode=1,
|
||||
suffix=".mkv",
|
||||
)
|
||||
with patch.object(renamer._parser, "torrent_parser", return_value=ep):
|
||||
with patch("module.manager.renamer.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.rename_method = "pn"
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = False
|
||||
with patch("module.downloader.path.settings") as mock_path_settings:
|
||||
mock_path_settings.downloader.path = "/downloads/Bangumi"
|
||||
result = await renamer.rename()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].episode == 1
|
||||
|
||||
async def test_collection_sets_category(self, renamer):
|
||||
"""Multi-file torrent triggers collection rename and set_category."""
|
||||
renamer.client.torrents_info.return_value = [
|
||||
{"hash": "h1", "name": "Anime Collection", "save_path": "/downloads/Bangumi/Anime (2024)/Season 1"}
|
||||
]
|
||||
renamer.client.torrents_files.return_value = [
|
||||
{"name": "ep01.mkv"},
|
||||
{"name": "ep02.mkv"},
|
||||
{"name": "ep03.mkv"},
|
||||
]
|
||||
renamer.client.torrents_rename_file.return_value = True
|
||||
|
||||
def mock_parser(torrent_path, season, **kwargs):
|
||||
ep_num = int(torrent_path.replace("ep", "").replace(".mkv", ""))
|
||||
return EpisodeFile(
|
||||
media_path=torrent_path,
|
||||
title="Anime",
|
||||
season=season,
|
||||
episode=ep_num,
|
||||
suffix=".mkv",
|
||||
)
|
||||
|
||||
with patch.object(renamer._parser, "torrent_parser", side_effect=mock_parser):
|
||||
with patch("module.manager.renamer.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.rename_method = "pn"
|
||||
mock_settings.bangumi_manage.remove_bad_torrent = False
|
||||
with patch("module.downloader.path.settings") as mock_path_settings:
|
||||
mock_path_settings.downloader.path = "/downloads/Bangumi"
|
||||
await renamer.rename()
|
||||
|
||||
renamer.client.set_category.assert_called_once_with("h1", "BangumiCollection")
|
||||
|
||||
async def test_no_media_files_no_crash(self, renamer):
|
||||
"""When torrent has no media files, logs warning but doesn't crash."""
|
||||
renamer.client.torrents_info.return_value = [
|
||||
{"hash": "h1", "name": "No Media", "save_path": "/downloads/Bangumi/Anime/Season 1"}
|
||||
]
|
||||
renamer.client.torrents_files.return_value = [
|
||||
{"name": "readme.txt"},
|
||||
{"name": "info.nfo"},
|
||||
]
|
||||
with patch("module.manager.renamer.settings") as mock_settings:
|
||||
mock_settings.bangumi_manage.rename_method = "pn"
|
||||
with patch("module.downloader.path.settings") as mock_path_settings:
|
||||
mock_path_settings.downloader.path = "/downloads/Bangumi"
|
||||
result = await renamer.rename()
|
||||
|
||||
assert result == []
|
||||
renamer.client.torrents_rename_file.assert_not_called()
|
||||
334
backend/src/test/test_rss_engine_new.py
Normal file
334
backend/src/test/test_rss_engine_new.py
Normal file
@@ -0,0 +1,334 @@
|
||||
"""Tests for RSS engine: pull_rss, match_torrent, refresh_rss, add_rss."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from sqlmodel import Session
|
||||
|
||||
from module.database.bangumi import BangumiDatabase, _invalidate_bangumi_cache
|
||||
from module.database.rss import RSSDatabase
|
||||
from module.database.torrent import TorrentDatabase
|
||||
from module.models import Bangumi, RSSItem, Torrent
|
||||
from module.rss.engine import RSSEngine
|
||||
|
||||
from test.factories import make_bangumi, make_torrent, make_rss_item
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def rss_engine(db_engine):
|
||||
"""RSSEngine backed by in-memory database."""
|
||||
engine = RSSEngine(_engine=db_engine)
|
||||
return engine
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def clear_bangumi_cache():
|
||||
"""Invalidate bangumi cache before each test."""
|
||||
_invalidate_bangumi_cache()
|
||||
yield
|
||||
_invalidate_bangumi_cache()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# pull_rss
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestPullRss:
|
||||
async def test_returns_only_new_torrents(self, rss_engine):
|
||||
"""pull_rss filters out torrents already in the database."""
|
||||
rss_item = make_rss_item()
|
||||
rss_engine.rss.add(rss_item)
|
||||
rss_item = rss_engine.rss.search_id(1)
|
||||
|
||||
# Pre-insert one torrent into DB
|
||||
existing = make_torrent(url="https://example.com/existing.torrent", rss_id=1)
|
||||
rss_engine.torrent.add(existing)
|
||||
|
||||
# Mock _get_torrents to return 3 torrents (1 existing + 2 new)
|
||||
all_torrents = [
|
||||
Torrent(name="existing", url="https://example.com/existing.torrent"),
|
||||
Torrent(name="new1", url="https://example.com/new1.torrent"),
|
||||
Torrent(name="new2", url="https://example.com/new2.torrent"),
|
||||
]
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = all_torrents
|
||||
result = await rss_engine.pull_rss(rss_item)
|
||||
|
||||
assert len(result) == 2
|
||||
assert all(t.url != "https://example.com/existing.torrent" for t in result)
|
||||
|
||||
async def test_all_existing_returns_empty(self, rss_engine):
|
||||
"""When all torrents already exist, returns empty list."""
|
||||
rss_item = make_rss_item()
|
||||
rss_engine.rss.add(rss_item)
|
||||
rss_item = rss_engine.rss.search_id(1)
|
||||
|
||||
existing = make_torrent(url="https://example.com/only.torrent", rss_id=1)
|
||||
rss_engine.torrent.add(existing)
|
||||
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = [
|
||||
Torrent(name="only", url="https://example.com/only.torrent")
|
||||
]
|
||||
result = await rss_engine.pull_rss(rss_item)
|
||||
|
||||
assert result == []
|
||||
|
||||
async def test_empty_feed_returns_empty(self, rss_engine):
|
||||
"""When RSS feed has no torrents, returns empty list."""
|
||||
rss_item = make_rss_item()
|
||||
rss_engine.rss.add(rss_item)
|
||||
rss_item = rss_engine.rss.search_id(1)
|
||||
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = []
|
||||
result = await rss_engine.pull_rss(rss_item)
|
||||
|
||||
assert result == []
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# match_torrent
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestMatchTorrent:
|
||||
def test_matches_by_title_raw_substring(self, rss_engine):
|
||||
"""match_torrent finds Bangumi when title_raw is a substring of torrent name."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
torrent = make_torrent(
|
||||
name="[Lilith-Raws] Mushoku Tensei - 11 [1080p].mkv"
|
||||
)
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is not None
|
||||
assert result.title_raw == "Mushoku Tensei"
|
||||
|
||||
def test_no_match_returns_none(self, rss_engine):
|
||||
"""Returns None when no Bangumi matches the torrent name."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
torrent = make_torrent(name="[Sub] Completely Different Anime - 01.mkv")
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_filter_excludes_matching_torrent(self, rss_engine):
|
||||
"""When torrent name matches the filter regex, returns None."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="720")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
torrent = make_torrent(
|
||||
name="[Sub] Mushoku Tensei - 01 [720p].mkv"
|
||||
)
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_empty_filter_allows_match(self, rss_engine):
|
||||
"""When filter is empty string, all matching torrents pass."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
torrent = make_torrent(
|
||||
name="[Sub] Mushoku Tensei - 01 [720p].mkv"
|
||||
)
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is not None
|
||||
|
||||
def test_filter_case_insensitive(self, rss_engine):
|
||||
"""Filter regex matching is case-insensitive."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="HEVC")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
# Torrent has "hevc" in lowercase - should still be filtered
|
||||
torrent = make_torrent(
|
||||
name="[Sub] Mushoku Tensei - 01 [1080p][hevc].mkv"
|
||||
)
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_deleted_bangumi_not_matched(self, rss_engine):
|
||||
"""Bangumi with deleted=True should not match."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="", deleted=True)
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
torrent = make_torrent(name="[Sub] Mushoku Tensei - 01 [1080p].mkv")
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_comma_separated_filters(self, rss_engine):
|
||||
"""Multiple comma-separated filters are joined with | for OR matching."""
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="720,480")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
# Matches one of the filters
|
||||
torrent = make_torrent(name="[Sub] Mushoku Tensei - 01 [480p].mkv")
|
||||
result = rss_engine.match_torrent(torrent)
|
||||
|
||||
assert result is None
|
||||
|
||||
# Doesn't match any filter
|
||||
torrent2 = make_torrent(name="[Sub] Mushoku Tensei - 01 [1080p].mkv")
|
||||
result2 = rss_engine.match_torrent(torrent2)
|
||||
|
||||
assert result2 is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# refresh_rss
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRefreshRss:
|
||||
async def test_downloads_matched_torrents(self, rss_engine, mock_qb_client):
|
||||
"""refresh_rss downloads torrents that match a bangumi rule."""
|
||||
# Setup DB
|
||||
rss_item = make_rss_item(enabled=True)
|
||||
rss_engine.rss.add(rss_item)
|
||||
bangumi = make_bangumi(title_raw="Mushoku Tensei", filter="")
|
||||
rss_engine.bangumi.add(bangumi)
|
||||
|
||||
# Mock network
|
||||
new_torrent = Torrent(
|
||||
name="[Sub] Mushoku Tensei - 12 [1080p].mkv",
|
||||
url="https://example.com/ep12.torrent",
|
||||
)
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = [new_torrent]
|
||||
|
||||
# Create a mock client
|
||||
client = AsyncMock()
|
||||
client.add_torrent = AsyncMock(return_value=True)
|
||||
|
||||
await rss_engine.refresh_rss(client)
|
||||
|
||||
# Verify download was attempted
|
||||
client.add_torrent.assert_called_once()
|
||||
# Verify torrent stored in DB
|
||||
all_torrents = rss_engine.torrent.search_all()
|
||||
assert len(all_torrents) == 1
|
||||
assert all_torrents[0].downloaded is True
|
||||
|
||||
async def test_unmatched_torrents_stored_not_downloaded(self, rss_engine):
|
||||
"""Unmatched torrents are stored in DB but not marked downloaded."""
|
||||
rss_item = make_rss_item(enabled=True)
|
||||
rss_engine.rss.add(rss_item)
|
||||
# No bangumi in DB to match
|
||||
|
||||
unmatched = Torrent(
|
||||
name="[Sub] Unknown Anime - 01 [1080p].mkv",
|
||||
url="https://example.com/unknown.torrent",
|
||||
)
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = [unmatched]
|
||||
client = AsyncMock()
|
||||
await rss_engine.refresh_rss(client)
|
||||
|
||||
client.add_torrent.assert_not_called()
|
||||
all_torrents = rss_engine.torrent.search_all()
|
||||
assert len(all_torrents) == 1
|
||||
assert all_torrents[0].downloaded is False
|
||||
|
||||
async def test_refresh_specific_rss_id(self, rss_engine):
|
||||
"""refresh_rss with rss_id only processes that specific feed."""
|
||||
rss1 = make_rss_item(name="Feed 1", url="https://feed1.com/rss")
|
||||
rss2 = make_rss_item(name="Feed 2", url="https://feed2.com/rss")
|
||||
rss_engine.rss.add(rss1)
|
||||
rss_engine.rss.add(rss2)
|
||||
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
mock_get.return_value = []
|
||||
client = AsyncMock()
|
||||
await rss_engine.refresh_rss(client, rss_id=2)
|
||||
|
||||
# Only called once (for rss_id=2)
|
||||
mock_get.assert_called_once()
|
||||
|
||||
async def test_refresh_nonexistent_rss_id(self, rss_engine):
|
||||
"""refresh_rss with non-existent rss_id does nothing."""
|
||||
with patch.object(RSSEngine, "_get_torrents", new_callable=AsyncMock) as mock_get:
|
||||
client = AsyncMock()
|
||||
await rss_engine.refresh_rss(client, rss_id=999)
|
||||
|
||||
mock_get.assert_not_called()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# add_rss
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestAddRss:
|
||||
async def test_add_with_name(self, rss_engine):
|
||||
"""add_rss with explicit name skips HTTP fetch and creates record."""
|
||||
result = await rss_engine.add_rss(
|
||||
rss_link="https://mikanani.me/RSS/test",
|
||||
name="My Feed",
|
||||
aggregate=True,
|
||||
parser="mikan",
|
||||
)
|
||||
|
||||
assert result.status is True
|
||||
assert result.status_code == 200
|
||||
rss = rss_engine.rss.search_id(1)
|
||||
assert rss.name == "My Feed"
|
||||
assert rss.url == "https://mikanani.me/RSS/test"
|
||||
|
||||
async def test_add_without_name_fetches_title(self, rss_engine):
|
||||
"""add_rss without name calls get_rss_title to auto-discover title."""
|
||||
with patch(
|
||||
"module.rss.engine.RequestContent"
|
||||
) as MockReq:
|
||||
mock_instance = AsyncMock()
|
||||
mock_instance.get_rss_title = AsyncMock(return_value="Fetched Title")
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_instance)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
result = await rss_engine.add_rss(
|
||||
rss_link="https://mikanani.me/RSS/auto",
|
||||
name=None,
|
||||
)
|
||||
|
||||
assert result.status is True
|
||||
rss = rss_engine.rss.search_id(1)
|
||||
assert rss.name == "Fetched Title"
|
||||
|
||||
async def test_add_without_name_fetch_fails(self, rss_engine):
|
||||
"""add_rss returns error when title fetch fails."""
|
||||
with patch(
|
||||
"module.rss.engine.RequestContent"
|
||||
) as MockReq:
|
||||
mock_instance = AsyncMock()
|
||||
mock_instance.get_rss_title = AsyncMock(return_value=None)
|
||||
MockReq.return_value.__aenter__ = AsyncMock(return_value=mock_instance)
|
||||
MockReq.return_value.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
result = await rss_engine.add_rss(
|
||||
rss_link="https://mikanani.me/RSS/broken",
|
||||
name=None,
|
||||
)
|
||||
|
||||
assert result.status is False
|
||||
assert result.status_code == 406
|
||||
|
||||
async def test_add_duplicate_url_fails(self, rss_engine):
|
||||
"""add_rss with an already-existing URL returns failure."""
|
||||
await rss_engine.add_rss(
|
||||
rss_link="https://mikanani.me/RSS/dup",
|
||||
name="First",
|
||||
)
|
||||
result = await rss_engine.add_rss(
|
||||
rss_link="https://mikanani.me/RSS/dup",
|
||||
name="Second",
|
||||
)
|
||||
|
||||
assert result.status is False
|
||||
assert result.status_code == 406
|
||||
125
backend/src/test/test_searcher.py
Normal file
125
backend/src/test/test_searcher.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""Tests for search providers: URL construction, keyword handling."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
|
||||
from module.models import Bangumi, RSSItem
|
||||
from module.searcher.provider import search_url
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# search_url
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestSearchUrl:
|
||||
@pytest.fixture(autouse=True)
|
||||
def mock_search_config(self):
|
||||
"""Ensure SEARCH_CONFIG has default providers."""
|
||||
config = {
|
||||
"mikan": "https://mikanani.me/RSS/Search?searchstr=%s",
|
||||
"nyaa": "https://nyaa.si/?page=rss&q=%s&c=0_0&f=0",
|
||||
"dmhy": "http://dmhy.org/topics/rss/rss.xml?keyword=%s",
|
||||
}
|
||||
with patch("module.searcher.provider.SEARCH_CONFIG", config):
|
||||
yield
|
||||
|
||||
def test_mikan_url(self):
|
||||
"""Mikan search URL is constructed correctly."""
|
||||
result = search_url("mikan", ["Mushoku", "Tensei"])
|
||||
assert isinstance(result, RSSItem)
|
||||
assert "mikanani.me" in result.url
|
||||
assert "Mushoku" in result.url
|
||||
assert "Tensei" in result.url
|
||||
assert result.parser == "mikan"
|
||||
|
||||
def test_nyaa_url(self):
|
||||
"""Nyaa search URL is constructed correctly."""
|
||||
result = search_url("nyaa", ["Mushoku", "Tensei"])
|
||||
assert "nyaa.si" in result.url
|
||||
assert result.parser == "tmdb"
|
||||
|
||||
def test_dmhy_url(self):
|
||||
"""DMHY search URL is constructed correctly."""
|
||||
result = search_url("dmhy", ["Mushoku", "Tensei"])
|
||||
assert "dmhy.org" in result.url
|
||||
assert result.parser == "tmdb"
|
||||
|
||||
def test_unsupported_site_raises(self):
|
||||
"""Unknown site raises ValueError."""
|
||||
with pytest.raises(ValueError, match="not supported"):
|
||||
search_url("unknown_site", ["test"])
|
||||
|
||||
def test_keyword_sanitization(self):
|
||||
"""Non-word characters are replaced with +."""
|
||||
result = search_url("mikan", ["Test Anime (2024)"])
|
||||
# Spaces and parentheses should be replaced with +
|
||||
assert "(" not in result.url
|
||||
assert ")" not in result.url
|
||||
|
||||
def test_multiple_keywords_joined(self):
|
||||
"""Multiple keywords are joined with +."""
|
||||
result = search_url("mikan", ["word1", "word2", "word3"])
|
||||
# All keywords should appear in the URL
|
||||
url = result.url
|
||||
assert "word1" in url
|
||||
assert "word2" in url
|
||||
assert "word3" in url
|
||||
|
||||
def test_aggregate_is_false(self):
|
||||
"""Search RSS items have aggregate=False."""
|
||||
result = search_url("mikan", ["test"])
|
||||
assert result.aggregate is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SearchTorrent.special_url
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestSpecialUrl:
|
||||
def test_uses_bangumi_fields(self):
|
||||
"""special_url builds keywords from SEARCH_KEY fields of Bangumi."""
|
||||
from module.searcher.searcher import SearchTorrent, SEARCH_KEY
|
||||
from test.factories import make_bangumi
|
||||
|
||||
bangumi = make_bangumi(
|
||||
group_name="SubGroup",
|
||||
title_raw="Test Raw",
|
||||
season_raw="S2",
|
||||
dpi="1080p",
|
||||
source="Web",
|
||||
subtitle="CHT",
|
||||
)
|
||||
|
||||
with patch("module.searcher.provider.SEARCH_CONFIG", {
|
||||
"mikan": "https://mikanani.me/RSS/Search?searchstr=%s",
|
||||
}):
|
||||
result = SearchTorrent.special_url(bangumi, "mikan")
|
||||
|
||||
assert isinstance(result, RSSItem)
|
||||
# All non-None SEARCH_KEY fields should contribute to the URL
|
||||
assert "SubGroup" in result.url
|
||||
assert "Test" in result.url
|
||||
|
||||
def test_skips_none_fields(self):
|
||||
"""special_url skips fields that are None."""
|
||||
from module.searcher.searcher import SearchTorrent
|
||||
from test.factories import make_bangumi
|
||||
|
||||
bangumi = make_bangumi(
|
||||
group_name=None,
|
||||
title_raw="Test",
|
||||
season_raw=None,
|
||||
dpi=None,
|
||||
source=None,
|
||||
subtitle=None,
|
||||
)
|
||||
|
||||
with patch("module.searcher.provider.SEARCH_CONFIG", {
|
||||
"mikan": "https://mikanani.me/RSS/Search?searchstr=%s",
|
||||
}):
|
||||
result = SearchTorrent.special_url(bangumi, "mikan")
|
||||
|
||||
# Only title_raw should be in the URL
|
||||
assert "Test" in result.url
|
||||
16
backend/uv.lock
generated
16
backend/uv.lock
generated
@@ -54,7 +54,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "auto-bangumi"
|
||||
version = "3.1.0"
|
||||
version = "3.2.0b4"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aiosqlite" },
|
||||
@@ -85,6 +85,7 @@ dev = [
|
||||
{ name = "pre-commit" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-mock" },
|
||||
{ name = "ruff" },
|
||||
]
|
||||
|
||||
@@ -118,6 +119,7 @@ dev = [
|
||||
{ name = "pre-commit", specifier = ">=3.0.0" },
|
||||
{ name = "pytest", specifier = ">=8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.23.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.12.0" },
|
||||
{ name = "ruff", specifier = ">=0.1.0" },
|
||||
]
|
||||
|
||||
@@ -1133,6 +1135,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-mock"
|
||||
version = "3.15.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.2.1"
|
||||
|
||||
Reference in New Issue
Block a user