Merge pull request #1027 from EstrellaXD/fix/batch-3.2.7-followup

fix: batch bug fixes for 3.2.7 (#1016, #983, #1025, #994, #1015)
This commit is contained in:
Estrella Pan
2026-04-19 13:04:17 +02:00
committed by GitHub
7 changed files with 115 additions and 27 deletions

View File

@@ -314,9 +314,18 @@ jobs:
echo ${{ needs.version-info.outputs.version }}
echo "VERSION='${{ needs.version-info.outputs.version }}'" >> module/__version__.py
- uses: astral-sh/setup-uv@v4
with:
version: "latest"
- name: Generate requirements.txt for non-uv consumers (#994)
run: |
cd backend && uv export --format requirements-txt --no-hashes --no-dev -o requirements.txt
- name: Zip app
run: |
cd backend && zip -r app-v${{ needs.version-info.outputs.version }}.zip src
cd backend && zip -r app-v${{ needs.version-info.outputs.version }}.zip \
src pyproject.toml uv.lock requirements.txt
- name: Generate Release info
id: release-info

View File

@@ -1,6 +1,7 @@
import logging
import re
from os import PathLike
from pathlib import PureWindowsPath
from module.conf import PLATFORM, settings
from module.models import Bangumi, BangumiUpdate
@@ -36,9 +37,11 @@ class TorrentPath:
@staticmethod
def _path_to_bangumi(save_path: PathLike[str] | str, torrent_name: str = ""):
# Split save path and download path
save_parts = Path(save_path).parts
download_parts = Path(settings.downloader.path).parts
# Use PureWindowsPath regardless of the host AB runs on: it accepts
# both "\" and "/" separators, so a qBittorrent-on-Windows save_path
# reaching a Linux AB still splits into segments correctly (#1016).
save_parts = PureWindowsPath(save_path).parts
download_parts = PureWindowsPath(settings.downloader.path).parts
# Get bangumi name and season
bangumi_name = ""
season = 1

View File

@@ -36,30 +36,32 @@ async def get_shared_client() -> httpx.AsyncClient:
if _shared_client is not None:
await _shared_client.aclose()
timeout = httpx.Timeout(connect=10.0, read=30.0, write=10.0, pool=10.0)
# follow_redirects=True: Mikan mirrors and some CDNs respond with 302 to the
# canonical host; without this, raise_for_status treats the redirect as an
# error and the RSS pull fails (#983).
common_kwargs = {
"timeout": timeout,
"limits": _CONNECTION_LIMITS,
"follow_redirects": True,
}
if settings.proxy.enable:
if "http" in settings.proxy.type:
if settings.proxy.username:
proxy_url = f"http://{settings.proxy.username}:{settings.proxy.password}@{settings.proxy.host}:{settings.proxy.port}"
else:
proxy_url = f"http://{settings.proxy.host}:{settings.proxy.port}"
_shared_client = httpx.AsyncClient(
proxy=proxy_url, timeout=timeout, limits=_CONNECTION_LIMITS
)
_shared_client = httpx.AsyncClient(proxy=proxy_url, **common_kwargs)
elif settings.proxy.type == "socks5":
if settings.proxy.username:
socks_url = f"socks5://{settings.proxy.username}:{settings.proxy.password}@{settings.proxy.host}:{settings.proxy.port}"
else:
socks_url = f"socks5://{settings.proxy.host}:{settings.proxy.port}"
transport = AsyncProxyTransport.from_url(socks_url, rdns=True)
_shared_client = httpx.AsyncClient(
transport=transport, timeout=timeout, limits=_CONNECTION_LIMITS
)
_shared_client = httpx.AsyncClient(transport=transport, **common_kwargs)
else:
_shared_client = httpx.AsyncClient(
timeout=timeout, limits=_CONNECTION_LIMITS
)
_shared_client = httpx.AsyncClient(**common_kwargs)
else:
_shared_client = httpx.AsyncClient(timeout=timeout, limits=_CONNECTION_LIMITS)
_shared_client = httpx.AsyncClient(**common_kwargs)
_shared_client_proxy_key = current_key
return _shared_client
@@ -91,7 +93,9 @@ class RequestURL:
}
# For torrent files, use different Accept header
if url.endswith(".torrent") or "/download/" in url:
base_headers["Accept"] = "application/x-bittorrent, application/octet-stream, */*"
base_headers["Accept"] = (
"application/x-bittorrent, application/octet-stream, */*"
)
else:
base_headers["Accept"] = "application/xml, text/xml, */*"
return base_headers
@@ -102,7 +106,11 @@ class RequestURL:
while True:
try:
req = await self._client.get(url=url, headers=headers)
logger.debug("[Network] Successfully connected to %s. Status: %s", url, req.status_code)
logger.debug(
"[Network] Successfully connected to %s. Status: %s",
url,
req.status_code,
)
req.raise_for_status()
return req
except httpx.HTTPStatusError as e:
@@ -122,16 +130,16 @@ class RequestURL:
except Exception as e:
logger.warning(f"[Network] Unexpected error for {url}: {e}")
break
logger.error(f"[Network] Unable to connect to {url}, Please check your network settings")
logger.error(
f"[Network] Unable to connect to {url}, Please check your network settings"
)
return None
async def post_url(self, url: str, data: dict, retry=3):
try_time = 0
while True:
try:
req = await self._client.post(
url=url, headers=self.header, data=data
)
req = await self._client.post(url=url, headers=self.header, data=data)
req.raise_for_status()
return req
except httpx.RequestError:

View File

@@ -59,7 +59,11 @@ def pre_process(raw_name: str) -> str:
def prefix_process(raw: str, group: str) -> str:
raw = re.sub(f".{re.escape(group)}.", "", raw)
# Guard against empty group: without this, the pattern degenerates to ".."
# and every pair of characters gets deleted, destroying titles that lack a
# [group] prefix (#1025).
if group:
raw = re.sub(f".{re.escape(group)}.", "", raw)
raw_process = PREFIX_RE.sub("/", raw)
arg_group = raw_process.split("/")
while "" in arg_group:

View File

@@ -13,6 +13,34 @@ def test_path_to_bangumi():
assert season == 2
def test_path_to_bangumi_windows_style_save_path():
"""Regression for #1016: when qBittorrent runs on Windows and AB runs on
Linux, qB returns backslash paths. PurePosixPath treats the whole string
as one segment, leaving season stuck at 1."""
from module.downloader.path import TorrentPath
with patch("module.downloader.path.settings") as mock_settings:
mock_settings.downloader.path = r"D:\video\Bangumis"
path = r"D:\video\Bangumis\小书痴的下克上\Season 4"
bangumi_name, season = TorrentPath._path_to_bangumi(path)
assert bangumi_name == "小书痴的下克上"
assert season == 4
def test_path_to_bangumi_posix_path_on_linux_ab():
"""Regression guard: POSIX paths still parse correctly after the fix."""
from module.downloader.path import TorrentPath
with patch("module.downloader.path.settings") as mock_settings:
mock_settings.downloader.path = "/downloads/Bangumi"
path = "/downloads/Bangumi/葬送的芙莉莲/Season 2"
bangumi_name, season = TorrentPath._path_to_bangumi(path)
assert bangumi_name == "葬送的芙莉莲"
assert season == 2
class TestGenSavePath:
"""Tests for TorrentPath._gen_save_path with season_offset."""

View File

@@ -56,7 +56,9 @@ def test_raw_parser():
assert info.episode == 9
assert info.season == 1
content = "[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"
content = (
"[梦蓝字幕组]New Doraemon 哆啦A梦新番[747][2023.02.25][AVC][1080P][GB_JP][MP4]"
)
info = raw_parser(content)
assert info.group == "梦蓝字幕组"
assert info.title_zh == "哆啦A梦新番"
@@ -65,7 +67,9 @@ def test_raw_parser():
assert info.episode == 747
assert info.season == 1
content = "[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"
content = (
"[织梦字幕组][尼尔:机械纪元 NieR Automata Ver1.1a][02集][1080P][AVC][简日双语]"
)
info = raw_parser(content)
assert info.group == "织梦字幕组"
assert info.title_zh == "尼尔:机械纪元"
@@ -160,7 +164,9 @@ def test_raw_parser():
assert info.season == 1
# Issue #990: Title starting with number — should not misparse "29" as episode
content = "[ANi] 29 岁单身中坚冒险家的日常 - 07 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"
content = (
"[ANi] 29 岁单身中坚冒险家的日常 - 07 [1080P][Baha][WEB-DL][AAC AVC][CHT][MP4]"
)
info = raw_parser(content)
assert info.group == "ANi"
assert info.title_zh == "29 岁单身中坚冒险家的日常"
@@ -310,8 +316,9 @@ class TestIssue764WesternFormat:
assert info.resolution == "1080p"
# No brackets → group detection fails
assert info.group == ""
# No CJK chars → no title_zh/jp; EN detection also fails (short segments)
assert info.title_en is None
# After the #1025 fix, prefix_process no longer destroys titles without
# a [group] prefix, so the English title is now extracted correctly.
assert info.title_en == "Girls Band Cry"
assert info.title_zh is None
@@ -323,7 +330,9 @@ class TestIssue986AtlasFormat:
"[阿特拉斯字幕组·雪原市出差所][命运-奇异赝品_Fatestrange Fake][07_神自黄昏归来][简繁日内封PGS][日语配音版_Japanese Dub][Web-DL Remux][1080p AVC AAC]",
]
@pytest.mark.xfail(reason="Atlas bracket-delimited format not supported by TITLE_RE")
@pytest.mark.xfail(
reason="Atlas bracket-delimited format not supported by TITLE_RE"
)
def test_parse_atlas_format(self):
info = raw_parser(self.TITLES[0])
assert info is not None
@@ -362,3 +371,24 @@ class TestIssue805TitleWithCht:
assert info.source == "Baha"
assert info.sub == "CHT"
class TestIssue1025NoGroupPrefix:
"""Issue #1025: Titles without a [group] prefix must still parse.
    prefix_process was calling re.sub(f".{re.escape(group)}.", "", raw) even
    when group was empty, which reduced the pattern to `..` and deleted every
pair of characters, leaving a stub like `1` that name_process couldn't
split into en/zh/jp.
"""
def test_mixed_cjk_and_en_without_group(self):
content = (
"冰之城墙「氷の城壁」The Ramparts of Ice S01E02 1080p 日英双语-多国字幕"
)
info = raw_parser(content)
assert info is not None
assert info.episode == 2
assert info.season == 1
# Before the fix all three title fields were None and title_parser
# raised "Cannot extract title_raw". At least one must now be set.
assert any([info.title_en, info.title_zh, info.title_jp])

View File

@@ -33,6 +33,12 @@ class TestSharedClientLimits:
assert pool._max_connections is not None
assert pool._max_connections > 0
async def test_client_follows_redirects(self):
"""Regression for #983: mikanime mirror returns 302 to the canonical
URL but httpx refuses to follow by default, so the RSS fetch fails."""
client = await get_shared_client()
assert client.follow_redirects is True
class TestResetSharedClient:
async def test_reset_closes_existing_client(self):