mirror of
https://github.com/EstrellaXD/Auto_Bangumi.git
synced 2026-03-20 03:46:40 +08:00
feat(mcp): add MCP server for LLM tool integration via SSE
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
14
CHANGELOG.md
14
CHANGELOG.md
@@ -1,3 +1,17 @@
|
||||
# [3.2.3-beta.5] - 2026-02-22
|
||||
|
||||
## Backend
|
||||
|
||||
### Added
|
||||
|
||||
- 新增 MCP (Model Context Protocol) 服务器,支持通过 Claude Desktop 等 LLM 工具管理番剧订阅
|
||||
- SSE 传输层挂载在 `/mcp/sse`,支持 MCP 客户端连接
|
||||
- 10 个工具:list_anime、get_anime、search_anime、subscribe_anime、unsubscribe_anime、list_downloads、list_rss_feeds、get_program_status、refresh_feeds、update_anime
|
||||
- 4 个资源:anime/list、anime/{id}、status、rss/feeds
|
||||
- 本地网络 IP 白名单安全中间件(RFC 1918 + 回环地址),无需 JWT 认证
|
||||
|
||||
---
|
||||
|
||||
# [3.2.3-beta.4] - 2026-02-22
|
||||
|
||||
## Backend
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "auto-bangumi"
|
||||
version = "3.2.3-beta.4"
|
||||
version = "3.2.3-beta.5"
|
||||
description = "AutoBangumi - Automated anime download manager"
|
||||
requires-python = ">=3.13"
|
||||
dependencies = [
|
||||
@@ -24,6 +24,7 @@ dependencies = [
|
||||
"sse-starlette>=1.6.5",
|
||||
"webauthn>=2.0.0",
|
||||
"urllib3>=2.0.3",
|
||||
"mcp[cli]>=1.8.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
||||
@@ -7,9 +7,11 @@ from fastapi import FastAPI, Request
|
||||
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from module.api import v1
|
||||
from module.api.program import program
|
||||
from module.conf import VERSION, settings, setup_logger
|
||||
from module.mcp import create_mcp_app
|
||||
|
||||
setup_logger(reset=True)
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -45,6 +47,9 @@ def create_app() -> FastAPI:
|
||||
# mount routers
|
||||
app.include_router(v1, prefix="/api")
|
||||
|
||||
# mount MCP server (SSE transport for LLM tool integration)
|
||||
app.mount("/mcp", create_mcp_app())
|
||||
|
||||
return app
|
||||
|
||||
|
||||
@@ -73,6 +78,7 @@ if VERSION != "DEV_VERSION":
|
||||
else:
|
||||
context = {"request": request}
|
||||
return templates.TemplateResponse("index.html", context)
|
||||
|
||||
else:
|
||||
|
||||
@app.get("/", status_code=302, tags=["html"])
|
||||
|
||||
13
backend/src/module/mcp/__init__.py
Normal file
13
backend/src/module/mcp/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""MCP (Model Context Protocol) server for AutoBangumi.
|
||||
|
||||
Exposes anime subscriptions, RSS feeds, and download status to MCP clients
|
||||
(e.g. Claude Desktop) over a local-network-restricted SSE endpoint.
|
||||
|
||||
Usage::
|
||||
|
||||
from module.mcp import create_mcp_app
|
||||
|
||||
app = create_mcp_app() # returns a Starlette ASGI app, mount at /mcp
|
||||
"""
|
||||
|
||||
from .server import create_mcp_starlette_app as create_mcp_app
|
||||
108
backend/src/module/mcp/resources.py
Normal file
108
backend/src/module/mcp/resources.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""MCP resource definitions and handlers for AutoBangumi.
|
||||
|
||||
``RESOURCES`` lists static resources; ``RESOURCE_TEMPLATES`` lists URI
|
||||
templates for parameterised lookups. ``handle_resource`` resolves a URI
|
||||
to its JSON payload.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from mcp import types
|
||||
|
||||
from module.conf import VERSION
|
||||
from module.manager import TorrentManager
|
||||
from module.models import Bangumi
|
||||
from module.rss import RSSEngine
|
||||
|
||||
from .tools import _bangumi_to_dict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
RESOURCES = [
|
||||
types.Resource(
|
||||
uri="autobangumi://anime/list",
|
||||
name="All tracked anime",
|
||||
description="List of all anime subscriptions being tracked by AutoBangumi",
|
||||
mimeType="application/json",
|
||||
),
|
||||
types.Resource(
|
||||
uri="autobangumi://status",
|
||||
name="Program status",
|
||||
description="Current AutoBangumi program status, version, and state",
|
||||
mimeType="application/json",
|
||||
),
|
||||
types.Resource(
|
||||
uri="autobangumi://rss/feeds",
|
||||
name="RSS feeds",
|
||||
description="All configured RSS feeds with health status",
|
||||
mimeType="application/json",
|
||||
),
|
||||
]
|
||||
|
||||
RESOURCE_TEMPLATES = [
|
||||
types.ResourceTemplate(
|
||||
uriTemplate="autobangumi://anime/{id}",
|
||||
name="Anime details",
|
||||
description="Detailed information about a specific tracked anime by ID",
|
||||
mimeType="application/json",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def handle_resource(uri: str) -> str:
    """Return a JSON string for the given MCP resource URI.

    Supported URIs:
    - ``autobangumi://anime/list`` - all tracked anime
    - ``autobangumi://status`` - program version and running state
    - ``autobangumi://rss/feeds`` - configured RSS feeds
    - ``autobangumi://anime/{id}`` - single anime by integer ID

    Unknown URIs and malformed anime IDs yield an ``{"error": ...}``
    JSON payload instead of raising.
    """
    # The MCP SDK hands read_resource handlers a pydantic AnyUrl, not a plain
    # str, and server.py forwards it unchanged; normalise so the equality
    # checks below work either way.
    uri = str(uri)

    if uri == "autobangumi://anime/list":
        with TorrentManager() as manager:
            items = manager.bangumi.search_all()
            return json.dumps([_bangumi_to_dict(b) for b in items], ensure_ascii=False)

    elif uri == "autobangumi://status":
        # Imported lazily — presumably to avoid a circular import at module
        # load time (module.api.program imports application modules).
        from module.api.program import program

        return json.dumps(
            {
                "version": VERSION,
                "running": program.is_running,
                "first_run": program.first_run,
            }
        )

    elif uri == "autobangumi://rss/feeds":
        with RSSEngine() as engine:
            feeds = engine.rss.search_all()
            return json.dumps(
                [
                    {
                        "id": f.id,
                        "name": f.name,
                        "url": f.url,
                        "enabled": f.enabled,
                        "connection_status": f.connection_status,
                        "last_checked_at": f.last_checked_at,
                    }
                    for f in feeds
                ],
                ensure_ascii=False,
            )

    elif uri.startswith("autobangumi://anime/"):
        raw_id = uri.rsplit("/", 1)[-1]
        try:
            anime_id = int(raw_id)
        except ValueError:
            return json.dumps({"error": f"Invalid anime ID: {raw_id}"})
        with TorrentManager() as manager:
            result = manager.search_one(anime_id)
            # search_one returns either a Bangumi row or a ResponseModel-style
            # error object carrying msg_en.
            if isinstance(result, Bangumi):
                return json.dumps(_bangumi_to_dict(result), ensure_ascii=False)
            return json.dumps({"error": result.msg_en})

    return json.dumps({"error": f"Unknown resource: {uri}"})
|
||||
48
backend/src/module/mcp/security.py
Normal file
48
backend/src/module/mcp/security.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""MCP access control: restricts connections to local network addresses only."""
|
||||
|
||||
import ipaddress
|
||||
import logging
|
||||
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# RFC 1918 private ranges + loopback + IPv6 equivalents
|
||||
_ALLOWED_NETWORKS = [
|
||||
ipaddress.ip_network("127.0.0.0/8"),
|
||||
ipaddress.ip_network("10.0.0.0/8"),
|
||||
ipaddress.ip_network("172.16.0.0/12"),
|
||||
ipaddress.ip_network("192.168.0.0/16"),
|
||||
ipaddress.ip_network("::1/128"),
|
||||
ipaddress.ip_network("fe80::/10"),
|
||||
ipaddress.ip_network("fc00::/7"),
|
||||
]
|
||||
|
||||
|
||||
def _is_local(host: str) -> bool:
|
||||
"""Return True if *host* is a loopback or RFC 1918 private address."""
|
||||
try:
|
||||
addr = ipaddress.ip_address(host)
|
||||
except ValueError:
|
||||
return False
|
||||
return any(addr in net for net in _ALLOWED_NETWORKS)
|
||||
|
||||
|
||||
class LocalNetworkMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that blocks requests from non-local IP addresses.

    Any client outside loopback, RFC 1918, or the IPv6 link-local /
    unique-local ranges receives an HTTP 403 and never reaches the app.
    """

    async def dispatch(self, request: Request, call_next):
        client = request.client
        host = client.host if client else None
        if host and _is_local(host):
            return await call_next(request)
        # Unknown or non-local origin: refuse before the MCP app sees it.
        logger.warning("[MCP] Rejected non-local connection from %s", host)
        return JSONResponse(
            status_code=403,
            content={"error": "MCP access is restricted to local network"},
        )
|
||||
86
backend/src/module/mcp/server.py
Normal file
86
backend/src/module/mcp/server.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""MCP server assembly for AutoBangumi.
|
||||
|
||||
Wires together the MCP ``Server``, SSE transport, tool/resource handlers,
|
||||
and local-network middleware into a single Starlette ASGI application.
|
||||
|
||||
Mount the app returned by ``create_mcp_starlette_app`` at a path prefix
|
||||
(e.g. ``/mcp``) in the parent FastAPI application to expose the MCP
|
||||
endpoint at ``/mcp/sse``.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from mcp import types
|
||||
from mcp.server import Server
|
||||
from mcp.server.sse import SseServerTransport
|
||||
from starlette.applications import Starlette
|
||||
from starlette.requests import Request
|
||||
from starlette.routing import Mount, Route
|
||||
|
||||
from .resources import RESOURCE_TEMPLATES, RESOURCES, handle_resource
|
||||
from .security import LocalNetworkMiddleware
|
||||
from .tools import TOOLS, handle_tool
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Module-level MCP server instance and its SSE transport. Handlers below are
# bound to `server` via SDK decorators, so their bodies are thin delegations.
server = Server("autobangumi")
sse = SseServerTransport("/messages/")


@server.list_tools()
async def list_tools() -> list[types.Tool]:
    """Advertise the static tool catalogue to MCP clients."""
    return TOOLS


@server.call_tool()
async def call_tool(name: str, arguments: dict) -> list[types.TextContent]:
    """Delegate a tool invocation to the dispatcher in ``tools.py``."""
    logger.debug("[MCP] Tool called: %s", name)
    return await handle_tool(name, arguments)


@server.list_resources()
async def list_resources() -> list[types.Resource]:
    """Advertise the static resource catalogue."""
    return RESOURCES


@server.list_resource_templates()
async def list_resource_templates() -> list[types.ResourceTemplate]:
    """Advertise URI templates for parameterised resources."""
    return RESOURCE_TEMPLATES


@server.read_resource()
async def read_resource(uri: str) -> str:
    """Resolve a resource URI to its JSON payload.

    NOTE(review): the MCP SDK may pass *uri* as a pydantic AnyUrl rather
    than a plain str — confirm handle_resource coerces before comparing.
    """
    logger.debug("[MCP] Resource read: %s", uri)
    return handle_resource(uri)
|
||||
|
||||
|
||||
async def handle_sse(request: Request):
    """Accept an SSE connection, run the MCP session until the client disconnects."""
    # NOTE(review): request._send is a private Starlette attribute; this
    # mirrors the upstream MCP SDK SSE example but may break on Starlette
    # upgrades — confirm against the pinned starlette version.
    async with sse.connect_sse(
        request.scope, request.receive, request._send
    ) as streams:
        # streams[0]/streams[1] are the read/write ends of the MCP session.
        await server.run(
            streams[0],
            streams[1],
            server.create_initialization_options(),
        )
|
||||
|
||||
|
||||
def create_mcp_starlette_app() -> Starlette:
    """Build and return the MCP Starlette sub-application.

    Routes:
    - ``GET /sse`` - SSE stream for MCP clients
    - ``POST /messages/`` - client-to-server message posting

    ``LocalNetworkMiddleware`` is applied so the endpoint is only reachable
    from loopback and RFC 1918 addresses.
    """
    routes = [
        Route("/sse", endpoint=handle_sse),
        Mount("/messages", app=sse.handle_post_message),
    ]
    mcp_app = Starlette(routes=routes)
    # Middleware added after construction wraps every route above.
    mcp_app.add_middleware(LocalNetworkMiddleware)
    return mcp_app
|
||||
359
backend/src/module/mcp/tools.py
Normal file
359
backend/src/module/mcp/tools.py
Normal file
@@ -0,0 +1,359 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from mcp import types
|
||||
|
||||
from module.conf import VERSION
|
||||
from module.downloader import DownloadClient
|
||||
from module.manager import SeasonCollector, TorrentManager
|
||||
from module.models import Bangumi, BangumiUpdate, RSSItem
|
||||
from module.rss import RSSAnalyser, RSSEngine
|
||||
from module.searcher import SearchTorrent
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
TOOLS = [
|
||||
types.Tool(
|
||||
name="list_anime",
|
||||
description="List all tracked anime subscriptions. Returns title, season, status, and episode offset for each.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"active_only": {
|
||||
"type": "boolean",
|
||||
"description": "If true, only return active (non-disabled) anime",
|
||||
"default": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="get_anime",
|
||||
description="Get detailed information about a specific anime subscription by its ID.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"description": "The anime/bangumi ID",
|
||||
},
|
||||
},
|
||||
"required": ["id"],
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="search_anime",
|
||||
description="Search for anime torrents across torrent sites (Mikan, DMHY, Nyaa). Returns available anime matching the keywords.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"keywords": {
|
||||
"type": "string",
|
||||
"description": "Search keywords (e.g. anime title)",
|
||||
},
|
||||
"site": {
|
||||
"type": "string",
|
||||
"description": "Torrent site to search",
|
||||
"enum": ["mikan", "dmhy", "nyaa"],
|
||||
"default": "mikan",
|
||||
},
|
||||
},
|
||||
"required": ["keywords"],
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="subscribe_anime",
|
||||
description="Subscribe to an anime series by providing its RSS link. Analyzes the RSS feed and sets up automatic downloading.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"rss_link": {
|
||||
"type": "string",
|
||||
"description": "RSS feed URL for the anime (obtained from search_anime results)",
|
||||
},
|
||||
"parser": {
|
||||
"type": "string",
|
||||
"description": "RSS parser type",
|
||||
"enum": ["mikan", "dmhy", "nyaa"],
|
||||
"default": "mikan",
|
||||
},
|
||||
},
|
||||
"required": ["rss_link"],
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="unsubscribe_anime",
|
||||
description="Unsubscribe from an anime. Can either disable (keeps data) or fully delete the subscription.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"description": "The anime/bangumi ID to unsubscribe",
|
||||
},
|
||||
"delete": {
|
||||
"type": "boolean",
|
||||
"description": "If true, permanently delete the subscription. If false, just disable it.",
|
||||
"default": False,
|
||||
},
|
||||
},
|
||||
"required": ["id"],
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="list_downloads",
|
||||
description="Show current torrent download status from the download client (qBittorrent/Aria2).",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"description": "Filter by download status",
|
||||
"enum": ["all", "downloading", "completed", "paused"],
|
||||
"default": "all",
|
||||
},
|
||||
},
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="list_rss_feeds",
|
||||
description="List all configured RSS feeds with their connection status and health information.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="get_program_status",
|
||||
description="Get the current program status including version, running state, and first-run flag.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="refresh_feeds",
|
||||
description="Trigger an immediate refresh of all RSS feeds to check for new episodes.",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
},
|
||||
),
|
||||
types.Tool(
|
||||
name="update_anime",
|
||||
description="Update settings for a tracked anime (episode offset, season offset, filters, etc.).",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"description": "The anime/bangumi ID to update",
|
||||
},
|
||||
"episode_offset": {
|
||||
"type": "integer",
|
||||
"description": "Episode number offset for renaming",
|
||||
},
|
||||
"season_offset": {
|
||||
"type": "integer",
|
||||
"description": "Season number offset for renaming",
|
||||
},
|
||||
"season": {
|
||||
"type": "integer",
|
||||
"description": "Season number",
|
||||
},
|
||||
"filter": {
|
||||
"type": "string",
|
||||
"description": "Comma-separated filter patterns to exclude",
|
||||
},
|
||||
},
|
||||
"required": ["id"],
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def _bangumi_to_dict(b: Bangumi) -> dict:
    """Serialise a Bangumi row into a plain, JSON-ready dict."""
    return dict(
        id=b.id,
        official_title=b.official_title,
        title_raw=b.title_raw,
        season=b.season,
        group_name=b.group_name,
        dpi=b.dpi,
        source=b.source,
        subtitle=b.subtitle,
        episode_offset=b.episode_offset,
        season_offset=b.season_offset,
        filter=b.filter,
        rss_link=b.rss_link,
        poster_link=b.poster_link,
        added=b.added,
        save_path=b.save_path,
        deleted=b.deleted,
        archived=b.archived,
        eps_collect=b.eps_collect,
    )
|
||||
|
||||
|
||||
async def handle_tool(name: str, arguments: dict) -> list[types.TextContent]:
    """Run tool *name* and wrap its JSON result (or error) as MCP text content."""
    try:
        # Serialisation stays inside the try so non-JSON-safe results are
        # also reported back to the client rather than crashing the session.
        outcome = await _dispatch(name, arguments)
        body = json.dumps(outcome, ensure_ascii=False)
    except Exception as exc:
        logger.exception("[MCP] Tool %s failed", name)
        body = json.dumps({"error": str(exc)}, ensure_ascii=False)
    return [types.TextContent(type="text", text=body)]
|
||||
|
||||
|
||||
async def _dispatch(name: str, args: dict) -> dict | list:
|
||||
if name == "list_anime":
|
||||
return _list_anime(args.get("active_only", False))
|
||||
elif name == "get_anime":
|
||||
return _get_anime(args["id"])
|
||||
elif name == "search_anime":
|
||||
return await _search_anime(args["keywords"], args.get("site", "mikan"))
|
||||
elif name == "subscribe_anime":
|
||||
return await _subscribe_anime(args["rss_link"], args.get("parser", "mikan"))
|
||||
elif name == "unsubscribe_anime":
|
||||
return await _unsubscribe_anime(args["id"], args.get("delete", False))
|
||||
elif name == "list_downloads":
|
||||
return await _list_downloads(args.get("status", "all"))
|
||||
elif name == "list_rss_feeds":
|
||||
return _list_rss_feeds()
|
||||
elif name == "get_program_status":
|
||||
return _get_program_status()
|
||||
elif name == "refresh_feeds":
|
||||
return await _refresh_feeds()
|
||||
elif name == "update_anime":
|
||||
return await _update_anime(args)
|
||||
else:
|
||||
return {"error": f"Unknown tool: {name}"}
|
||||
|
||||
|
||||
def _list_anime(active_only: bool) -> list[dict]:
    """Return all tracked anime; only non-disabled entries when *active_only*."""
    with TorrentManager() as manager:
        rows = (
            manager.search_all_bangumi()
            if active_only
            else manager.bangumi.search_all()
        )
        return [_bangumi_to_dict(row) for row in rows]
|
||||
|
||||
|
||||
def _get_anime(bangumi_id: int) -> dict:
    """Look up one anime by ID; returns an error dict when not found."""
    with TorrentManager() as manager:
        found = manager.search_one(bangumi_id)
        if isinstance(found, Bangumi):
            return _bangumi_to_dict(found)
        # Non-Bangumi result is an error response carrying msg_en.
        return {"error": found.msg_en}
|
||||
|
||||
|
||||
async def _search_anime(keywords: str, site: str) -> list[dict]:
    """Search *site* for torrents matching *keywords*; capped at 20 results."""
    hits: list[dict] = []
    async with SearchTorrent() as st:
        # analyse_keyword yields JSON strings; stop early once we have 20.
        async for raw in st.analyse_keyword(keywords=keywords.split(), site=site):
            hits.append(json.loads(raw))
            if len(hits) >= 20:
                break
    return hits
|
||||
|
||||
|
||||
async def _subscribe_anime(rss_link: str, parser: str) -> dict:
    """Analyse *rss_link* and subscribe to the season it describes."""
    rss_item = RSSItem(url=rss_link, parser=parser)
    data = await RSSAnalyser().link_to_data(rss_item)
    if not isinstance(data, Bangumi):
        # link_to_data returned an error response instead of parsed metadata.
        message = data.msg_en if hasattr(data, "msg_en") else str(data)
        return {"error": message}
    resp = await SeasonCollector.subscribe_season(data, parser=parser)
    return {"status": resp.status, "message": resp.msg_en}
|
||||
|
||||
|
||||
async def _unsubscribe_anime(bangumi_id: int, delete: bool) -> dict:
    """Disable (default) or permanently delete a subscription."""
    with TorrentManager() as manager:
        action = manager.delete_rule if delete else manager.disable_rule
        resp = await action(bangumi_id)
        return {"status": resp.status, "message": resp.msg_en}
|
||||
|
||||
|
||||
async def _list_downloads(status: str) -> list[dict]:
    """Summarise torrents in the 'Bangumi' category, optionally filtered by state."""
    wanted = None if status == "all" else status
    async with DownloadClient() as client:
        torrents = await client.get_torrent_info(
            status_filter=wanted, category="Bangumi"
        )
        # String fields default to "", numeric fields to 0, matching the
        # shapes a qBittorrent/Aria2 torrent dict normally carries.
        string_keys = ("name", "state")
        numeric_keys = ("size", "progress", "dlspeed", "upspeed", "eta")
        summaries = []
        for t in torrents:
            entry = {key: t.get(key, "") for key in string_keys}
            entry.update({key: t.get(key, 0) for key in numeric_keys})
            summaries.append(entry)
        return summaries
|
||||
|
||||
|
||||
def _list_rss_feeds() -> list[dict]:
    """Return every configured RSS feed with its health metadata."""
    exported = (
        "id",
        "name",
        "url",
        "aggregate",
        "parser",
        "enabled",
        "connection_status",
        "last_checked_at",
        "last_error",
    )
    with RSSEngine() as engine:
        return [
            {attr: getattr(feed, attr) for attr in exported}
            for feed in engine.rss.search_all()
        ]
|
||||
|
||||
|
||||
def _get_program_status() -> dict:
    """Report the program version and its runtime flags."""
    # Imported lazily — presumably to avoid a circular import at module
    # load time, matching the same pattern in resources.py.
    from module.api.program import program

    return {
        "version": VERSION,
        "running": program.is_running,
        "first_run": program.first_run,
    }
|
||||
|
||||
|
||||
async def _refresh_feeds() -> dict:
    """Force an immediate refresh of all RSS feeds via the download client."""
    async with DownloadClient() as client:
        with RSSEngine() as engine:
            await engine.refresh_rss(client)
    return {"status": True, "message": "RSS feeds refreshed successfully"}
|
||||
|
||||
|
||||
async def _update_anime(args: dict) -> dict:
    """Apply partial updates (offsets, season, filter) to a tracked anime."""
    bangumi_id = args["id"]
    with TorrentManager() as manager:
        existing = manager.bangumi.search_id(bangumi_id)
        if not existing:
            return {"error": f"Anime with id {bangumi_id} not found"}

        # Start from the current row, then overwrite only the fields that
        # were explicitly supplied in the arguments.
        update_data = BangumiUpdate(**existing.model_dump())
        for field in ("episode_offset", "season_offset", "season", "filter"):
            if field in args:
                setattr(update_data, field, args[field])

        resp = await manager.update_rule(bangumi_id, update_data)
        return {"status": resp.status, "message": resp.msg_en}
|
||||
380
backend/src/test/test_mcp_resources.py
Normal file
380
backend/src/test/test_mcp_resources.py
Normal file
@@ -0,0 +1,380 @@
|
||||
"""Tests for module.mcp.resources - handle_resource() and _bangumi_to_dict()."""
|
||||
|
||||
import json
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from module.mcp.resources import (
|
||||
RESOURCE_TEMPLATES,
|
||||
RESOURCES,
|
||||
_bangumi_to_dict,
|
||||
handle_resource,
|
||||
)
|
||||
from module.models import Bangumi, ResponseModel
|
||||
from test.factories import make_bangumi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _mock_sync_manager(bangumi_list=None, single=None):
|
||||
"""Build a MagicMock that acts as a sync context-manager TorrentManager."""
|
||||
mock_mgr = MagicMock()
|
||||
if bangumi_list is not None:
|
||||
mock_mgr.bangumi.search_all.return_value = bangumi_list
|
||||
if single is not None:
|
||||
mock_mgr.search_one.return_value = single
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
ctx.__exit__ = MagicMock(return_value=False)
|
||||
return ctx, mock_mgr
|
||||
|
||||
|
||||
def _mock_rss_engine(feeds):
|
||||
"""Build a MagicMock that acts as a sync context-manager RSSEngine."""
|
||||
mock_eng = MagicMock()
|
||||
mock_eng.rss.search_all.return_value = feeds
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__ = MagicMock(return_value=mock_eng)
|
||||
ctx.__exit__ = MagicMock(return_value=False)
|
||||
return ctx
|
||||
|
||||
|
||||
def _parse(raw: str) -> dict | list:
    """Decode a JSON string; shorthand used by every test below."""
    return json.loads(raw)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Static metadata (RESOURCES / RESOURCE_TEMPLATES)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestResourceMetadata:
    """Verify the static resource and template lists."""

    def test_resources_is_list(self):
        """RESOURCES is a plain list (order-preserving catalogue)."""
        assert isinstance(RESOURCES, list)

    def test_resources_not_empty(self):
        """At least one static resource is advertised."""
        assert len(RESOURCES) > 0

    def test_resource_uris_present(self):
        """All three documented static URIs are in the catalogue."""
        uris = {str(r.uri) for r in RESOURCES}
        assert "autobangumi://anime/list" in uris
        assert "autobangumi://status" in uris
        assert "autobangumi://rss/feeds" in uris

    def test_resource_templates_is_list(self):
        """RESOURCE_TEMPLATES is a plain list."""
        assert isinstance(RESOURCE_TEMPLATES, list)

    def test_anime_id_template_present(self):
        """The per-anime lookup template is advertised."""
        templates = [str(t.uriTemplate) for t in RESOURCE_TEMPLATES]
        assert "autobangumi://anime/{id}" in templates
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _bangumi_to_dict (resources module version)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestBangumiToDictResources:
    """
    resources._bangumi_to_dict is a leaner version of the tools one
    (no dpi/source/subtitle/group_name fields).
    """

    @pytest.fixture
    def sample(self) -> Bangumi:
        # A representative, fully-populated Bangumi row.
        return make_bangumi(
            id=10,
            official_title="Demon Slayer",
            title_raw="Kimetsu no Yaiba",
            season=3,
            episode_offset=2,
            season_offset=1,
            filter="720",
            rss_link="https://mikanani.me/RSS/ds",
            poster_link="/poster/ds.jpg",
            added=True,
            save_path="/downloads/Demon Slayer",
            deleted=False,
            archived=False,
            eps_collect=True,
        )

    def test_returns_dict(self, sample):
        """Serialiser output is a plain dict."""
        assert isinstance(_bangumi_to_dict(sample), dict)

    def test_required_keys_present(self, sample):
        """Every documented output key is present."""
        result = _bangumi_to_dict(sample)
        required = {
            "id",
            "official_title",
            "title_raw",
            "season",
            "episode_offset",
            "season_offset",
            "filter",
            "rss_link",
            "poster_link",
            "added",
            "save_path",
            "deleted",
            "archived",
            "eps_collect",
        }
        assert required.issubset(result.keys())

    def test_id_value(self, sample):
        """`id` passes through unchanged."""
        assert _bangumi_to_dict(sample)["id"] == 10

    def test_official_title_value(self, sample):
        """`official_title` passes through unchanged."""
        assert _bangumi_to_dict(sample)["official_title"] == "Demon Slayer"

    def test_eps_collect_true(self, sample):
        """Boolean flags keep identity (is True, not merely truthy)."""
        assert _bangumi_to_dict(sample)["eps_collect"] is True

    def test_none_optional_poster(self):
        """Optional fields set to None stay None."""
        b = make_bangumi(id=1, poster_link=None)
        assert _bangumi_to_dict(b)["poster_link"] is None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# handle_resource - known static URIs
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestHandleResourceAnimeList:
    """Tests for autobangumi://anime/list."""

    def test_returns_json_string(self):
        """Result is a valid JSON string."""
        ctx, _ = _mock_sync_manager(bangumi_list=[])
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            raw = handle_resource("autobangumi://anime/list")
        assert isinstance(raw, str)
        _parse(raw)  # must not raise

    def test_empty_database_returns_empty_list(self):
        """Empty DB produces an empty JSON array."""
        ctx, _ = _mock_sync_manager(bangumi_list=[])
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            result = _parse(handle_resource("autobangumi://anime/list"))
        assert result == []

    def test_multiple_bangumi_serialised(self):
        """Multiple Bangumi entries all appear in the output list."""
        bangumi = [make_bangumi(id=1), make_bangumi(id=2, title_raw="Other")]
        ctx, _ = _mock_sync_manager(bangumi_list=bangumi)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            result = _parse(handle_resource("autobangumi://anime/list"))
        assert len(result) == 2

    def test_ids_are_in_output(self):
        """Each serialised entry contains its correct id."""
        bangumi = [make_bangumi(id=7), make_bangumi(id=8, title_raw="B")]
        ctx, _ = _mock_sync_manager(bangumi_list=bangumi)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            result = _parse(handle_resource("autobangumi://anime/list"))
        ids = {item["id"] for item in result}
        assert {7, 8}.issubset(ids)

    def test_non_ascii_titles_preserved(self):
        """Japanese/Chinese titles survive JSON serialisation."""
        bangumi = [make_bangumi(id=1, official_title="進撃の巨人")]
        ctx, _ = _mock_sync_manager(bangumi_list=bangumi)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            raw = handle_resource("autobangumi://anime/list")
        # ensure_ascii=False means the characters appear verbatim
        assert "進撃の巨人" in raw
|
||||
|
||||
|
||||
class TestHandleResourceStatus:
    """Tests for autobangumi://status."""

    @pytest.fixture
    def mock_program(self):
        # Stand-in for module.api.program.program with known flags.
        prog = MagicMock()
        prog.is_running = True
        prog.first_run = False
        return prog

    def test_returns_json_string(self, mock_program):
        """Result is a valid JSON string."""
        with (
            patch("module.mcp.resources.VERSION", "3.2.0-test"),
            patch("module.api.program.program", mock_program),
        ):
            raw = handle_resource("autobangumi://status")
        assert isinstance(raw, str)
        _parse(raw)

    def test_version_in_output(self, mock_program):
        """Patched VERSION is reported verbatim."""
        with (
            patch("module.mcp.resources.VERSION", "3.2.0-test"),
            patch("module.api.program.program", mock_program),
        ):
            result = _parse(handle_resource("autobangumi://status"))
        assert result["version"] == "3.2.0-test"

    def test_running_true(self, mock_program):
        """is_running flag is surfaced as `running`."""
        mock_program.is_running = True
        with (
            patch("module.mcp.resources.VERSION", "3.2.0-test"),
            patch("module.api.program.program", mock_program),
        ):
            result = _parse(handle_resource("autobangumi://status"))
        assert result["running"] is True

    def test_first_run_false(self, mock_program):
        """first_run flag is surfaced as `first_run`."""
        mock_program.first_run = False
        with (
            patch("module.mcp.resources.VERSION", "3.2.0-test"),
            patch("module.api.program.program", mock_program),
        ):
            result = _parse(handle_resource("autobangumi://status"))
        assert result["first_run"] is False

    def test_all_keys_present(self, mock_program):
        """The status payload has exactly the documented shape."""
        with (
            patch("module.mcp.resources.VERSION", "3.2.0-test"),
            patch("module.api.program.program", mock_program),
        ):
            result = _parse(handle_resource("autobangumi://status"))
        assert {"version", "running", "first_run"}.issubset(result.keys())
|
||||
|
||||
|
||||
class TestHandleResourceRssFeeds:
    """Tests for autobangumi://rss/feeds."""

    def _make_feed(self, feed_id=1, name="TestFeed", url="https://example.com/rss"):
        """Build a MagicMock feed row carrying the fields the resource reads."""
        feed = MagicMock()
        feed.id = feed_id
        feed.name = name
        feed.url = url
        feed.enabled = True
        feed.connection_status = "ok"
        feed.last_checked_at = "2024-01-01T00:00:00"
        return feed

    def _fetch(self, feeds):
        """Serve *feeds* through a mocked RSSEngine and return the raw payload."""
        ctx = _mock_rss_engine(feeds)
        with patch("module.mcp.resources.RSSEngine", return_value=ctx):
            return handle_resource("autobangumi://rss/feeds")

    def test_returns_json_string(self):
        raw = self._fetch([])
        assert isinstance(raw, str)
        _parse(raw)

    def test_empty_feeds_returns_empty_list(self):
        assert _parse(self._fetch([])) == []

    def test_feed_fields_present(self):
        feed = self._make_feed(feed_id=2, name="Mikan", url="https://mikanani.me/rss")
        entry = _parse(self._fetch([feed]))[0]
        assert entry["id"] == 2
        assert entry["name"] == "Mikan"
        assert entry["url"] == "https://mikanani.me/rss"
        assert "enabled" in entry
        assert "connection_status" in entry
        assert "last_checked_at" in entry

    def test_multiple_feeds(self):
        feeds = [
            self._make_feed(1, "Feed A", "https://a.example.com/rss"),
            self._make_feed(2, "Feed B", "https://b.example.com/rss"),
        ]
        assert len(_parse(self._fetch(feeds))) == 2
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# handle_resource - anime/{id} template
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestHandleResourceAnimeById:
    """Tests for the autobangumi://anime/{id} template."""

    def test_valid_id_returns_bangumi_dict(self):
        """A numeric ID resolves to the bangumi's serialised dict."""
        bangumi = make_bangumi(id=3, official_title="Fullmetal Alchemist")
        ctx, _ = _mock_sync_manager(single=bangumi)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            result = _parse(handle_resource("autobangumi://anime/3"))
        assert result["id"] == 3
        assert result["official_title"] == "Fullmetal Alchemist"

    def test_not_found_id_returns_error(self):
        """When search_one returns a ResponseModel, result contains 'error'."""
        # search_one yielding a ResponseModel (instead of a Bangumi) is the
        # "not found" signal the resource handler converts into an error dict.
        not_found = ResponseModel(
            status=False, status_code=404, msg_en="Anime not found", msg_zh="未找到"
        )
        ctx, _ = _mock_sync_manager(single=not_found)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            result = _parse(handle_resource("autobangumi://anime/9999"))
        assert "error" in result

    def test_non_numeric_id_returns_error(self):
        """A non-integer ID segment produces a JSON error without crashing."""
        # No TorrentManager patch needed: the handler must reject "abc"
        # before ever touching the database layer.
        result = _parse(handle_resource("autobangumi://anime/abc"))
        assert "error" in result
        assert "abc" in result["error"]

    def test_negative_id_is_passed_to_manager(self):
        """Negative integers are valid integers and passed through."""
        not_found = ResponseModel(
            status=False, status_code=404, msg_en="Anime not found", msg_zh="未找到"
        )
        ctx, mock_mgr = _mock_sync_manager(single=not_found)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            handle_resource("autobangumi://anime/-1")
        # The handler parses "-1" as an int and forwards it verbatim.
        mock_mgr.search_one.assert_called_once_with(-1)

    def test_result_has_required_fields(self):
        """Returned dict contains all expected bangumi fields."""
        bangumi = make_bangumi(id=5)
        ctx, _ = _mock_sync_manager(single=bangumi)
        with patch("module.mcp.resources.TorrentManager", return_value=ctx):
            result = _parse(handle_resource("autobangumi://anime/5"))
        required = {"id", "official_title", "title_raw", "season", "rss_link"}
        assert required.issubset(result.keys())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# handle_resource - unknown URI
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestHandleResourceUnknown:
    """Tests for unrecognised resource URIs."""

    def test_unknown_uri_returns_json_error(self):
        """An unrecognised URI produces a JSON object with 'error'."""
        payload = _parse(handle_resource("autobangumi://does/not/exist"))
        assert "error" in payload

    def test_unknown_uri_mentions_uri_in_error(self):
        """The error message includes the unrecognised URI."""
        uri = "autobangumi://does/not/exist"
        payload = _parse(handle_resource(uri))
        assert uri in payload["error"]

    def test_empty_uri_returns_error(self):
        """An empty string URI returns a JSON error."""
        payload = _parse(handle_resource(""))
        assert "error" in payload

    def test_completely_different_scheme_returns_error(self):
        """A URI with a wrong scheme returns a JSON error."""
        payload = _parse(
            handle_resource("https://autobangumi.example.com/anime/list")
        )
        assert "error" in payload
|
||||
255
backend/src/test/test_mcp_security.py
Normal file
255
backend/src/test/test_mcp_security.py
Normal file
@@ -0,0 +1,255 @@
|
||||
"""Tests for module.mcp.security - LocalNetworkMiddleware and _is_local()."""
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from starlette.applications import Starlette
|
||||
from starlette.responses import PlainTextResponse
|
||||
from starlette.routing import Route
|
||||
from starlette.testclient import TestClient
|
||||
|
||||
from module.mcp.security import LocalNetworkMiddleware, _is_local
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _is_local() unit tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestIsLocal:
    """Verify _is_local() correctly classifies IP addresses.

    NOTE: all assertions use ``is True`` / ``is False`` deliberately, so the
    tests also pin that _is_local returns a real bool, not a truthy value.
    """

    # --- loopback ---

    def test_ipv4_loopback_127_0_0_1(self):
        """127.0.0.1 is the canonical loopback address."""
        assert _is_local("127.0.0.1") is True

    def test_ipv4_loopback_127_0_0_2(self):
        """127.0.0.2 is within 127.0.0.0/8 and therefore local."""
        assert _is_local("127.0.0.2") is True

    def test_ipv4_loopback_127_255_255_255(self):
        """Top of 127.0.0.0/8 range is still local."""
        assert _is_local("127.255.255.255") is True

    # --- RFC 1918 class-A (10.0.0.0/8) ---

    def test_ipv4_10_network_start(self):
        """10.0.0.1 is in 10.0.0.0/8 private range."""
        assert _is_local("10.0.0.1") is True

    def test_ipv4_10_network_mid(self):
        """10.10.20.30 is inside 10.0.0.0/8."""
        assert _is_local("10.10.20.30") is True

    def test_ipv4_10_network_end(self):
        """10.255.255.254 is the last usable address in 10.0.0.0/8."""
        assert _is_local("10.255.255.254") is True

    # --- RFC 1918 class-B (172.16.0.0/12) ---

    def test_ipv4_172_16_start(self):
        """172.16.0.1 is the first address in 172.16.0.0/12."""
        assert _is_local("172.16.0.1") is True

    def test_ipv4_172_31_end(self):
        """172.31.255.254 is at the top of the 172.16.0.0/12 range."""
        assert _is_local("172.31.255.254") is True

    def test_ipv4_172_15_not_local(self):
        """172.15.255.255 is just outside 172.16.0.0/12 (below the range)."""
        assert _is_local("172.15.255.255") is False

    def test_ipv4_172_32_not_local(self):
        """172.32.0.0 is just above 172.16.0.0/12 (outside the range)."""
        assert _is_local("172.32.0.0") is False

    # --- RFC 1918 class-C (192.168.0.0/16) ---

    def test_ipv4_192_168_start(self):
        """192.168.0.1 is a typical home-router address."""
        assert _is_local("192.168.0.1") is True

    def test_ipv4_192_168_end(self):
        """192.168.255.254 is at the top of 192.168.0.0/16."""
        assert _is_local("192.168.255.254") is True

    # --- Public IPv4 ---

    def test_public_ipv4_google_dns(self):
        """8.8.8.8 (Google DNS) is a public address."""
        assert _is_local("8.8.8.8") is False

    def test_public_ipv4_cloudflare_dns(self):
        """1.1.1.1 (Cloudflare) is a public address."""
        assert _is_local("1.1.1.1") is False

    def test_public_ipv4_broadcast_like(self):
        """203.0.113.1 (TEST-NET-3, RFC 5737) is not a private address."""
        assert _is_local("203.0.113.1") is False

    # --- IPv6 loopback ---

    def test_ipv6_loopback(self):
        """::1 is the IPv6 loopback address."""
        assert _is_local("::1") is True

    # --- IPv6 link-local (fe80::/10) ---

    def test_ipv6_link_local(self):
        """fe80::1 is an IPv6 link-local address."""
        assert _is_local("fe80::1") is True

    def test_ipv6_link_local_full(self):
        """fe80::aabb:ccdd is also link-local."""
        assert _is_local("fe80::aabb:ccdd") is True

    # --- IPv6 ULA (fc00::/7) ---

    def test_ipv6_ula_fc(self):
        """fc00::1 is within the ULA range fc00::/7."""
        assert _is_local("fc00::1") is True

    def test_ipv6_ula_fd(self):
        """fd00::1 is within the ULA range fc00::/7 (fd prefix)."""
        assert _is_local("fd00::1") is True

    # --- Public IPv6 ---

    def test_public_ipv6_google(self):
        """2001:4860:4860::8888 (Google IPv6 DNS) is a public address."""
        assert _is_local("2001:4860:4860::8888") is False

    def test_public_ipv6_documentation(self):
        """2001:db8::1 (documentation prefix, RFC 3849) is public."""
        assert _is_local("2001:db8::1") is False

    # --- Invalid inputs ---

    def test_invalid_hostname_returns_false(self):
        """A hostname string is not parseable as an IP and must return False."""
        assert _is_local("localhost") is False

    def test_invalid_string_returns_false(self):
        """A random non-IP string returns False without raising."""
        assert _is_local("not-an-ip") is False

    def test_empty_string_returns_false(self):
        """An empty string is not a valid IP address."""
        assert _is_local("") is False

    def test_malformed_ipv4_returns_false(self):
        """A string that looks like IPv4 but is malformed returns False."""
        # 256 is out of octet range, so parsing must fail cleanly.
        assert _is_local("256.0.0.1") is False

    def test_partial_ipv4_returns_false(self):
        """An incomplete IPv4 address is not valid."""
        assert _is_local("192.168") is False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# LocalNetworkMiddleware integration tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _make_app_with_middleware() -> Starlette:
    """Return a tiny Starlette app wrapped in LocalNetworkMiddleware."""

    async def _root_endpoint(request):
        # The literal "ok" body is what the middleware tests assert on.
        return PlainTextResponse("ok")

    application = Starlette(routes=[Route("/", _root_endpoint)])
    application.add_middleware(LocalNetworkMiddleware)
    return application
|
||||
|
||||
|
||||
class TestLocalNetworkMiddleware:
    """Verify LocalNetworkMiddleware allows or denies requests by client IP."""

    @pytest.fixture
    def app(self):
        return _make_app_with_middleware()

    @staticmethod
    def _client_as(app, client):
        """Return a TestClient whose requests carry *client* in the ASGI scope.

        Starlette's TestClient identifies itself as "testclient", not a real
        IP, so the scope is rewritten just before it enters the middleware
        stack to simulate the desired remote address.
        """
        original_build = app.build_middleware_stack

        async def patched_app(scope, receive, send):
            if scope["type"] == "http":
                scope["client"] = client
            await original_build()(scope, receive, send)

        app.build_middleware_stack = lambda: patched_app  # type: ignore[method-assign]
        return TestClient(app, raise_server_exceptions=False)

    def test_local_ipv4_loopback_allowed(self, app):
        """Requests from 127.0.0.1 are allowed through."""
        response = self._client_as(app, ("127.0.0.1", 12345)).get("/")
        assert response.status_code == 200
        assert response.text == "ok"

    def test_non_local_ip_blocked(self, app):
        """Requests from a public IP are rejected with 403."""
        response = self._client_as(app, ("8.8.8.8", 12345)).get("/")
        assert response.status_code == 403
        assert "MCP access is restricted to local network" in response.text

    def test_missing_client_blocked(self, app):
        """Requests with no client information are rejected with 403."""
        response = self._client_as(app, None).get("/")
        assert response.status_code == 403

    def test_blocked_response_is_json(self, app):
        """The 403 error body is valid JSON with an 'error' key."""
        import json

        response = self._client_as(app, ("1.2.3.4", 9999)).get("/")
        assert response.status_code == 403
        body = json.loads(response.text)
        assert "error" in body

    def test_private_192_168_allowed(self, app):
        """Requests from a 192.168.x.x address pass through."""
        response = self._client_as(app, ("192.168.1.100", 54321)).get("/")
        assert response.status_code == 200
|
||||
600
backend/src/test/test_mcp_tools.py
Normal file
600
backend/src/test/test_mcp_tools.py
Normal file
@@ -0,0 +1,600 @@
|
||||
"""Tests for module.mcp.tools - _bangumi_to_dict helper and _dispatch routing."""
|
||||
|
||||
import json
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from module.mcp.tools import (
|
||||
TOOLS,
|
||||
_bangumi_to_dict,
|
||||
_dispatch,
|
||||
handle_tool,
|
||||
)
|
||||
from module.models import Bangumi, ResponseModel
|
||||
from test.factories import make_bangumi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _make_response(status: bool = True, msg: str = "OK") -> ResponseModel:
    """Build a 200 ResponseModel whose English and Chinese messages match."""
    return ResponseModel(
        status=status,
        status_code=200,
        msg_en=msg,
        msg_zh=msg,
    )
|
||||
|
||||
|
||||
def _mock_sync_manager(bangumi_list=None, single=None):
|
||||
"""Return a MagicMock that acts as a sync context-manager TorrentManager."""
|
||||
mock_mgr = MagicMock()
|
||||
if bangumi_list is not None:
|
||||
mock_mgr.bangumi.search_all.return_value = bangumi_list
|
||||
mock_mgr.search_all_bangumi.return_value = bangumi_list
|
||||
if single is not None:
|
||||
mock_mgr.search_one.return_value = single
|
||||
mock_mgr.bangumi.search_id.return_value = single
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__ = MagicMock(return_value=mock_mgr)
|
||||
ctx.__exit__ = MagicMock(return_value=False)
|
||||
return ctx, mock_mgr
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _bangumi_to_dict
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestBangumiToDict:
    """Verify _bangumi_to_dict converts a Bangumi model to the expected dict shape."""

    @pytest.fixture
    def sample_bangumi(self) -> Bangumi:
        # Fully-populated fixture so every mapped field has a known value.
        return make_bangumi(
            id=42,
            official_title="Attack on Titan",
            title_raw="Shingeki no Kyojin",
            season=4,
            group_name="SubsPlease",
            dpi="1080p",
            source="Web",
            subtitle="ENG",
            episode_offset=0,
            season_offset=0,
            filter="720",
            rss_link="https://mikanani.me/RSS/Bangumi/1",
            poster_link="/poster/aot.jpg",
            added=True,
            save_path="/downloads/Attack on Titan",
            deleted=False,
            archived=False,
            eps_collect=False,
        )

    def test_returns_dict(self, sample_bangumi):
        """Result must be a plain dict."""
        result = _bangumi_to_dict(sample_bangumi)
        assert isinstance(result, dict)

    def test_id_field(self, sample_bangumi):
        """id is mapped correctly."""
        assert _bangumi_to_dict(sample_bangumi)["id"] == 42

    def test_official_title_field(self, sample_bangumi):
        """official_title is mapped correctly."""
        assert _bangumi_to_dict(sample_bangumi)["official_title"] == "Attack on Titan"

    def test_title_raw_field(self, sample_bangumi):
        """title_raw is mapped correctly."""
        assert _bangumi_to_dict(sample_bangumi)["title_raw"] == "Shingeki no Kyojin"

    def test_season_field(self, sample_bangumi):
        """season is mapped correctly."""
        assert _bangumi_to_dict(sample_bangumi)["season"] == 4

    def test_episode_offset_field(self, sample_bangumi):
        """episode_offset is present."""
        assert _bangumi_to_dict(sample_bangumi)["episode_offset"] == 0

    def test_season_offset_field(self, sample_bangumi):
        """season_offset is present."""
        assert _bangumi_to_dict(sample_bangumi)["season_offset"] == 0

    def test_rss_link_field(self, sample_bangumi):
        """rss_link is mapped correctly."""
        assert (
            _bangumi_to_dict(sample_bangumi)["rss_link"]
            == "https://mikanani.me/RSS/Bangumi/1"
        )

    def test_deleted_field(self, sample_bangumi):
        """deleted flag is mapped."""
        assert _bangumi_to_dict(sample_bangumi)["deleted"] is False

    def test_archived_field(self, sample_bangumi):
        """archived flag is mapped."""
        assert _bangumi_to_dict(sample_bangumi)["archived"] is False

    def test_eps_collect_field(self, sample_bangumi):
        """eps_collect flag is mapped."""
        assert _bangumi_to_dict(sample_bangumi)["eps_collect"] is False

    def test_all_expected_keys_present(self, sample_bangumi):
        """Every expected key is present in the returned dict."""
        expected_keys = {
            "id",
            "official_title",
            "title_raw",
            "season",
            "group_name",
            "dpi",
            "source",
            "subtitle",
            "episode_offset",
            "season_offset",
            "filter",
            "rss_link",
            "poster_link",
            "added",
            "save_path",
            "deleted",
            "archived",
            "eps_collect",
        }
        result = _bangumi_to_dict(sample_bangumi)
        assert expected_keys.issubset(result.keys())

    def test_none_optional_fields(self):
        """Optional fields that are None are preserved as None."""
        # None must survive serialisation rather than becoming "" or "None".
        b = make_bangumi(id=1, poster_link=None, save_path=None, group_name=None)
        result = _bangumi_to_dict(b)
        assert result["poster_link"] is None
        assert result["save_path"] is None
        assert result["group_name"] is None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# TOOLS list
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestToolsDefinitions:
    """Sanity-check the static TOOLS list."""

    def test_tools_is_list(self):
        # The registry must be a plain list for the MCP server to iterate.
        assert isinstance(TOOLS, list)

    def test_tools_not_empty(self):
        assert len(TOOLS) > 0

    def test_all_tools_have_names(self):
        for entry in TOOLS:
            assert entry.name, f"Tool missing name: {entry}"

    def test_expected_tool_names_present(self):
        declared = {tool.name for tool in TOOLS}
        required = {
            "list_anime",
            "get_anime",
            "search_anime",
            "subscribe_anime",
            "unsubscribe_anime",
            "list_downloads",
            "list_rss_feeds",
            "get_program_status",
            "refresh_feeds",
            "update_anime",
        }
        assert required.issubset(declared)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _dispatch routing
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestDispatch:
|
||||
"""Verify _dispatch delegates to the correct handler for each tool name."""
|
||||
|
||||
# --- list_anime ---
|
||||
|
||||
async def test_dispatch_list_anime_all(self):
|
||||
"""list_anime without active_only returns all bangumi."""
|
||||
bangumi = [make_bangumi(id=1), make_bangumi(id=2, title_raw="Another")]
|
||||
ctx, _ = _mock_sync_manager(bangumi_list=bangumi)
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("list_anime", {})
|
||||
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 2
|
||||
|
||||
async def test_dispatch_list_anime_active_only(self):
|
||||
"""list_anime with active_only=True calls search_all_bangumi."""
|
||||
bangumi = [make_bangumi(id=1)]
|
||||
ctx, mock_mgr = _mock_sync_manager(bangumi_list=bangumi)
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("list_anime", {"active_only": True})
|
||||
|
||||
mock_mgr.search_all_bangumi.assert_called_once()
|
||||
assert len(result) == 1
|
||||
|
||||
# --- get_anime ---
|
||||
|
||||
async def test_dispatch_get_anime_found(self):
|
||||
"""get_anime returns dict when bangumi exists."""
|
||||
bangumi = make_bangumi(id=5, official_title="Naruto")
|
||||
ctx, _ = _mock_sync_manager(single=bangumi)
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("get_anime", {"id": 5})
|
||||
|
||||
assert result["id"] == 5
|
||||
assert result["official_title"] == "Naruto"
|
||||
|
||||
async def test_dispatch_get_anime_not_found(self):
|
||||
"""get_anime returns error dict when lookup fails."""
|
||||
not_found = ResponseModel(
|
||||
status=False,
|
||||
status_code=404,
|
||||
msg_en="Not found",
|
||||
msg_zh="未找到",
|
||||
)
|
||||
ctx, _ = _mock_sync_manager(single=not_found)
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("get_anime", {"id": 999})
|
||||
|
||||
assert "error" in result
|
||||
assert result["error"] == "Not found"
|
||||
|
||||
# --- search_anime ---
|
||||
|
||||
async def test_dispatch_search_anime(self):
|
||||
"""search_anime calls SearchTorrent.analyse_keyword and returns list."""
|
||||
fake_item = json.dumps(
|
||||
{"official_title": "One Piece", "rss_link": "https://mikan/rss/1"}
|
||||
)
|
||||
|
||||
async def fake_analyse_keyword(keywords, site):
|
||||
yield fake_item
|
||||
|
||||
mock_st = AsyncMock()
|
||||
mock_st.analyse_keyword = fake_analyse_keyword
|
||||
mock_st.__aenter__ = AsyncMock(return_value=mock_st)
|
||||
mock_st.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
with patch("module.mcp.tools.SearchTorrent", return_value=mock_st):
|
||||
result = await _dispatch(
|
||||
"search_anime", {"keywords": "One Piece", "site": "mikan"}
|
||||
)
|
||||
|
||||
assert isinstance(result, list)
|
||||
assert len(result) == 1
|
||||
assert result[0]["official_title"] == "One Piece"
|
||||
|
||||
async def test_dispatch_search_anime_default_site(self):
|
||||
"""search_anime defaults to site='mikan' when site is omitted."""
|
||||
captured_site = []
|
||||
|
||||
async def fake_analyse_keyword(keywords, site):
|
||||
captured_site.append(site)
|
||||
return
|
||||
yield # make it an async generator
|
||||
|
||||
mock_st = AsyncMock()
|
||||
mock_st.analyse_keyword = fake_analyse_keyword
|
||||
mock_st.__aenter__ = AsyncMock(return_value=mock_st)
|
||||
mock_st.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
with patch("module.mcp.tools.SearchTorrent", return_value=mock_st):
|
||||
await _dispatch("search_anime", {"keywords": "Bleach"})
|
||||
|
||||
assert captured_site == ["mikan"]
|
||||
|
||||
# --- subscribe_anime ---
|
||||
|
||||
async def test_dispatch_subscribe_anime_success(self):
|
||||
"""subscribe_anime returns status dict on success."""
|
||||
fake_bangumi = make_bangumi(id=10)
|
||||
fake_resp = _make_response(True, "Subscribed successfully")
|
||||
|
||||
mock_analyser = AsyncMock()
|
||||
mock_analyser.link_to_data = AsyncMock(return_value=fake_bangumi)
|
||||
|
||||
with (
|
||||
patch("module.mcp.tools.RSSAnalyser", return_value=mock_analyser),
|
||||
patch(
|
||||
"module.mcp.tools.SeasonCollector.subscribe_season",
|
||||
new=AsyncMock(return_value=fake_resp),
|
||||
),
|
||||
):
|
||||
result = await _dispatch(
|
||||
"subscribe_anime",
|
||||
{"rss_link": "https://mikanani.me/RSS/test", "parser": "mikan"},
|
||||
)
|
||||
|
||||
assert result["status"] is True
|
||||
assert "Subscribed" in result["message"]
|
||||
|
||||
async def test_dispatch_subscribe_anime_failure(self):
|
||||
"""subscribe_anime returns error when analyser does not return Bangumi."""
|
||||
fake_error = ResponseModel(
|
||||
status=False, status_code=500, msg_en="Parse failed", msg_zh="解析失败"
|
||||
)
|
||||
|
||||
mock_analyser = AsyncMock()
|
||||
mock_analyser.link_to_data = AsyncMock(return_value=fake_error)
|
||||
|
||||
with patch("module.mcp.tools.RSSAnalyser", return_value=mock_analyser):
|
||||
result = await _dispatch(
|
||||
"subscribe_anime",
|
||||
{"rss_link": "https://bad-rss.example.com", "parser": "mikan"},
|
||||
)
|
||||
|
||||
assert "error" in result
|
||||
|
||||
# --- unsubscribe_anime ---
|
||||
|
||||
async def test_dispatch_unsubscribe_disable(self):
|
||||
"""unsubscribe_anime with delete=False calls disable_rule."""
|
||||
ctx, mock_mgr = _mock_sync_manager()
|
||||
mock_mgr.disable_rule = AsyncMock(return_value=_make_response(True, "Disabled"))
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("unsubscribe_anime", {"id": 3, "delete": False})
|
||||
|
||||
mock_mgr.disable_rule.assert_called_once_with(3)
|
||||
assert result["status"] is True
|
||||
|
||||
async def test_dispatch_unsubscribe_delete(self):
|
||||
"""unsubscribe_anime with delete=True calls delete_rule."""
|
||||
ctx, mock_mgr = _mock_sync_manager()
|
||||
mock_mgr.delete_rule = AsyncMock(return_value=_make_response(True, "Deleted"))
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("unsubscribe_anime", {"id": 3, "delete": True})
|
||||
|
||||
mock_mgr.delete_rule.assert_called_once_with(3)
|
||||
assert result["status"] is True
|
||||
|
||||
# --- list_downloads ---
|
||||
|
||||
async def test_dispatch_list_downloads_all(self):
|
||||
"""list_downloads with status='all' returns full torrent list."""
|
||||
torrent_data = [
|
||||
{
|
||||
"name": "Ep01.mkv",
|
||||
"size": 500,
|
||||
"progress": 1.0,
|
||||
"state": "completed",
|
||||
"dlspeed": 0,
|
||||
"upspeed": 0,
|
||||
"eta": 0,
|
||||
}
|
||||
]
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get_torrent_info = AsyncMock(return_value=torrent_data)
|
||||
mock_client.__aenter__ = AsyncMock(return_value=mock_client)
|
||||
mock_client.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
with patch("module.mcp.tools.DownloadClient", return_value=mock_client):
|
||||
result = await _dispatch("list_downloads", {"status": "all"})
|
||||
|
||||
mock_client.get_torrent_info.assert_called_once_with(
|
||||
status_filter=None, category="Bangumi"
|
||||
)
|
||||
assert len(result) == 1
|
||||
assert result[0]["name"] == "Ep01.mkv"
|
||||
|
||||
async def test_dispatch_list_downloads_filter_downloading(self):
|
||||
"""list_downloads with status='downloading' passes filter to client."""
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get_torrent_info = AsyncMock(return_value=[])
|
||||
mock_client.__aenter__ = AsyncMock(return_value=mock_client)
|
||||
mock_client.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
with patch("module.mcp.tools.DownloadClient", return_value=mock_client):
|
||||
await _dispatch("list_downloads", {"status": "downloading"})
|
||||
|
||||
mock_client.get_torrent_info.assert_called_once_with(
|
||||
status_filter="downloading", category="Bangumi"
|
||||
)
|
||||
|
||||
async def test_dispatch_list_downloads_keys(self):
|
||||
"""Each torrent entry contains expected keys only."""
|
||||
torrent_data = [
|
||||
{
|
||||
"name": "Ep02.mkv",
|
||||
"size": 800,
|
||||
"progress": 0.5,
|
||||
"state": "downloading",
|
||||
"dlspeed": 1024,
|
||||
"upspeed": 512,
|
||||
"eta": 3600,
|
||||
"extra_key": "should_not_appear",
|
||||
}
|
||||
]
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get_torrent_info = AsyncMock(return_value=torrent_data)
|
||||
mock_client.__aenter__ = AsyncMock(return_value=mock_client)
|
||||
mock_client.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
with patch("module.mcp.tools.DownloadClient", return_value=mock_client):
|
||||
result = await _dispatch("list_downloads", {})
|
||||
|
||||
expected_keys = {
|
||||
"name",
|
||||
"size",
|
||||
"progress",
|
||||
"state",
|
||||
"dlspeed",
|
||||
"upspeed",
|
||||
"eta",
|
||||
}
|
||||
assert set(result[0].keys()) == expected_keys
|
||||
|
||||
# --- list_rss_feeds ---
|
||||
|
||||
async def test_dispatch_list_rss_feeds(self):
|
||||
"""list_rss_feeds returns serialised RSS feed list."""
|
||||
fake_feed = MagicMock()
|
||||
fake_feed.id = 1
|
||||
fake_feed.name = "Mikan"
|
||||
fake_feed.url = "https://mikanani.me/RSS/test"
|
||||
fake_feed.aggregate = True
|
||||
fake_feed.parser = "mikan"
|
||||
fake_feed.enabled = True
|
||||
fake_feed.connection_status = "ok"
|
||||
fake_feed.last_checked_at = "2024-01-01T00:00:00"
|
||||
fake_feed.last_error = None
|
||||
|
||||
mock_engine = MagicMock()
|
||||
mock_engine.rss.search_all.return_value = [fake_feed]
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__ = MagicMock(return_value=mock_engine)
|
||||
ctx.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
with patch("module.mcp.tools.RSSEngine", return_value=ctx):
|
||||
result = await _dispatch("list_rss_feeds", {})
|
||||
|
||||
assert isinstance(result, list)
|
||||
assert result[0]["name"] == "Mikan"
|
||||
assert result[0]["url"] == "https://mikanani.me/RSS/test"
|
||||
|
||||
# --- get_program_status ---
|
||||
|
||||
async def test_dispatch_get_program_status(self):
|
||||
"""get_program_status returns version, running, and first_run."""
|
||||
mock_program = MagicMock()
|
||||
mock_program.is_running = True
|
||||
mock_program.first_run = False
|
||||
|
||||
with (
|
||||
patch("module.mcp.tools.VERSION", "3.2.0-beta"),
|
||||
patch("module.mcp.tools._get_program_status") as mock_fn,
|
||||
):
|
||||
mock_fn.return_value = {
|
||||
"version": "3.2.0-beta",
|
||||
"running": True,
|
||||
"first_run": False,
|
||||
}
|
||||
result = await _dispatch("get_program_status", {})
|
||||
|
||||
assert "version" in result
|
||||
assert "running" in result
|
||||
assert "first_run" in result
|
||||
|
||||
# --- refresh_feeds ---
|
||||
|
||||
async def test_dispatch_refresh_feeds(self):
|
||||
"""refresh_feeds triggers engine.refresh_rss and returns success dict."""
|
||||
mock_client = AsyncMock()
|
||||
mock_client.__aenter__ = AsyncMock(return_value=mock_client)
|
||||
mock_client.__aexit__ = AsyncMock(return_value=False)
|
||||
|
||||
mock_engine = MagicMock()
|
||||
mock_engine.refresh_rss = AsyncMock(return_value=None)
|
||||
engine_ctx = MagicMock()
|
||||
engine_ctx.__enter__ = MagicMock(return_value=mock_engine)
|
||||
engine_ctx.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
with (
|
||||
patch("module.mcp.tools.DownloadClient", return_value=mock_client),
|
||||
patch("module.mcp.tools.RSSEngine", return_value=engine_ctx),
|
||||
):
|
||||
result = await _dispatch("refresh_feeds", {})
|
||||
|
||||
assert result["status"] is True
|
||||
mock_engine.refresh_rss.assert_called_once_with(mock_client)
|
||||
|
||||
# --- update_anime ---
|
||||
|
||||
async def test_dispatch_update_anime_success(self):
|
||||
"""update_anime applies field overrides and calls update_rule."""
|
||||
existing = make_bangumi(id=7, episode_offset=0, season_offset=0, season=1)
|
||||
resp = _make_response(True, "Updated")
|
||||
|
||||
ctx, mock_mgr = _mock_sync_manager(single=existing)
|
||||
mock_mgr.bangumi.search_id.return_value = existing
|
||||
mock_mgr.update_rule = AsyncMock(return_value=resp)
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch(
|
||||
"update_anime",
|
||||
{"id": 7, "episode_offset": 12, "season": 2},
|
||||
)
|
||||
|
||||
mock_mgr.update_rule.assert_called_once()
|
||||
assert result["status"] is True
|
||||
|
||||
async def test_dispatch_update_anime_not_found(self):
|
||||
"""update_anime returns error dict when bangumi does not exist."""
|
||||
ctx, mock_mgr = _mock_sync_manager()
|
||||
mock_mgr.bangumi.search_id.return_value = None
|
||||
|
||||
with patch("module.mcp.tools.TorrentManager", return_value=ctx):
|
||||
result = await _dispatch("update_anime", {"id": 9999})
|
||||
|
||||
assert "error" in result
|
||||
assert "9999" in result["error"]
|
||||
|
||||
# --- unknown tool ---
|
||||
|
||||
async def test_dispatch_unknown_tool(self):
|
||||
"""An unrecognised tool name returns an error dict."""
|
||||
result = await _dispatch("does_not_exist", {})
|
||||
assert "error" in result
|
||||
assert "does_not_exist" in result["error"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# handle_tool wrapper
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestHandleTool:
    """Verify handle_tool wraps results correctly and handles exceptions."""

    async def test_handle_tool_returns_text_content_list(self):
        """handle_tool always returns a list of TextContent objects."""
        from mcp import types

        ctx, _ = _mock_sync_manager(bangumi_list=[make_bangumi(id=1)])

        with patch("module.mcp.tools.TorrentManager", return_value=ctx):
            contents = await handle_tool("list_anime", {})

        assert isinstance(contents, list)
        # Every item in the MCP response is a TextContent.
        assert all(isinstance(entry, types.TextContent) for entry in contents)

    async def test_handle_tool_result_is_valid_json(self):
        """The text in TextContent is valid JSON."""
        ctx, _ = _mock_sync_manager(bangumi_list=[make_bangumi(id=1)])

        with patch("module.mcp.tools.TorrentManager", return_value=ctx):
            contents = await handle_tool("list_anime", {})

        decoded = json.loads(contents[0].text)
        assert isinstance(decoded, list)

    async def test_handle_tool_exception_returns_error_json(self):
        """If the underlying handler raises, handle_tool returns a JSON error."""
        boom = AsyncMock(side_effect=RuntimeError("something broke"))
        with patch("module.mcp.tools._dispatch", new=boom):
            contents = await handle_tool("list_anime", {})

        assert len(contents) == 1
        # The exception text is surfaced in a JSON error payload.
        payload = json.loads(contents[0].text)
        assert "error" in payload
        assert "something broke" in payload["error"]

    async def test_handle_tool_unknown_name_returns_error_json(self):
        """An unknown tool name bubbles up as a JSON error via handle_tool."""
        contents = await handle_tool("totally_unknown_tool", {})
        payload = json.loads(contents[0].text)
        assert "error" in payload
|
||||
258
backend/uv.lock
generated
258
backend/uv.lock
generated
@@ -50,9 +50,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/7f/09065fd9e27da0eda08b4d6897f1c13535066174cc023af248fc2a8d5e5a/asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67", size = 105045, upload-time = "2022-03-15T14:46:51.055Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "25.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "auto-bangumi"
|
||||
version = "3.2.3b3"
|
||||
version = "3.2.3b5"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aiosqlite" },
|
||||
@@ -62,6 +71,7 @@ dependencies = [
|
||||
{ name = "httpx", extra = ["socks"] },
|
||||
{ name = "httpx-socks" },
|
||||
{ name = "jinja2" },
|
||||
{ name = "mcp", extra = ["cli"] },
|
||||
{ name = "openai" },
|
||||
{ name = "passlib" },
|
||||
{ name = "pydantic" },
|
||||
@@ -96,6 +106,7 @@ requires-dist = [
|
||||
{ name = "httpx", extras = ["socks"], specifier = ">=0.25.0" },
|
||||
{ name = "httpx-socks", specifier = ">=0.9.0" },
|
||||
{ name = "jinja2", specifier = ">=3.1.2" },
|
||||
{ name = "mcp", extras = ["cli"], specifier = ">=1.8.0" },
|
||||
{ name = "openai", specifier = ">=1.54.3" },
|
||||
{ name = "passlib", specifier = ">=1.7.4" },
|
||||
{ name = "pydantic", specifier = ">=2.0.0" },
|
||||
@@ -485,6 +496,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/36/2a/78b08da3f2c8eb4dd31420d0a38ed4fd4cce272dbe6a8a0d154c0300002b/httpx_socks-0.11.0-py3-none-any.whl", hash = "sha256:8c28ad569ccf681b45437ea8465203cbc082206659b6f623e4ea509b1eb4e8a7", size = 13308, upload-time = "2025-12-05T05:46:40.193Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx-sse"
|
||||
version = "0.4.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
version = "2.6.16"
|
||||
@@ -575,6 +595,45 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/97/9a/3c5391907277f0e55195550cf3fa8e293ae9ee0c00fb402fec1e38c0c82f/jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473", size = 185564, upload-time = "2025-11-09T20:48:50.376Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.26.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "attrs" },
|
||||
{ name = "jsonschema-specifications" },
|
||||
{ name = "referencing" },
|
||||
{ name = "rpds-py" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-specifications"
|
||||
version = "2025.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "referencing" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "4.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mdurl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markupsafe"
|
||||
version = "3.0.3"
|
||||
@@ -627,6 +686,46 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mcp"
|
||||
version = "1.26.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "httpx" },
|
||||
{ name = "httpx-sse" },
|
||||
{ name = "jsonschema" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "pyjwt", extra = ["crypto"] },
|
||||
{ name = "python-multipart" },
|
||||
{ name = "pywin32", marker = "sys_platform == 'win32'" },
|
||||
{ name = "sse-starlette" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
{ name = "typing-inspection" },
|
||||
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
cli = [
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "typer" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mdurl"
|
||||
version = "0.1.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "1.1.0"
|
||||
@@ -811,6 +910,20 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.13.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
@@ -820,6 +933,20 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyjwt"
|
||||
version = "2.11.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
crypto = [
|
||||
{ name = "cryptography" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyopenssl"
|
||||
version = "25.3.0"
|
||||
@@ -937,6 +1064,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/3c/6941a82f4f130af6e1c68c076b6789069ef10c04559bd4733650f902fd3b/pytokens-0.4.0-py3-none-any.whl", hash = "sha256:0508d11b4de157ee12063901603be87fb0253e8f4cb9305eb168b1202ab92068", size = 13224, upload-time = "2026-01-19T07:59:49.822Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pywin32"
|
||||
version = "311"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.3"
|
||||
@@ -973,6 +1113,98 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "referencing"
|
||||
version = "0.37.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "attrs" },
|
||||
{ name = "rpds-py" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rich"
|
||||
version = "14.3.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "markdown-it-py" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rpds-py"
|
||||
version = "0.30.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "4.9.1"
|
||||
@@ -1020,6 +1252,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/24/4d91e05817e92e3a61c8a21e08fd0f390f5301f1c448b137c57c4bc6e543/semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746", size = 17912, upload-time = "2025-01-24T13:19:24.949Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shellingham"
|
||||
version = "1.5.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
version = "1.17.0"
|
||||
@@ -1146,6 +1387,21 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typer"
|
||||
version = "0.24.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-doc" },
|
||||
{ name = "click" },
|
||||
{ name = "rich" },
|
||||
{ name = "shellingham" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
|
||||
Reference in New Issue
Block a user