diff --git a/.github/workflows/dev-latest.yml b/.github/workflows/dev-latest.yml index 2e1eeaae..aa9d8b25 100644 --- a/.github/workflows/dev-latest.yml +++ b/.github/workflows/dev-latest.yml @@ -3,7 +3,7 @@ name: Deploy To Dockerhub(dev) on: push: tags: - - '\d+\.\d+\.\d+-beta\d+' + - '\d+\.\d+\.\d+-beta(?:\d+)?' jobs: latest: diff --git a/.github/workflows/docker-armv7.yml b/.github/workflows/docker-armv7.yml deleted file mode 100644 index bededff4..00000000 --- a/.github/workflows/docker-armv7.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Build(Docker) - -on: - push: - tags: - - '\d+\.\d+\.\d+' - -jobs: - docker: - runs-on: ubuntu-latest - steps: - - - name: Checkout - uses: actions/checkout@v3 - - name: Create Version info - working-directory: ./src - run: | - echo "VERSION = '$GITHUB_REF_NAME'" > module/__version__.py - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Login to DockerHub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKER_HUB_USERNAME }} - password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - - - name: Build and push - uses: docker/build-push-action@v3 - with: - context: . 
- platforms: linux/arm - push: true - tags: | - estrellaxd/auto_bangumi:latest - estrellaxd/auto_bangumi:${{ github.ref_name }} - labels: ${{ steps.meta.outputs.labels }} \ No newline at end of file diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 811c4f22..857e611b 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,6 +1,7 @@ name: Build(Docker) on: + pull_request: push: tags: - '\d+\.\d+\.\d+' @@ -9,33 +10,47 @@ jobs: docker: runs-on: ubuntu-latest steps: - - - name: Checkout + - name: Checkout uses: actions/checkout@v3 - name: Create Version info working-directory: ./src run: | echo "VERSION = '$GITHUB_REF_NAME'" > module/__version__.py - - - name: Set up QEMU + + - name: Set up QEMU uses: docker/setup-qemu-action@v2 - - - name: Set up Docker Buildx + + - name: Set up Docker Buildx + id: buildx uses: docker/setup-buildx-action@v2 - - - name: Login to DockerHub + + - name: Docker metadata + id: meta + uses: docker/metadata-action@v4 + with: + images: | + estrellaxd/auto_bangumi + tags: | + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + type=raw,value=latest + + - name: Login to DockerHub + if: ${{ github.event_name == 'push' }} uses: docker/login-action@v2 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} - - - name: Build and push - uses: docker/build-push-action@v3 + + - name: Build and push + uses: docker/build-push-action@v4 with: context: . 
- platforms: linux/amd64,linux/arm64 - push: true - tags: | - estrellaxd/auto_bangumi:latest - estrellaxd/auto_bangumi:${{ github.ref_name }} + builder: ${{ steps.buildx.outputs.name }} + platforms: linux/amd64,linux/arm64,linux/arm + push: ${{ github.event_name == 'push' }} + tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha, scope=${{ github.workflow }} + cache-to: type=gha, scope=${{ github.workflow }} diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..3d0bec7f --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "docs/wiki"] + path = docs/wiki + url = https://github.com/EstrellaXD/Auto_Bangumi.wiki.git diff --git a/Dockerfile b/Dockerfile index cc750ece..e6a50f78 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,25 +1,6 @@ # syntax=docker/dockerfile:1 -FROM python:3.11-alpine AS Builder -WORKDIR /app -COPY requirements.txt . -RUN apk add --no-cache --virtual=build-dependencies \ - libxml2-dev \ - libxslt-dev \ - gcc \ - g++ \ - linux-headers \ - build-base && \ - python3 -m pip install --upgrade pip && \ - pip install cython && \ - pip install --no-cache-dir -r requirements.txt && \ - apk del --purge \ - build-dependencies && \ - rm -rf \ - /root/.cache \ - /tmp/* - -FROM scratch AS APP +FROM python:3.11-alpine AS APP ENV S6_SERVICES_GRACETIME=30000 \ S6_KILL_GRACETIME=60000 \ @@ -33,31 +14,30 @@ ENV S6_SERVICES_GRACETIME=30000 \ PGID=1000 \ UMASK=022 -COPY --from=Builder / / - WORKDIR /app +COPY requirements.txt . 
RUN apk add --no-cache \ curl \ - wget \ jq \ shadow \ s6-overlay \ bash && \ + python3 -m pip install --upgrade pip && \ + pip install --no-cache-dir -r requirements.txt && \ # Download WebUI - wget "https://github.com/Rewrite0/Auto_Bangumi_WebUI/releases/latest/download/dist.zip" -O /tmp/dist.zip && \ - unzip -q -d /tmp /tmp/dist.zip && \ - mv /tmp/dist /app/templates && \ + curl -sL "https://github.com/Rewrite0/Auto_Bangumi_WebUI/releases/latest/download/dist.zip" | busybox unzip -q -d /app - && \ + mv /app/dist /app/templates && \ # Add user - mkdir /ab && \ addgroup -S ab -g 911 && \ adduser -S ab -G ab -h /ab -s /bin/bash -u 911 && \ + # Clear rm -rf \ /root/.cache \ /tmp/* COPY --chmod=755 src/. . -COPY --chmod=755 ./docker / +COPY --chmod=755 src/docker / ENTRYPOINT [ "/init" ] diff --git a/README.md b/README.md index 27f99ea3..f566dc13 100644 --- a/README.md +++ b/README.md @@ -1,22 +1,16 @@
-
-
+
+
-
-
+
+
@@ -30,9 +24,10 @@

-- 主项目地址:[AutoBangumi](https://www.github.com/EstrellaXD/Auto_Bangumi)
-- 项目资源仓库:[ab_resource](https://www.github.com/EstrellaXD/ab_resource)
-- WebUI 仓库:[AutoBangumi_WebUI](https://github.com/Rewrite0/Auto_Bangumi_WebUI)
+[主项目地址](https://www.github.com/EstrellaXD/Auto_Bangumi)
+/ [WebUI 仓库](https://github.com/Rewrite0/Auto_Bangumi_WebUI)
+/ [Wiki 说明](https://www.github.com/EstrellaXD/Auto_Bangumi/wiki)
+
## AutoBangumi 功能说明
@@ -66,6 +61,13 @@
- 高度可自定义的功能选项,可以针对不同媒体库软件微调
- 无需维护完全无感使用
- 内置 TDMB 解析器,可以直接生成完整的 TMDB 格式的文件以及番剧信息。
+- 对于 Mikan RSS 的反代支持。
+
+## 如何开始
+
+- **[部署说明 (Official)](https://github.com/EstrellaXD/Auto_Bangumi/wiki)**
+- **[2.6版本更新说明](https://github.com/EstrellaXD/Auto_Bangumi/wiki/2.6更新说明)**
+- **[部署说明 (手把手)](https://www.himiku.com/archives/auto-bangumi.html)**
## 相关群组
@@ -75,18 +77,18 @@
## Roadmap
***开发中的功能:***
-- Web UI [#57](https://github.com/EstrellaXD/Auto_Bangumi/issues/57)
+- Web UI #57 ✅
- 文件统一整理,对单个规则或者文件微调文件夹可以自动调整所有对应的文件。
-- 通知功能,可以通过 IFTTT 等方式通知用户番剧更新进度。
-- 剧场版以及合集的支持。
+- 通知功能,可以通过 IFTTT 等方式通知用户番剧更新进度。✅
+- 剧场版以及合集的支持。✅
- 各类 API 接口。
***计划开发的功能:***
- 对其他站点种子的解析归类。
- 本地化番剧订阅方式。
+- Transmission & Aria2 的支持。
+- 更完善的 WebUI。
-***未计划开发的功能:***
-- Transmission & Aria2 的支持,可以转到 [RanKKI/Bangumi](https://github.com/RanKKI/Bangumi),该项目具有 Aria2/Tr 的支持
# 声明
## 致谢
diff --git a/docs/docker-compose/All-in-one/docker-compose.yml b/docs/docker-compose/All-in-one/docker-compose.yml
index 3eff1b60..e99402a2 100644
--- a/docs/docker-compose/All-in-one/docker-compose.yml
+++ b/docs/docker-compose/All-in-one/docker-compose.yml
@@ -19,7 +19,7 @@ services:
networks:
- auto_bangumi
restart: unless-stopped
- image: lscr.io/linuxserver/qbittorrent:4.4.3
+ image: superng6/qbittorrent:latest
auto_bangumi:
container_name: AutoBangumi
@@ -27,22 +27,12 @@ services:
- TZ=Asia/Shanghai
- PGID=${GID}
- PUID=${UID}
- - AB_INTERVAL_TIME=1800
- AB_DOWNLOADER_HOST=qbittorrent:${QB_PORT}
- - AB_DOWNLOADER_USERNAME=admin
- - AB_DOWNLOADER_PASSWORD=adminadmin
- - AB_METHOD=pn
- - AB_GROUP_TAG=True
- - AB_NOT_CONTAIN=720
- - AB_DOWNLOAD_PATH=/downloads/Bangumi
- - AB_RSS=${RSS}
- - AB_DEBUG_MODE=False
- - AB_EP_COMPLETE=False
- - AB_SEASON_ONE=True
networks:
- auto_bangumi
volumes:
- - auto_bangumi:/config
+ - /path/to/config:/app/config
+ - /path/to/data:/app/data
ports:
- 7892:7892
dns:
@@ -78,6 +68,4 @@ volumes:
external: false
plex_config:
external: false
- auto_bangumi:
- external: false
diff --git a/docs/docker-compose/AutoBangumi/docker-compose.yml b/docs/docker-compose/AutoBangumi/docker-compose.yml
index 107cda55..07487d6d 100644
--- a/docs/docker-compose/AutoBangumi/docker-compose.yml
+++ b/docs/docker-compose/AutoBangumi/docker-compose.yml
@@ -6,17 +6,13 @@ services:
- TZ=Asia/Shanghai
- PGID=${GID}
- PUID=${UID}
- - AB_DOWNLOADER_HOST=localhost:${QB_PORT}
- - AB_DOWNLOADER_USERNAME=admin
- - AB_DOWNLOADER_PASSWORD=adminadmin
- - AB_DOWNLOAD_PATH=${DOWNLOAD_PATH}
- - AB_RSS=${RSS}
networks:
- auto_bangumi
ports:
- '7892:7892'
volumes:
- - auto_bangumi:/config
+ - ./config:/app/config
+ - ./data:/app/data
dns:
- 8.8.8.8
- 223.5.5.5
diff --git a/docs/docker-compose/qBittorrent+AutoBangumi/docker-compose.yml b/docs/docker-compose/qBittorrent+AutoBangumi/docker-compose.yml
index 547cfffb..ee7509e7 100644
--- a/docs/docker-compose/qBittorrent+AutoBangumi/docker-compose.yml
+++ b/docs/docker-compose/qBittorrent+AutoBangumi/docker-compose.yml
@@ -28,13 +28,11 @@ services:
- PGID=${GID}
- PUID=${UID}
- AB_DOWNLOADER_HOST=qbittorrent:${QB_PORT}
- - AB_DOWNLOADER_USERNAME=admin
- - AB_DOWNLOADER_PASSWORD=adminadmin
+ - AB_RSS=${AB_RSS}
- AB_DOWNLOAD_PATH=/downloads/Bangumi
- - AB_RSS=${RSS}
volumes:
- - /path/to/config:/app/config
- - /path/to/data:/app/data
+ - ./config:/app/config
+ - ./data:/app/data
networks:
- auto_bangumi
ports:
diff --git a/docs/wiki b/docs/wiki
new file mode 160000
index 00000000..a4f8c3b8
--- /dev/null
+++ b/docs/wiki
@@ -0,0 +1 @@
+Subproject commit a4f8c3b83b379624dadbcabd378fe27a46bfdff5
diff --git a/requirements.txt b/requirements.txt
index cc092cf0..1c2ff2be 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,5 @@
anyio
beautifulsoup4
-lxml
certifi
charset-normalizer
click
diff --git a/docker/etc/cont-init.d/010-old-compatible b/src/docker/etc/cont-init.d/010-old-compatible
similarity index 100%
rename from docker/etc/cont-init.d/010-old-compatible
rename to src/docker/etc/cont-init.d/010-old-compatible
diff --git a/docker/etc/cont-init.d/020-fixuser b/src/docker/etc/cont-init.d/020-fixuser
similarity index 88%
rename from docker/etc/cont-init.d/020-fixuser
rename to src/docker/etc/cont-init.d/020-fixuser
index 51856377..76cf9163 100644
--- a/docker/etc/cont-init.d/020-fixuser
+++ b/src/docker/etc/cont-init.d/020-fixuser
@@ -6,7 +6,7 @@ function __fixuser {
groupmod -o -g "${PGID}" ab
usermod -o -u "${PUID}" ab
- chown ab:ab -R /app
+ chown ab:ab -R /app /ab
}
diff --git a/docker/etc/services.d/auto_bangumi/finish b/src/docker/etc/services.d/auto_bangumi/finish
similarity index 100%
rename from docker/etc/services.d/auto_bangumi/finish
rename to src/docker/etc/services.d/auto_bangumi/finish
diff --git a/docker/etc/services.d/auto_bangumi/notification-fd b/src/docker/etc/services.d/auto_bangumi/notification-fd
similarity index 100%
rename from docker/etc/services.d/auto_bangumi/notification-fd
rename to src/docker/etc/services.d/auto_bangumi/notification-fd
diff --git a/docker/etc/services.d/auto_bangumi/run b/src/docker/etc/services.d/auto_bangumi/run
similarity index 100%
rename from docker/etc/services.d/auto_bangumi/run
rename to src/docker/etc/services.d/auto_bangumi/run
diff --git a/src/main.py b/src/main.py
index b5e5d620..435389d2 100644
--- a/src/main.py
+++ b/src/main.py
@@ -16,7 +16,7 @@ from module.conf import VERSION, settings
logger = logging.getLogger(__name__)
-main_process = multiprocessing.Process(target=app.run)
+main_process = multiprocessing.Process(target=app.run, args=(settings,))
@router.get("/api/v1/restart", tags=["program"])
@@ -27,7 +27,8 @@ async def restart():
logger.info("Restarting...")
else:
logger.info("Starting...")
- main_process = multiprocessing.Process(target=app.run)
+ settings.reload()
+ main_process = multiprocessing.Process(target=app.run, args=(settings,))
main_process.start()
logger.info("Restarted")
return {"status": "success"}
@@ -50,7 +51,8 @@ async def start():
if main_process.is_alive():
return {"status": "failed", "reason": "Already started"}
logger.info("Starting...")
- main_process = multiprocessing.Process(target=app.run)
+ settings.reload()
+ main_process = multiprocessing.Process(target=app.run, args=(settings,))
main_process.start()
logger.info("Started")
return {"status": "success"}
diff --git a/src/module/api.py b/src/module/api.py
index b0664dd5..48c66129 100644
--- a/src/module/api.py
+++ b/src/module/api.py
@@ -12,7 +12,7 @@ from module.models import Config
router = FastAPI()
-api_func = APIProcess()
+api_func = APIProcess(settings)
@router.on_event("startup")
diff --git a/src/module/app.py b/src/module/app.py
index 5eb02247..34540dd9 100644
--- a/src/module/app.py
+++ b/src/module/app.py
@@ -2,13 +2,13 @@ import os
import time
import logging
-from module.conf import settings, setup_logger, LOG_PATH, DATA_PATH, RSSLink, VERSION
+from module.conf import setup_logger, LOG_PATH, DATA_PATH, RSSLink, VERSION
from module.utils import load_program_data, save_program_data
from module.core import DownloadClient
from module.manager import Renamer, FullSeasonGet
from module.rss import RSSAnalyser
-from module.models import ProgramData
+from module.models import ProgramData, Config
logger = logging.getLogger(__name__)
@@ -19,10 +19,10 @@ def reset_log():
os.remove(LOG_PATH)
-def load_data_file(rss_link: str) -> ProgramData:
+def load_data_file(rss_link: str, data_version) -> ProgramData:
empty_data = ProgramData(
rss_link=rss_link,
- data_version=settings.data_version,
+ data_version=data_version,
)
if not os.path.exists(DATA_PATH):
program_data = empty_data
@@ -30,28 +30,29 @@ def load_data_file(rss_link: str) -> ProgramData:
logger.info("Building data information...")
else:
program_data = load_program_data(DATA_PATH)
- if program_data.rss_link != rss_link or program_data.data_version != settings.data_version:
+ if program_data.rss_link != rss_link or program_data.data_version != data_version:
program_data = empty_data
logger.info("Rebuilding data information...")
return program_data
-def main_process(program_data: ProgramData, download_client: DownloadClient):
- rename = Renamer(download_client)
- rss_analyser = RSSAnalyser()
+def main_process(program_data: ProgramData, download_client: DownloadClient, _settings: Config):
+ rename = Renamer(download_client, _settings)
+ rss_analyser = RSSAnalyser(_settings)
while True:
times = 0
- if settings.rss_parser.enable:
- rss_analyser.run(program_data.bangumi_info, download_client, program_data.rss_link)
- if settings.bangumi_manage.eps_complete and program_data.bangumi_info != []:
- FullSeasonGet().eps_complete(program_data.bangumi_info, download_client)
+ if _settings.rss_parser.enable:
+ rss_analyser.run(program_data.bangumi_info, program_data.rss_link)
+ download_client.add_rules(program_data.bangumi_info, program_data.rss_link)
+ if _settings.bangumi_manage.eps_complete and program_data.bangumi_info != []:
+ FullSeasonGet(settings=_settings).eps_complete(program_data.bangumi_info, download_client)
logger.info("Running....")
save_program_data(DATA_PATH, program_data)
- while times < settings.program.rename_times:
- if settings.bangumi_manage.enable:
+ while times < _settings.program.rename_times:
+ if _settings.bangumi_manage.enable:
rename.rename()
times += 1
- time.sleep(settings.program.sleep_time / settings.program.rename_times)
+ time.sleep(_settings.program.sleep_time / _settings.program.rename_times)
def show_info():
@@ -63,9 +64,8 @@ def show_info():
logger.info("Starting AutoBangumi...")
-def run():
+def run(settings: Config):
# 初始化
- settings.reload()
rss_link = RSSLink()
reset_log()
setup_logger()
@@ -73,10 +73,10 @@ def run():
if settings.rss_parser.token in ["", "token", None]:
logger.error("Please set your RSS token in config file.")
exit(1)
- download_client = DownloadClient()
+ download_client = DownloadClient(settings)
download_client.auth()
download_client.init_downloader()
download_client.rss_feed(rss_link)
- bangumi_data = load_data_file(rss_link)
+ bangumi_data = load_data_file(rss_link, settings.data_version)
# 主程序循环
- main_process(bangumi_data, download_client)
+ main_process(bangumi_data, download_client, settings)
diff --git a/src/module/conf/__init__.py b/src/module/conf/__init__.py
index ffc8702d..a84acc3e 100644
--- a/src/module/conf/__init__.py
+++ b/src/module/conf/__init__.py
@@ -11,3 +11,6 @@ class RSSLink(str):
if "://" not in settings.rss_parser.custom_url:
return f"https://{settings.rss_parser.custom_url}/RSS/MyBangumi?token={settings.rss_parser.token}"
return f"{settings.rss_parser.custom_url}/RSS/MyBangumi?token={settings.rss_parser.token}"
+
+
+PLATFORM = "Windows" if "\\" in settings.downloader.path else "Unix"
diff --git a/src/module/core/api_func.py b/src/module/core/api_func.py
index 4374cfe9..69bc482a 100644
--- a/src/module/core/api_func.py
+++ b/src/module/core/api_func.py
@@ -5,7 +5,7 @@ from module.core import DownloadClient
from module.manager import FullSeasonGet
from module.rss import RSSAnalyser
from module.utils import json_config
-from module.conf import DATA_PATH, settings
+from module.conf import DATA_PATH
from module.conf.config import save_config_to_file, CONFIG_PATH
from module.models import Config
from module.network import RequestContent
@@ -16,13 +16,14 @@ logger = logging.getLogger(__name__)
class APIProcess:
- def __init__(self):
- self._rss_analyser = RSSAnalyser()
- self._client = DownloadClient()
- self._full_season_get = FullSeasonGet()
+ def __init__(self, settings: Config):
+ self._rss_analyser = RSSAnalyser(settings)
+ self._client = DownloadClient(settings)
+ self._full_season_get = FullSeasonGet(settings)
+ self._custom_url = settings.rss_parser.custom_url
def link_process(self, link):
- return self._rss_analyser.rss_to_data(link, filter=False)
+ return self._rss_analyser.rss_to_data(link, _filter=False)
@api_failed
def download_collection(self, link):
@@ -58,7 +59,6 @@ class APIProcess:
json_config.save(DATA_PATH, datas)
return "Success"
-
@staticmethod
def add_rule(title, season):
data = json_config.load(DATA_PATH)
@@ -85,10 +85,9 @@ class APIProcess:
def get_config() -> dict:
return json_config.load(CONFIG_PATH)
- @staticmethod
- def get_rss(full_path: str):
+ def get_rss(self, full_path: str):
url = f"https://mikanani.me/RSS/{full_path}"
- custom_url = settings.rss_parser.custom_url
+ custom_url = self._custom_url
if "://" not in custom_url:
custom_url = f"https://{custom_url}"
with RequestContent() as request:
diff --git a/src/module/core/download_client.py b/src/module/core/download_client.py
index eb4c376a..2d05a111 100644
--- a/src/module/core/download_client.py
+++ b/src/module/core/download_client.py
@@ -3,25 +3,22 @@ import logging
import os
from module.downloader import getClient
-from module.conf import settings
-from module.models import BangumiData
+from module.models import BangumiData, Config
logger = logging.getLogger(__name__)
class DownloadClient:
- def __init__(self):
- self.client = getClient()
+ def __init__(self, settings: Config):
+ self.client = getClient(settings)
self.authed = False
+ self.download_path = settings.downloader.path
+ self.group_tag = settings.bangumi_manage.group_tag
def auth(self):
- host, username, password = settings.downloader.host, settings.downloader.username, settings.downloader.password
- try:
- self.client.auth(host, username, password)
- self.authed = True
- except Exception as e:
- logger.error(f"Can't login {host} by {username}, {e}")
+ self.client.auth()
+ self.authed = True
def init_downloader(self):
prefs = {
@@ -36,16 +33,16 @@ class DownloadClient:
except Exception as e:
logger.warning("Cannot add new category, maybe already exists.")
logger.debug(e)
- if settings.downloader.path == "":
+ if self.download_path == "":
prefs = self.client.get_app_prefs()
- settings.downloader.path = os.path.join(prefs["save_path"], "Bangumi")
+ self.download_path = os.path.join(prefs["save_path"], "Bangumi")
def set_rule(self, info: BangumiData, rss_link):
official_name, raw_name, season, group = info.official_title, info.title_raw, info.season, info.group
rule = {
"enable": True,
"mustContain": raw_name,
- "mustNotContain": "|".join(settings.rss_parser.filter),
+ "mustNotContain": "|".join(info.filter),
"useRegex": True,
"episodeFilter": "",
"smartFilter": False,
@@ -57,28 +54,23 @@ class DownloadClient:
"assignedCategory": "Bangumi",
"savePath": str(
os.path.join(
- settings.downloader.path,
+ self.download_path,
re.sub(r"[:/.]", " ", official_name).strip(),
f"Season {season}",
)
),
}
- rule_name = f"[{group}] {official_name}" if settings.bangumi_manage.group_tag else official_name
+ rule_name = f"[{group}] {official_name}" if self.group_tag else official_name
self.client.rss_set_rule(rule_name=f"{rule_name} S{season}", rule_def=rule)
logger.info(f"Add {official_name} Season {season}")
def rss_feed(self, rss_link, item_path="Mikan_RSS"):
# TODO: 定时刷新 RSS
- if self.client.get_rss_info(rss_link):
- logger.info("RSS Already exists.")
- else:
- logger.info("No feed exists, start adding feed.")
- self.client.rss_add_feed(url=rss_link, item_path="Mikan_RSS")
- logger.info("Add RSS Feed successfully.")
+ self.client.rss_add_feed(url=rss_link, item_path=item_path)
def add_collection_feed(self, rss_link, item_path):
self.client.rss_add_feed(url=rss_link, item_path=item_path)
- logger.info("Add RSS Feed successfully.")
+ logger.info("Add Collection RSS Feed successfully.")
def add_rules(self, bangumi_info: list[BangumiData], rss_link: str):
logger.debug("Start adding rules.")
diff --git a/src/module/core/download_fliter.py b/src/module/core/download_fliter.py
index 9bd8f040..f779898b 100644
--- a/src/module/core/download_fliter.py
+++ b/src/module/core/download_fliter.py
@@ -1,7 +1,7 @@
import re
import logging
-
-from bs4 import BeautifulSoup
+import xml.etree.ElementTree
+from typing import Tuple
from module.conf import settings
from module.utils import json_config
@@ -13,9 +13,9 @@ class RSSFilter:
def __init__(self):
self.filter_rule = json_config.load(settings.filter_rule)
- def filter(self, item: BeautifulSoup):
- title = item.title.string
- torrent = item.find("enclosure")
+ def filter(self, item: xml.etree.ElementTree.Element) -> Tuple[bool, str]:
+ title = item.find('title').text
+ torrent = item.find("enclosure").attrib['url']
download = False
for rule in self.filter_rule:
if re.search(rule["include"], title):
@@ -23,4 +23,3 @@ class RSSFilter:
download = True
logger.debug(f"{title} added")
return download, torrent
-
diff --git a/src/module/downloader/__init__.py b/src/module/downloader/__init__.py
index a7612e6c..b7b33989 100644
--- a/src/module/downloader/__init__.py
+++ b/src/module/downloader/__init__.py
@@ -1,11 +1,15 @@
-from module.conf import settings
+from module.models import Config
-def getClient():
+def getClient(settings: Config):
# TODO 多下载器支持
- # 从 settings 里读取下载器名称,然后返回对应 Client
- if settings.downloader.type == "qbittorrent":
+ type = settings.downloader.type
+ host = settings.downloader.host
+ username = settings.downloader.username
+ password = settings.downloader.password
+ ssl = settings.downloader.ssl
+ if type == "qbittorrent":
from .qb_downloader import QbDownloader
- return QbDownloader()
+ return QbDownloader(host, username, password, ssl)
else:
- raise Exception(f"Unsupported downloader type: {settings.downloader.type}")
+ raise Exception(f"Unsupported downloader type: {type}")
diff --git a/src/module/downloader/aria2_downloader.py b/src/module/downloader/aria2_downloader.py
index 8a4e9ec1..95400334 100644
--- a/src/module/downloader/aria2_downloader.py
+++ b/src/module/downloader/aria2_downloader.py
@@ -4,9 +4,9 @@ import time
from aria2p import Client, ClientException, API
-from conf import settings
+from module.conf import settings
-from downloader.exceptions import ConflictError
+from .exceptions import ConflictError
logger = logging.getLogger(__name__)
diff --git a/src/module/downloader/exceptions.py b/src/module/downloader/exceptions.py
index ba0d18a4..7ec28c73 100644
--- a/src/module/downloader/exceptions.py
+++ b/src/module/downloader/exceptions.py
@@ -1,2 +1,2 @@
class ConflictError(Exception):
- pass
\ No newline at end of file
+ pass
diff --git a/src/module/downloader/qb_downloader.py b/src/module/downloader/qb_downloader.py
index d25ee2e6..9074c916 100644
--- a/src/module/downloader/qb_downloader.py
+++ b/src/module/downloader/qb_downloader.py
@@ -4,33 +4,32 @@ import time
from qbittorrentapi import Client, LoginFailed
from qbittorrentapi.exceptions import Conflict409Error
-from module.conf import settings
from module.ab_decorator import qb_connect_failed_wait
-
from module.downloader.exceptions import ConflictError
logger = logging.getLogger(__name__)
class QbDownloader:
- def __init__(self):
- self._client: Client | None = None
-
- @qb_connect_failed_wait
- def auth(self, host, username, password):
- self._client = Client(
+ def __init__(self, host: str, username: str, password: str, ssl: bool):
+ self._client: Client = Client(
host=host,
username=username,
password=password,
- VERIFY_WEBUI_CERTIFICATE=settings.downloader.ssl
+ VERIFY_WEBUI_CERTIFICATE=ssl
)
+ self.host = host
+ self.username = username
+
+ @qb_connect_failed_wait
+ def auth(self):
while True:
try:
self._client.auth_log_in()
break
except LoginFailed:
- logger.warning(
- f"Can't login qBittorrent Server {host} by {username}, retry in {5} seconds."
+ logger.error(
+ f"Can't login qBittorrent Server {self.host} by {self.username}, retry in {5} seconds."
)
time.sleep(5)
@@ -66,21 +65,35 @@ class QbDownloader:
def torrents_rename_file(self, torrent_hash, old_path, new_path):
self._client.torrents_rename_file(torrent_hash=torrent_hash, old_path=old_path, new_path=new_path)
- def get_rss_info(self, url) -> str | None:
+ def check_rss(self, url, item_path) -> tuple[str | None, bool]:
items = self._client.rss_items()
- for item in items.items():
- if item[1].url == url:
- return item[0]
- return None
+ for key, value in items.items():
+ rss_url = value.get("url")
+ if key == item_path:
+ if rss_url != url:
+ return key, False
+ return None, True
+ else:
+ if rss_url == url:
+ return key, True
+ return None, False
def rss_add_feed(self, url, item_path):
- try:
- path = self.get_rss_info(url)
- if path:
- self.rss_remove_item(path)
- self._client.rss_add_feed(url, item_path)
- except Conflict409Error:
- logger.exception("RSS Exist.")
+ path, added = self.check_rss(url, item_path)
+ if path:
+ if not added:
+ logger.info("RSS Exist, Update URL.")
+ self._client.rss_remove_item(path)
+ self._client.rss_add_feed(url, item_path)
+ else:
+ logger.info("RSS Exist.")
+ else:
+ if added:
+ logger.info("RSS Exist.")
+ else:
+ logger.info("Add new RSS")
+ self._client.rss_add_feed(url, item_path)
+ logger.info("Successfully added RSS")
def rss_remove_item(self, item_path):
try:
diff --git a/src/module/manager/eps_complete.py b/src/module/manager/eps_complete.py
index 5f3cf412..1ebb6794 100644
--- a/src/module/manager/eps_complete.py
+++ b/src/module/manager/eps_complete.py
@@ -2,29 +2,27 @@ import os.path
import re
import logging
-from module.conf import settings
from module.network import RequestContent
from module.core import DownloadClient
-from module.models import BangumiData
+from module.models import BangumiData, Config
logger = logging.getLogger(__name__)
-SEARCH_KEY = ["group", "title_raw", "season_raw", "subtitle", "source", "dpi"]
-CUSTOM_URL = "https://mikanani.me" if settings.rss_parser.custom_url == "" else settings.rss_parser.custom_url
-if "://" not in CUSTOM_URL:
- if re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", CUSTOM_URL):
- CUSTOM_URL = f"http://{CUSTOM_URL}"
- CUSTOM_URL = f"https://{CUSTOM_URL}"
class FullSeasonGet:
- def __init__(self):
- pass
+ def __init__(self, settings: Config):
+ self.SEARCH_KEY = ["group", "title_raw", "season_raw", "subtitle", "source", "dpi"]
+ self.CUSTOM_URL = "https://mikanani.me" if settings.rss_parser.custom_url == "" else settings.rss_parser.custom_url
+ if "://" not in self.CUSTOM_URL:
+ if re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", self.CUSTOM_URL):
+ self.CUSTOM_URL = f"http://{self.CUSTOM_URL}"
+ self.CUSTOM_URL = f"https://{self.CUSTOM_URL}"
+ self.save_path = settings.downloader.path
- @staticmethod
- def init_eps_complete_search_str(data: BangumiData):
+ def init_eps_complete_search_str(self, data: BangumiData):
test = []
- for key in SEARCH_KEY:
+ for key in self.SEARCH_KEY:
data_dict = data.dict()
if data_dict[key] is not None:
test.append(data_dict[key])
@@ -35,17 +33,16 @@ class FullSeasonGet:
def get_season_torrents(self, data: BangumiData):
keyword = self.init_eps_complete_search_str(data)
with RequestContent() as req:
- torrents = req.get_torrents(f"{CUSTOM_URL}/RSS/Search?searchstr={keyword}")
+ torrents = req.get_torrents(f"{self.CUSTOM_URL}/RSS/Search?searchstr={keyword}")
return [torrent for torrent in torrents if data.title_raw in torrent.name]
- @staticmethod
- def collect_season_torrents(data: BangumiData, torrents):
+ def collect_season_torrents(self, data: BangumiData, torrents):
downloads = []
for torrent in torrents:
download_info = {
"url": torrent.torrent_link,
"save_path": os.path.join(
- settings.downloader.path,
+ self.save_path,
data.official_title,
f"Season {data.season}")
}
diff --git a/src/module/manager/renamer.py b/src/module/manager/renamer.py
index 7bd97172..8c49d06b 100644
--- a/src/module/manager/renamer.py
+++ b/src/module/manager/renamer.py
@@ -5,18 +5,19 @@ from pathlib import PurePath, PureWindowsPath
from module.core.download_client import DownloadClient
-from module.conf import settings
from module.parser import TitleParser
from module.network import PostNotification
+from module.models import Config
logger = logging.getLogger(__name__)
class Renamer:
- def __init__(self, download_client: DownloadClient):
- self.client = download_client
+ def __init__(self, download_client: DownloadClient, settings: Config):
+ self._client = download_client
self._renamer = TitleParser()
- self.notification = PostNotification()
+ self._notification = PostNotification()
+ self.settings = settings
@staticmethod
def print_result(torrent_count, rename_count):
@@ -25,78 +26,104 @@ class Renamer:
logger.debug(f"Checked {torrent_count} files")
def get_torrent_info(self, category="Bangumi"):
- recent_info = self.client.get_torrent_info(category=category)
+ recent_info = self._client.get_torrent_info(category=category)
torrent_count = len(recent_info)
return recent_info, torrent_count
@staticmethod
- def check_files(info, suffix_type: str = "media"):
- if suffix_type == "subtitle":
- suffix_list = [".ass", ".srt"]
- else:
- suffix_list = [".mp4", ".mkv"]
- file_list = []
+ def check_files(info):
+ media_list = []
+ subtitle_list = []
for f in info.files:
file_name = f.name
suffix = os.path.splitext(file_name)[-1]
- if suffix.lower() in suffix_list:
- file_list.append(file_name)
- return file_list
+ if suffix.lower() in [".mp4", ".mkv"]:
+ media_list.append(file_name)
+ elif suffix.lower() in [".ass", ".srt"]:
+ subtitle_list.append(file_name)
+ return media_list, subtitle_list
- def rename_file(self, info, media_path):
- old_name = info.name
+ def rename_file(self, info, media_path: str, rename_method: str, bangumi_name: str, season: int, remove_bad_torrents: bool):
+ torrent_name = info.name
suffix = os.path.splitext(media_path)[-1]
compare_name = media_path.split(os.path.sep)[-1]
- folder_name, season = self.get_folder_and_season(info.save_path)
- new_path = self._renamer.download_parser(old_name, folder_name, season, suffix)
+ new_path = self._renamer.torrent_parser(
+ torrent_name=torrent_name,
+ bangumi_name=bangumi_name,
+ season=season,
+ suffix=suffix,
+ method=rename_method
+ )
if compare_name != new_path:
try:
- self.client.rename_torrent_file(_hash=info.hash, old_path=media_path, new_path=new_path)
- self.notification.send_msg(folder_name, "update")
+ self._client.rename_torrent_file(_hash=info.hash, old_path=media_path, new_path=new_path)
+ self._notification.send_msg(bangumi_name, "最新剧集已经更新,已自动重命名。")
except Exception as e:
- logger.warning(f"{old_name} rename failed")
- logger.warning(f"Folder name: {folder_name}, Season: {season}, Suffix: {suffix}")
+ logger.warning(f"{torrent_name} rename failed")
+ logger.warning(f"Season name: {bangumi_name}, Season: {season}, Suffix: {suffix}")
logger.debug(e)
# Delete bad torrent
- self.delete_bad_torrent(info)
+ self.delete_bad_torrent(info, remove_bad_torrents)
- def rename_collection(self, info, media_list: list[str]):
- folder_name, season = self.get_folder_and_season(info.save_path)
+    def rename_collection(self, info, media_list: list[str], bangumi_name: str, season: int, remove_bad_torrents: bool):
_hash = info.hash
for media_path in media_list:
path_len = len(media_path.split(os.path.sep))
if path_len <= 2:
suffix = os.path.splitext(media_path)[-1]
- old_name = media_path.split(os.path.sep)[-1]
- new_name = self._renamer.download_parser(old_name, folder_name, season, suffix)
- if old_name != new_name:
+ torrent_name = media_path.split(os.path.sep)[-1]
+ new_name = self._renamer.torrent_parser(
+ torrent_name=torrent_name,
+ bangumi_name=bangumi_name,
+ season=season,
+ suffix=suffix,
+ method="pn"
+ )
+ if torrent_name != new_name:
try:
- self.client.rename_torrent_file(_hash=_hash, old_path=media_path, new_path=new_name)
+ self._client.rename_torrent_file(_hash=_hash, old_path=media_path, new_path=new_name)
except Exception as e:
- logger.warning(f"{old_name} rename failed")
- logger.warning(f"Folder name: {folder_name}, Season: {season}, Suffix: {suffix}")
+ logger.warning(f"{torrent_name} rename failed")
+ logger.warning(f"Bangumi name: {bangumi_name}, Season: {season}, Suffix: {suffix}")
logger.debug(e)
# Delete bad torrent.
- self.delete_bad_torrent(info)
- self.client.set_category(category="BangumiCollection", hashes=_hash)
+ self.delete_bad_torrent(info, remove_bad_torrents)
+ self._client.set_category(category="BangumiCollection", hashes=_hash)
- def rename_subtitles(self, subtitle_list: list[str], media_old_name, media_new_name, _hash):
- for subtitle_file in subtitle_list:
- if re.search(media_old_name, subtitle_file) is not None:
- subtitle_lang = subtitle_file.split(".")[-2]
- new_subtitle_name = f"{media_new_name}.{subtitle_lang}.ass"
- self.client.rename_torrent_file(_hash, subtitle_file, new_subtitle_name)
- logger.info(f"Rename subtitles for {media_old_name} to {media_new_name}")
+ def rename_subtitles(
+ self,
+ subtitle_list: list[str],
+ bangumi_name: str,
+ season: int,
+ _hash
+ ):
+ for subtitle_path in subtitle_list:
+ suffix = os.path.splitext(subtitle_path)[-1]
+ old_name = subtitle_path.split(os.path.sep)[-1]
+ new_name = self._renamer.torrent_parser(
+ method="subtitle",
+ torrent_name=old_name,
+ bangumi_name=bangumi_name,
+ season=season,
+ suffix=suffix
+ )
+ if old_name != new_name:
+ try:
+ self._client.rename_torrent_file(_hash=_hash, old_path=subtitle_path, new_path=new_name)
+ except Exception as e:
+ logger.warning(f"{old_name} rename failed")
+ logger.warning(f"Suffix: {suffix}")
+ logger.debug(e)
- def delete_bad_torrent(self, info):
- if settings.bangumi_manage.remove_bad_torrent:
- self.client.delete_torrent(info.hash)
+ def delete_bad_torrent(self, info, remove_bad_torrent: bool):
+ if remove_bad_torrent:
+ self._client.delete_torrent(info.hash)
logger.info(f"{info.name} have been deleted.")
@staticmethod
- def get_folder_and_season(save_path: str):
+ def get_season_info(save_path: str, download_path: str):
# Remove default save path
- save_path = save_path.replace(settings.downloader.path, "")
+ save_path = save_path.replace(download_path, "")
# Check windows or linux path
path_parts = PurePath(save_path).parts \
if PurePath(save_path).name != save_path \
@@ -117,24 +144,63 @@ class Renamer:
def rename(self):
# Get torrent info
+ download_path = self.settings.downloader.path
+ rename_method = self.settings.bangumi_manage.rename_method
+ remove_bad_torrents = self.settings.bangumi_manage.remove_bad_torrent
recent_info, torrent_count = self.get_torrent_info()
- rename_count = 0
for info in recent_info:
- media_list = self.check_files(info)
+ media_list, subtitle_list = self.check_files(info)
+ bangumi_name, season = self.get_season_info(info.save_path, download_path)
if len(media_list) == 1:
- self.rename_file(info, media_list[0])
- rename_count += 1
- # TODO: Rename subtitles
+ self.rename_file(
+ info=info,
+ media_path=media_list[0],
+ rename_method=rename_method,
+ bangumi_name=bangumi_name,
+ season=season,
+ remove_bad_torrents=remove_bad_torrents
+ )
+ if len(subtitle_list) > 0:
+ self.rename_subtitles(
+ subtitle_list=subtitle_list,
+ bangumi_name=bangumi_name,
+ season=season,
+ _hash=info.hash
+ )
elif len(media_list) > 1:
logger.info("Start rename collection")
- self.rename_collection(info, media_list)
- rename_count += len(media_list)
+ self.rename_collection(
+ info,
+ media_list,
+ bangumi_name,
+ season,
+ remove_bad_torrents
+ )
+ if len(subtitle_list) > 0:
+ self.rename_subtitles(
+ subtitle_list=subtitle_list,
+ bangumi_name=bangumi_name,
+ season=season,
+ _hash=info.hash
+ )
else:
logger.warning(f"{info.name} has no media file")
if __name__ == '__main__':
- client = DownloadClient()
- rn = Renamer(client)
- rn.rename()
-
+ from module.conf import settings, setup_logger
+ setup_logger()
+ client = DownloadClient(settings)
+ renamer = Renamer(client, settings)
+ info, _ = renamer.get_torrent_info(category="BangumiCollection")
+ for i in info:
+ _hash = i.hash
+ _, subtitle_list = renamer.check_files(i)
+ print(_hash)
+ bangumi_name, season = renamer.get_season_info(i.save_path, settings.downloader.path)
+ renamer.rename_subtitles(
+ subtitle_list,
+ bangumi_name=bangumi_name,
+ season=season,
+ _hash=_hash
+ )
\ No newline at end of file
diff --git a/src/module/models/torrent.py b/src/module/models/torrent.py
new file mode 100644
index 00000000..59ae7bcb
--- /dev/null
+++ b/src/module/models/torrent.py
@@ -0,0 +1,12 @@
+from pydantic import BaseModel, Field
+
+
+class TorrentInfo(BaseModel):
+ name: str = Field(...)
+ link: str = Field(...)
+
+
+class FileSet(BaseModel):
+ media_path: str = Field(...)
+ sc_subtitle: str | None = Field(None)
+ tc_subtitle: str | None = Field(None)
\ No newline at end of file
diff --git a/src/module/network/notification.py b/src/module/network/notification.py
index b6c5f991..993c231b 100644
--- a/src/module/network/notification.py
+++ b/src/module/network/notification.py
@@ -42,6 +42,7 @@ class TelegramNotification:
}
with RequestContent() as req:
resp = req.post_data(self.notification_url, data)
+ logger.debug(f"Telegram notification: {resp.status_code}")
return resp.status_code == 200
@@ -58,4 +59,5 @@ class ServerChanNotification:
}
with RequestContent() as req:
resp = req.post_data(self.notification_url, data)
+ logger.debug(f"ServerChan notification: {resp.status_code}")
return resp.status_code == 200
diff --git a/src/module/network/request_contents.py b/src/module/network/request_contents.py
index d20ec81a..1678390c 100644
--- a/src/module/network/request_contents.py
+++ b/src/module/network/request_contents.py
@@ -1,13 +1,10 @@
import re
-
+import xml.etree.ElementTree
from dataclasses import dataclass
-from bs4 import BeautifulSoup
from .request_url import RequestURL
-
from module.conf import settings
-
FILTER = "|".join(settings.rss_parser.filter)
@@ -19,27 +16,26 @@ class TorrentInfo:
class RequestContent(RequestURL):
# Mikanani RSS
- def get_torrents(self, _url: str, filter: bool = True) -> [TorrentInfo]:
+    def get_torrents(self, _url: str, _filter: bool = True) -> list[TorrentInfo]:
soup = self.get_xml(_url)
- torrent_titles = [item.title.string for item in soup.find_all("item")]
- torrent_urls = [item.get("url") for item in soup.find_all("enclosure")]
+ torrent_titles = []
+ torrent_urls = []
+
+ for item in soup.findall("./channel/item"):
+ torrent_titles.append(item.find("title").text)
+ torrent_urls.append(item.find("enclosure").attrib['url'])
+
torrents = []
for _title, torrent_url in zip(torrent_titles, torrent_urls):
- if filter:
+ if _filter:
if re.search(FILTER, _title) is None:
torrents.append(TorrentInfo(_title, torrent_url))
else:
torrents.append(TorrentInfo(_title, torrent_url))
return torrents
- def get_torrent(self, _url) -> TorrentInfo:
- soup = self.get_xml(_url)
- item = soup.find("item")
- enclosure = item.find("enclosure")
- return TorrentInfo(item.title.string, enclosure["url"])
-
- def get_xml(self, _url):
- return BeautifulSoup(self.get_url(_url).text, "xml")
+    def get_xml(self, _url) -> xml.etree.ElementTree.Element:
+ return xml.etree.ElementTree.fromstring(self.get_url(_url).text)
# API JSON
def get_json(self, _url) -> dict:
diff --git a/src/module/network/request_url.py b/src/module/network/request_url.py
index 1b98d905..8558dc45 100644
--- a/src/module/network/request_url.py
+++ b/src/module/network/request_url.py
@@ -27,7 +27,8 @@ class RequestURL:
except requests.RequestException as e:
logger.debug(f"URL: {url}")
logger.debug(e)
- logger.warning("ERROR with Connection.Please check DNS/Connection settings")
+ logger.warning(f"Cannot connect to {url}. Wait for 5 seconds.")
+ logger.warning("Please check DNS/Connection settings")
time.sleep(5)
try_time += 1
except Exception as e:
diff --git a/src/module/parser/analyser/__init__.py b/src/module/parser/analyser/__init__.py
index 9eca1780..a4cb6896 100644
--- a/src/module/parser/analyser/__init__.py
+++ b/src/module/parser/analyser/__init__.py
@@ -1,4 +1,4 @@
from .raw_parser import raw_parser
-from .rename_parser import DownloadParser
+from .torrent_parser import torrent_parser
from .tmdb_parser import TMDBMatcher
diff --git a/src/module/parser/analyser/rename_parser.py b/src/module/parser/analyser/rename_parser.py
deleted file mode 100644
index aa169c36..00000000
--- a/src/module/parser/analyser/rename_parser.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import re
-import logging
-from dataclasses import dataclass
-
-
-logger = logging.getLogger(__name__)
-
-
-@dataclass
-class DownloadInfo:
- name: str
- season: int
- suffix: str
- file_name: str
- folder_name: str
-
-
-RULES = [
- r"(.*) - (\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?(.*)",
- r"(.*)[\[ E](\d{1,3}|\d{1,3}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\] ](.*)",
- r"(.*)\[第(\d*\.*\d*)话(?:END)?\](.*)",
- r"(.*)\[第(\d*\.*\d*)話(?:END)?\](.*)",
- r"(.*)第(\d*\.*\d*)话(?:END)?(.*)",
- r"(.*)第(\d*\.*\d*)話(?:END)?(.*)",
- r"(.*)E(\d{1,3})(.*)",
-]
-
-
-class DownloadParser:
- def __init__(self):
- self.method_dict = {
- "normal": self.rename_normal,
- "pn": self.rename_pn,
- "advance": self.rename_advance,
- "no_season_pn": self.rename_no_season_pn,
- "none": self.rename_none
- }
-
-
- @staticmethod
- def rename_init(name, folder_name, season, suffix) -> DownloadInfo:
- n = re.split(r"[\[\]()【】()]", name)
- suffix = suffix if suffix else n[-1]
- if len(n) > 1:
- file_name = name.replace(f"[{n[1]}]", "")
- else:
- file_name = name
- if season < 10:
- season = f"0{season}"
- return DownloadInfo(name, season, suffix, file_name, folder_name)
-
- def rename_normal(self, info: DownloadInfo):
- for rule in RULES:
- match_obj = re.match(rule, info.name, re.I)
- if match_obj is not None:
- title = re.sub(r"([Ss]|Season )\d{1,3}", "", match_obj.group(1)).strip()
- new_name = f"{title} S{info.season}E{match_obj.group(2)}{match_obj.group(3)}"
- return new_name
-
- def rename_pn(self, info: DownloadInfo):
- for rule in RULES:
- match_obj = re.match(rule, info.file_name, re.I)
- if match_obj is not None:
- title = re.sub(r"([Ss]|Season )\d{1,3}", "", match_obj.group(1)).strip()
- title = title if title != "" else info.folder_name
- new_name = re.sub(
- r"[\[\]]",
- "",
- f"{title} S{info.season}E{match_obj.group(2)}{info.suffix}",
- )
- return new_name
-
- def rename_advance(self, info: DownloadInfo):
- for rule in RULES:
- match_obj = re.match(rule, info.file_name, re.I)
- if match_obj is not None:
- new_name = re.sub(
- r"[\[\]]",
- "",
- f"{info.folder_name} S{info.season}E{match_obj.group(2)}{info.suffix}",
- )
- return new_name
-
- def rename_no_season_pn(self, info: DownloadInfo):
- for rule in RULES:
- match_obj = re.match(rule, info.file_name, re.I)
- if match_obj is not None:
- title = match_obj.group(1).strip()
- new_name = re.sub(
- r"[\[\]]",
- "",
- f"{title} E{match_obj.group(2)}{info.suffix}",
- )
- return new_name
-
- @staticmethod
- def rename_none(info: DownloadInfo):
- return info.name
-
- def download_rename(
- self,
- name: str,
- folder_name,
- season,
- suffix,
- method
- ):
- rename_info = self.rename_init(name, folder_name, season, suffix)
- return self.method_dict[method.lower()](rename_info)
-
-
-if __name__ == "__main__":
- name = "[Lilith-Raws] Tate no Yuusha no Nariagari S02 - 02 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4]"
- rename = DownloadParser()
- new_name = rename.download_rename(name, "异世界舅舅(2022)", 1, ".mp4", "normal")
- print(new_name)
diff --git a/src/module/parser/analyser/torrent_parser.py b/src/module/parser/analyser/torrent_parser.py
new file mode 100644
index 00000000..8db2825b
--- /dev/null
+++ b/src/module/parser/analyser/torrent_parser.py
@@ -0,0 +1,138 @@
+import re
+import logging
+from dataclasses import dataclass
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class DownloadInfo:
+ name: str
+ season: int
+ suffix: str
+ file_name: str
+ folder_name: str
+
+
+RULES = [
+ r"(.*) - (\d{1,4}|\d{1,4}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?(.*)",
+ r"(.*)[\[ E](\d{1,3}|\d{1,3}\.\d{1,2})(?:v\d{1,2})?(?: )?(?:END)?[\] ](.*)",
+ r"(.*)\[(?:第)?(\d*\.*\d*)[话集話](?:END)?\](.*)",
+ r"(.*)第(\d*\.*\d*)[话話集](?:END)?(.*)",
+ r"(.*)E(\d{1,4})(.*)",
+]
+
+SUBTITLE_LANG = {
+ "zh-tw": ["TC", "CHT", "繁", "zh-tw"],
+ "zh": ["SC", "CHS", "简", "zh"],
+}
+
+
+def rename_init(name, folder_name, season, suffix) -> DownloadInfo:
+ n = re.split(r"[\[\]()【】()]", name)
+ suffix = suffix if suffix else n[-1]
+ if len(n) > 1:
+ file_name = name.replace(f"[{n[1]}]", "")
+ else:
+ file_name = name
+ if season < 10:
+ season = f"0{season}"
+ return DownloadInfo(name, season, suffix, file_name, folder_name)
+
+
+def rename_normal(info: DownloadInfo):
+ for rule in RULES:
+ match_obj = re.match(rule, info.name, re.I)
+ if match_obj is not None:
+ title = re.sub(r"([Ss]|Season )\d{1,3}", "", match_obj.group(1)).strip()
+ new_name = f"{title} S{info.season}E{match_obj.group(2)}{match_obj.group(3)}"
+ return new_name
+
+
+def rename_pn(info: DownloadInfo):
+ for rule in RULES:
+ match_obj = re.match(rule, info.file_name, re.I)
+ if match_obj is not None:
+ title = re.sub(r"([Ss]|Season )\d{1,3}", "", match_obj.group(1)).strip()
+ title = title if title != "" else info.folder_name
+ new_name = re.sub(
+ r"[\[\]]",
+ "",
+ f"{title} S{info.season}E{match_obj.group(2)}{info.suffix}",
+ )
+ return new_name
+
+
+def rename_advance(info: DownloadInfo):
+ for rule in RULES:
+ match_obj = re.match(rule, info.file_name, re.I)
+ if match_obj is not None:
+ new_name = re.sub(
+ r"[\[\]]",
+ "",
+ f"{info.folder_name} S{info.season}E{match_obj.group(2)}{info.suffix}",
+ )
+ return new_name
+
+
+def rename_no_season_pn(info: DownloadInfo):
+ for rule in RULES:
+ match_obj = re.match(rule, info.file_name, re.I)
+ if match_obj is not None:
+ title = match_obj.group(1).strip()
+ new_name = re.sub(
+ r"[\[\]]",
+ "",
+ f"{title} E{match_obj.group(2)}{info.suffix}",
+ )
+ return new_name
+
+
+def rename_none(info: DownloadInfo):
+ return info.name
+
+
+def rename_subtitle(info: DownloadInfo):
+ subtitle_lang = "zh"
+ break_flag = False
+ for key, value in SUBTITLE_LANG.items():
+ for lang in value:
+ if lang in info.name:
+ subtitle_lang = key
+ break_flag = True
+ break
+ if break_flag:
+ break
+ for rule in RULES:
+ match_obj = re.match(rule, info.file_name, re.I)
+ if match_obj is not None:
+ title = re.sub(r"([Ss]|Season )\d{1,3}", "", match_obj.group(1)).strip()
+ title = title if title != "" else info.folder_name
+ new_name = re.sub(
+ r"[\[\]]",
+ "",
+ f"{title} S{info.season}E{match_obj.group(2)}.{subtitle_lang}{info.suffix}",
+ )
+ return new_name
+
+
+METHODS = {
+ "normal": rename_normal,
+ "pn": rename_pn,
+ "advance": rename_advance,
+ "no_season_pn": rename_no_season_pn,
+ "none": rename_none,
+ "subtitle": rename_subtitle,
+}
+
+
+def torrent_parser(
+ file_name: str,
+ folder_name: str,
+ season: int,
+ suffix: str,
+ method: str = "pn",
+):
+ info = rename_init(file_name, folder_name, season, suffix)
+ return METHODS[method.lower()](info)
+
diff --git a/src/module/parser/title_parser.py b/src/module/parser/title_parser.py
index 75d6491a..c752e611 100644
--- a/src/module/parser/title_parser.py
+++ b/src/module/parser/title_parser.py
@@ -1,31 +1,27 @@
import logging
-from .analyser import raw_parser, DownloadParser, TMDBMatcher
+from .analyser import raw_parser, torrent_parser, TMDBMatcher
-from module.conf import settings
-from module.models import BangumiData
+from module.models import BangumiData, Config
logger = logging.getLogger(__name__)
-LANGUAGE = settings.rss_parser.language
class TitleParser:
def __init__(self):
- self._download_parser = DownloadParser()
self._tmdb_parser = TMDBMatcher()
- def download_parser(
- self,
- download_raw: str,
- folder_name: str | None = None,
+ @staticmethod
+ def torrent_parser(
+ method: str,
+ torrent_name: str,
+ bangumi_name: str | None = None,
season: int | None = None,
suffix: str | None = None,
- method: str = settings.bangumi_manage.rename_method
):
- return self._download_parser.download_rename(download_raw, folder_name, season, suffix, method)
-
- def tmdb_parser(self, title: str, season: int):
+ return torrent_parser(torrent_name, bangumi_name, season, suffix, method)
+ def tmdb_parser(self, title: str, season: int, language: str):
official_title, tmdb_season = None, None
try:
tmdb_info = self._tmdb_parser.tmdb_search(title)
@@ -35,15 +31,21 @@ class TitleParser:
logger.warning(f"{title} can not Matched with TMDB")
logger.info("Please change the bangumi info in webui")
return title, season
- if LANGUAGE == "zh":
+ if language == "zh":
official_title = f"{tmdb_info.title_zh} ({tmdb_info.year_number})"
- elif LANGUAGE == "jp":
+ elif language == "jp":
official_title = f"{tmdb_info.title_jp} ({tmdb_info.year_number})"
tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season
official_title = official_title if official_title else title
return official_title, tmdb_season
- def raw_parser(self, raw: str, _id: int | None = None) -> BangumiData:
+ def raw_parser(
+ self,
+ raw: str,
+ settings: Config,
+ _id: int | None = None
+ ) -> BangumiData:
+ language = settings.rss_parser.language
try:
episode = raw_parser(raw)
titles = {
@@ -54,9 +56,13 @@ class TitleParser:
title_search = episode.title_zh if episode.title_zh else episode.title_en
title_raw = episode.title_en if episode.title_en else episode.title_zh
if settings.rss_parser.enable_tmdb:
- official_title, _season = self.tmdb_parser(title_search, episode.season)
+ official_title, _season = self.tmdb_parser(
+ title_search,
+ episode.season,
+ language
+ )
else:
- official_title = titles[LANGUAGE] if titles[LANGUAGE] else titles["zh"]
+ official_title = titles[language] if titles[language] else titles["zh"]
_season = episode.season
data = BangumiData(
id=_id,
diff --git a/src/module/rss/__init__.py b/src/module/rss/__init__.py
index 9650cb6c..f7c3f4ee 100644
--- a/src/module/rss/__init__.py
+++ b/src/module/rss/__init__.py
@@ -1 +1 @@
-from .rss_analyser import RSSAnalyser
\ No newline at end of file
+from .rss_analyser import RSSAnalyser
diff --git a/src/module/rss/rss_analyser.py b/src/module/rss/rss_analyser.py
index 354cc821..b1174cbe 100644
--- a/src/module/rss/rss_analyser.py
+++ b/src/module/rss/rss_analyser.py
@@ -4,14 +4,15 @@ import logging
from module.network import RequestContent
from module.parser import TitleParser
from module.core import DownloadClient
-from module.models import BangumiData
+from module.models import BangumiData, Config
logger = logging.getLogger(__name__)
class RSSAnalyser:
- def __init__(self):
+ def __init__(self, settings: Config):
self._title_analyser = TitleParser()
+ self.settings = settings
@staticmethod
def find_id(bangumi_info: list[BangumiData]) -> int:
@@ -37,26 +38,31 @@ class RSSAnalyser:
break
if extra_add:
_id += 1
- data = self._title_analyser.raw_parser(raw_title, _id)
+ data = self._title_analyser.raw_parser(
+ raw=raw_title,
+ _id=_id,
+ settings=self.settings)
if data is not None and data.official_title not in bangumi_info:
bangumi_info.append(data)
return bangumi_info
- def rss_to_data(self, url, filter: bool = True) -> BangumiData:
+ def rss_to_data(self, url, _filter: bool = True) -> BangumiData:
with RequestContent() as req:
- rss_torrents = req.get_torrents(url, filter)
+ rss_torrents = req.get_torrents(url, _filter)
for torrent in rss_torrents:
try:
- data = self._title_analyser.raw_parser(torrent.name)
+ data = self._title_analyser.raw_parser(
+ torrent.name,
+ settings=self.settings
+ )
return data
except Exception as e:
logger.debug(e)
- def run(self, bangumi_info: list[BangumiData], download_client: DownloadClient, rss_link: str):
+ def run(self, bangumi_info: list[BangumiData], rss_link: str):
logger.info("Start collecting RSS info.")
try:
self.rss_to_datas(bangumi_info, rss_link)
- download_client.add_rules(bangumi_info, rss_link=rss_link)
except Exception as e:
logger.debug(e)
logger.info("Finished")
diff --git a/src/test/test_rss_parser.py b/src/test/test_rss_parser.py
new file mode 100644
index 00000000..3aaf4c6a
--- /dev/null
+++ b/src/test/test_rss_parser.py
@@ -0,0 +1,14 @@
+from module.models import Config
+from module.rss import RSSAnalyser
+
+
+def test_rss_analyser():
+ settings = Config()
+ rss_analyser = RSSAnalyser(settings)
+ url = "https://mikanani.me/RSS/Bangumi?bangumiId=2966&subgroupid=552"
+
+ data = rss_analyser.rss_to_data(url=url)
+
+ assert data.title_raw == "Yamada-kun to Lv999 no Koi wo Suru"
+ assert data.official_title == "和山田谈场 Lv999 的恋爱"
+ assert data.season == 1
\ No newline at end of file
diff --git a/src/test/test_torrent_parser.py b/src/test/test_torrent_parser.py
new file mode 100644
index 00000000..816fbaa7
--- /dev/null
+++ b/src/test/test_torrent_parser.py
@@ -0,0 +1,26 @@
+from module.parser.analyser import torrent_parser
+
+
+def test_torrent_parser():
+ file_name = "[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"
+ folder_name = "我内心的糟糕念头(2023)"
+ season = 1
+ suffix = ".mp4"
+ assert torrent_parser(file_name, folder_name, season, suffix, "pn") == "Boku no Kokoro no Yabai Yatsu S01E01.mp4"
+ assert torrent_parser(file_name, folder_name, season, suffix, "advance") == "我内心的糟糕念头(2023) S01E01.mp4"
+ assert torrent_parser(file_name, folder_name, season, suffix, "none") == "[Lilith-Raws] Boku no Kokoro no Yabai Yatsu - 01 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4].mp4"
+
+
+ file_name = "[Sakurato] Tonikaku Kawaii S2 [01][AVC-8bit 1080p AAC][CHS].mp4"
+ folder_name = "总之就是非常可爱(2021)"
+ season = 2
+ suffix = ".mp4"
+ assert torrent_parser(file_name, folder_name, season, suffix, "pn") == "Tonikaku Kawaii S02E01.mp4"
+ assert torrent_parser(file_name, folder_name, season, suffix, "advance") == "总之就是非常可爱(2021) S02E01.mp4"
+
+ file_name = "[SweetSub&LoliHouse] Heavenly Delusion - 01 [WebRip 1080p HEVC-10bit AAC ASSx2].mkv"
+ folder_name = "天国大魔境(2023)"
+ season = 1
+ suffix = ".mkv"
+ assert torrent_parser(file_name, folder_name, season, suffix, "pn") == "Heavenly Delusion S01E01.mkv"
+ assert torrent_parser(file_name, folder_name, season, suffix, "advance") == "天国大魔境(2023) S01E01.mkv"
\ No newline at end of file