Merge pull request #183 from EstrellaXD/2.6.0-dev

2.6.0 dev
This commit is contained in:
Estrella Pan
2023-03-15 10:53:01 +08:00
committed by GitHub
58 changed files with 295 additions and 273 deletions

View File

@@ -16,11 +16,11 @@ coverage.xml
*.log
.git
.mypy_cache
.pytest_cache
../.pytest_cache
.hypothesis
module/tests
module/conf/const_dev.py
src/module/tests
src/module/conf/const_dev.py
config/bangumi.json/config/bangumi.json
/docs
/.github

View File

@@ -1,8 +1,9 @@
name: Deploy To Dockerhub(dev)
on:
workflow_dispatch:
branches: [ 2.6.0-dev ]
push:
tags:
- '\d+\.\d+\.\d+-beta\d+'
jobs:
latest:
@@ -11,8 +12,9 @@ jobs:
- name: Checkout
uses: actions/checkout@v3
- name: Create Version info
working-directory: ./src
run: |
echo "version='2.6.0'" > module/conf/version.py
echo "VERSION = '2.6.0-beta'" > module/__version__.py
-
name: Set up QEMU
uses: docker/setup-qemu-action@v2
@@ -29,6 +31,10 @@ jobs:
name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: true
tags: estrellaxd/auto_bangumi:dev-latest
file: ./Dockerfile
tags: |
estrellaxd/auto_bangumi:dev-latest
estrellaxd/auto_bangumi:dev-${{ github.sha }}
estrellaxd/auto_bangumi:${{ github.ref }}
file: Dockerfile

View File

@@ -1,9 +1,13 @@
name: Build(Docker)
on:
release:
types:
- released
push:
tags:
- '\d+\.\d+\.\d+'
pull_request:
tags:
- '\d+\.\d+\.\d+'
jobs:
docker:
@@ -13,15 +17,9 @@ jobs:
name: Checkout
uses: actions/checkout@v3
- name: Create Version info
working-directory: ./src
run: |
echo "version='$GITHUB_REF_NAME'" > module/conf/version.py
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ secrets.DOCKER_HUB_USERNAME }}/auto_bangumi
tags: |
type=ref, event=tag
echo "VERSION = '$GITHUB_REF_NAME'" > module/__version__.py
-
name: Set up QEMU
uses: docker/setup-qemu-action@v2
@@ -41,5 +39,8 @@ jobs:
context: .
platforms: linux/amd64,linux/arm64,linux/arm
push: true
tags: ${{ steps.meta.outputs.tags }}
tags: |
estrellaxd/auto_bangumi:latest
estrellaxd/auto_bangumi:${{ github.sha }}
estrellaxd/auto_bangumi:${{ github.ref }}
labels: ${{ steps.meta.outputs.labels }}

View File

@@ -17,17 +17,17 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
- name: Set up Python 3.11
uses: actions/setup-python@v3
with:
python-version: "3.10"
python-version: "3.11"
- name: Install dependencies
working-directory: ./src
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
pip install pytest
mkdir config
- name: Test
run: pytest test/test_raw_parser.py
working-directory: ./src
run: |
pytest test/test_raw_parser.py

23
.gitignore vendored
View File

@@ -162,21 +162,20 @@ cython_debug/
#.idea/
# Custom
/module/conf/const_dev.py
/config
/module/tester.py
/module/config
/src/test.py
/module/parser/analyser/tmdb_parser.py
/src/module/parser/analyser/tmdb_parser.py
/module/run_debug.sh
/module/debug_run.sh
/module/__version__.py
/data/
/src/module/run_debug.sh
/src/module/debug_run.sh
/src/module/__version__.py
/src/data/
/module/conf/config_dev.ini
/src/module/conf/config_dev.ini
test.*
.run
/module/conf/version.py
/templates/
/src/templates/
/src/config/
/src/debuger.py

View File

@@ -1,12 +1,4 @@
# syntax=docker/dockerfile:1
FROM python:3.11-buster AS build
RUN mkdir /install
WORKDIR /install
COPY requirements.txt .
RUN python3 -m pip install --upgrade pip \
&& pip install -r requirements.txt --prefix="/install"
FROM python:3.11-alpine
ENV TZ=Asia/Shanghai \
@@ -14,10 +6,14 @@ ENV TZ=Asia/Shanghai \
PGID=1000 \
UMASK=022
WORKDIR /
WORKDIR /app
COPY --from=build --chmod=777 /install /usr/local
COPY --chmod=755 . .
COPY src/requirements.txt .
RUN python3 -m pip install --upgrade pip \
&& pip install -r requirements.txt --no-cache-dir
COPY --chmod=755 src/. /app/.
RUN apk add --no-cache \
curl \
@@ -33,13 +29,13 @@ RUN wget "https://github.com/Rewrite0/Auto_Bangumi_WebUI/releases/download/$(cur
RUN addgroup -S auto_bangumi -g 1000 && \
adduser -S auto_bangumi -G auto_bangumi -h /home/auto_bangumi -u 1000 && \
usermod -s /bin/bash auto_bangumi && \
mkdir -p "/config" && \
mkdir -p "/data" && \
mkdir -p "config" && \
mkdir -p "data" && \
chmod a+x \
run.sh
EXPOSE 7892
VOLUME [ "/config" , "/data"]
VOLUME [ "/app/config" , "/app/data"]
CMD ["sh", "run.sh"]

View File

@@ -1,5 +0,0 @@
from .download_client import DownloadClient
from .eps_complete import FullSeasonGet
from .renamer import Renamer
from .rss_analyser import RSSAnalyser
from .api_func import APIProcess

View File

@@ -1,6 +0,0 @@
from .request_contents import RequestContent
from .notification import PostNotification

View File

@@ -1,14 +0,0 @@
import requests
from module.conf import settings
class PostNotification:
    """Pushbullet-based notification sender.

    NOTE(review): Pushbullet's documented API is ``POST /v2/pushes`` with an
    ``Access-Token`` header; this URL pattern (token and message in the path)
    looks wrong — verify before relying on it.
    """

    def __init__(self):
        # Token comes from project settings; presumably a Pushbullet access token.
        self.token = settings.notification_token
        # Builds the request URL for a given message.
        self.notification_url = lambda message: f"https://api.pushbullet.com/v2/{self.token}/{message}"

    def ifttt_post(self, message):
        """Send *message*; return True when the service responds 200 OK."""
        url = self.notification_url(message)
        response = requests.get(url)
        return response.status_code == 200

View File

@@ -1,27 +0,0 @@
# coding:utf-8
from setuptools import setup, find_packages
# Package metadata for distribution via setuptools.
setup(
    name="auto_bangumi",  # distribution name
    version="2.4.0b4",  # package version
    description="一个全自动追番整理下载工具",
    long_description="""
本项目是基于 Mikan Project、qBittorrent 的全自动追番整理下载工具。
只需要在 Mikan Project 上订阅番剧,就可以全自动追番。
并且整理完成的名称和目录可以直接被 Plex、Jellyfin 等媒体库软件识别,
无需二次刮削。""",  # long description (user-facing text kept in Chinese)
    author="EstrellaXD",  # author
    author_email="estrellaxd05@gmail.com",  # author e-mail
    url="https://github.com/EstrellaXD/Auto_Bangumi",  # project homepage
    packages=find_packages(where=".", exclude=("tests",), include=('*',)),
    package_data={"auto_bangumi.RssFilter": ["*.json"]},
    package_dir={"auto_bangumi": "auto_bangumi"},
    install_requires=[
        "qbittorrent-api",
        "bs4",
        "requests",
        "lxml",
        "zhconv",
    ]
)

View File

@@ -13,4 +13,5 @@ if __name__ == "__main__":
p.start()
processes.append(p)
for p in processes:
p.join()
p.join()

View File

@@ -4,12 +4,12 @@ from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse, FileResponse
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
import logging
from .core import APIProcess
from .conf import settings, DATA_PATH, LOG_PATH
from .utils import json_config
from .models.api import *
logger = logging.getLogger(__name__)
@@ -47,10 +47,6 @@ def remove_rule(bangumi_title: str):
return api_func.remove_rule(bangumi_title)
class RssLink(BaseModel):
rss_link: str
@app.post("/api/v1/collection")
async def collection(link: RssLink):
return api_func.download_collection(link.rss_link)
@@ -61,11 +57,6 @@ async def subscribe(link: RssLink):
return api_func.add_subscribe(link.rss_link)
class AddRule(BaseModel):
title: str
season: int
@app.post("/api/v1/addRule")
async def add_rule(info: AddRule):
return api_func.add_rule(info.title, info.season)
@@ -76,6 +67,4 @@ def run():
uvicorn.run(app, host="0.0.0.0", port=settings.program.webui_port)
if __name__ == "__main__":
run()

View File

@@ -5,8 +5,9 @@ import logging
from module.conf import settings, setup_logger, LOG_PATH, DATA_PATH, VERSION
from module.utils import json_config
from module.core import RSSAnalyser, DownloadClient, Renamer, FullSeasonGet
from module.core import DownloadClient
from module.manager import Renamer, FullSeasonGet
from module.rss import RSSAnalyser
logger = logging.getLogger(__name__)
@@ -28,8 +29,8 @@ def load_data_file():
bangumi_data = json_config.load(DATA_PATH)
if bangumi_data["data_version"] != settings.program.data_version or bangumi_data["rss_link"] != settings.rss_parser.link:
bangumi_data = {
"rss_link": settings.rss_link,
"data_version": settings.data_version,
"rss_link": settings.rss_parser.link,
"data_version": settings.program.data_version,
"bangumi_info": []
}
logger.info("Rebuilding data information...")
@@ -87,6 +88,3 @@ def run():
bangumi_data = load_data_file()
# 主程序循环
main_process(bangumi_data, download_client)
if __name__ == '__main__':
run()

View File

@@ -1,5 +1,5 @@
from .config import settings, VERSION
from .log import setup_logger, LOG_PATH
from .config import settings, VERSION
TMDB_API = "32b19d6a05b512190a056fa4e747cbbc"

View File

@@ -5,6 +5,10 @@ from dataclasses import dataclass
from .const import DEFAULT_SETTINGS, ENV_TO_ATTR
try:
from ..__version__ import VERSION
except ImportError:
VERSION = "DEV_VERSION"
class ConfLoad(dict):
@@ -29,7 +33,7 @@ class Settings:
self.load(path)
def load(self, path: str | None):
if isinstance(path, dict):
if path is None:
conf = DEFAULT_SETTINGS
elif os.path.isfile(path):
with open(path, "r") as f:
@@ -52,20 +56,19 @@ class Settings:
for key, section in ENV_TO_ATTR.items():
for env, attr in section.items():
if env in os.environ:
settings[key][attr] = self._val_from_env(env, attr)
attr_name = attr[0] if isinstance(attr, tuple) else attr
settings[key][attr_name] = self._val_from_env(env, attr)
with open(CONFIG_PATH, "w") as f:
json.dump(settings, f, indent=4)
return settings
try:
from .version import VERSION
if os.path.isdir("config"):
CONFIG_PATH = "config/config.json"
else:
CONFIG_PATH = None
except ImportError:
VERSION = "DEV_VERSION"
if os.path.isdir("config") and VERSION == "DEV_VERSION":
CONFIG_PATH = "config/config_dev.json"
elif os.path.isdir("config") and VERSION != "DEV_VERSION":
CONFIG_PATH = "config/config.json"
else:
CONFIG_PATH = None
settings = Settings(CONFIG_PATH)

View File

@@ -55,9 +55,9 @@ DEFAULT_SETTINGS = {
ENV_TO_ATTR = {
"program": {
"AB_INTERVAL_TIME": ("sleep_time", lambda e: float(e)),
"AB_RENAME_FREQ": ("times", lambda e: float(e)),
"AB_WEBUI_PORT": ("webui_port", lambda e: int(e)),
"AB_INTERVAL_TIME": ("sleep_time", float),
"AB_RENAME_FREQ": ("times", float),
"AB_WEBUI_PORT": ("webui_port", int),
},
"downloader": {
"AB_DOWNLOADER_HOST": "host",
@@ -66,21 +66,21 @@ ENV_TO_ATTR = {
"AB_DOWNLOAD_PATH": "path",
},
"rss_parser": {
"AB_RSS_COLLECTOR": ("enable", lambda e: e.lower() in ("True", "1", "t")),
"AB_RSS_COLLECTOR": ("enable", lambda e: e.lower() in ("true", "1", "t")),
"AB_RSS": "link",
"AB_NOT_CONTAIN": ("filter", lambda e: e.split("|")),
"AB_LANGUAGE": "language",
"AB_ENABLE_TMDB": ("enable_tmdb", lambda e: e.lower() in ("True", "1", "t")),
"AB_ENABLE_TMDB": ("enable_tmdb", lambda e: e.lower() in ("true", "1", "t")),
},
"bangumi_manage": {
"AB_RENAME": ("enable", lambda e: e.lower() in ("True", "1", "t")),
"AB_RENAME": ("enable", lambda e: e.lower() in ("true", "1", "t")),
"AB_METHOD": "method",
"AB_GROUP_TAG": ("group_tag", lambda e: e.lower() in ("True", "1", "t")),
"AB_EP_COMPLETE": ("eps_complete", lambda e: e.lower() in ("True", "1", "t")),
"AB_REMOVE_BAD_BT": ("remove_bad_torrent", lambda e: e.lower() in ("True", "1", "t")),
"AB_GROUP_TAG": ("group_tag", lambda e: e.lower() in ("true", "1", "t")),
"AB_EP_COMPLETE": ("eps_complete", lambda e: e.lower() in ("true", "1", "t")),
"AB_REMOVE_BAD_BT": ("remove_bad_torrent", lambda e: e.lower() in ("true", "1", "t")),
},
"debug": {
"AB_DEBUG_MODE": ("enable", lambda e: e.lower() in ("True", "1", "t")),
"AB_DEBUG_MODE": ("enable", lambda e: e.lower() in ("true", "1", "t")),
},
"proxy": {
"AB_HTTP_PROXY": "http",

View File

@@ -1,5 +1,6 @@
import logging
from module.conf import settings
from .config import settings
LOG_PATH = "data/log.txt"

View File

@@ -0,0 +1,2 @@
from .download_client import DownloadClient
from .api_func import APIProcess

View File

@@ -1,6 +1,8 @@
import re
from module.core import FullSeasonGet, DownloadClient, RSSAnalyser
from module.core import DownloadClient
from module.manager import FullSeasonGet
from module.rss import RSSAnalyser
from module.utils import json_config
from module.conf import DATA_PATH

View File

@@ -3,7 +3,6 @@ import logging
import os
from module.downloader import getClient
from module.downloader.exceptions import ConflictError
from module.conf import settings
@@ -115,8 +114,3 @@ class DownloadClient:
def get_torrent_path(self, hashes):
return self.client.get_torrent_path(hashes)
if __name__ == "__main__":
put = DownloadClient()
put.rss_feed()

View File

@@ -7,5 +7,5 @@ def getClient():
password = settings.downloader.password
# TODO 多下载器支持
# 从 settings 里读取下载器名称,然后返回对应 Client
from module.downloader.qb_downloader import QbDownloader
return QbDownloader(host, username, password)
from .qb_downloader import QbDownloader
return QbDownloader(host, username, password)

View File

@@ -0,0 +1,2 @@
from .eps_complete import FullSeasonGet
from .renamer import Renamer

View File

@@ -5,7 +5,7 @@ import logging
from module.conf import settings
from module.network import RequestContent
from .download_client import DownloadClient
from module.core.download_client import DownloadClient
logger = logging.getLogger(__name__)
SEARCH_KEY = ["group", "title_raw", "season_raw", "subtitle", "source", "dpi"]
@@ -13,7 +13,7 @@ SEARCH_KEY = ["group", "title_raw", "season_raw", "subtitle", "source", "dpi"]
class FullSeasonGet:
def __init__(self):
self._get_rss = RequestContent()
pass
@staticmethod
def init_eps_complete_search_str(data: dict):
@@ -24,7 +24,8 @@ class FullSeasonGet:
def get_season_torrents(self, data: dict):
keyword = self.init_eps_complete_search_str(data)
torrents = self._get_rss.get_torrents(f"https://mikanani.me/RSS/Search?searchstr={keyword}")
with RequestContent() as req:
torrents = req.get_torrents(f"https://mikanani.me/RSS/Search?searchstr={keyword}")
return torrents
@staticmethod
@@ -34,7 +35,7 @@ class FullSeasonGet:
download_info = {
"url": torrent.torrent_link,
"save_path": os.path.join(
settings.download_path,
settings.downloader.path,
data["official_title"],
f"Season {data['season']}")
}
@@ -56,7 +57,8 @@ class FullSeasonGet:
self.download_eps(data, download_client)
def download_collection(self, data, link, download_client: DownloadClient):
torrents = self._get_rss.get_torrents(link)
with RequestContent() as req:
torrents = req.get_torrents(link)
downloads = self.collect_season_torrents(data, torrents)
logger.info(f"Starting download {data.get('official_title')}")
for download in downloads:
@@ -64,18 +66,3 @@ class FullSeasonGet:
logger.info("Completed!")
if __name__ == "__main__":
a = FullSeasonGet()
data = {
"official_title": "指名!",
"title_raw": "CUE!",
"season": 1,
"season_raw": "",
"group": "喵萌Production",
"dpi": "1080p",
"source": None,
"subtitle": "简日双语",
"added": True,
"eps_collect": True
}
print(a.init_eps_complete_search_str(data))

View File

@@ -1,13 +1,13 @@
import logging
import os.path
import re
import os.path
from pathlib import PurePath, PureWindowsPath
from .download_client import DownloadClient
from module.core.download_client import DownloadClient
from module.conf import settings
from module.parser import TitleParser
from module.network import PostNotification, ServerChanNotification
logger = logging.getLogger(__name__)
@@ -65,8 +65,9 @@ class Renamer:
try:
new_name = self._renamer.download_parser(name, folder_name, season, suffix, settings.bangumi_manage.rename_method)
if path_name != new_name:
old_name = info.content_path.replace(info.save_path, "")
self.client.rename_torrent_file(torrent_hash, new_name, old_name, new_name)
old_path = info.content_path.replace(info.save_path, "")
old_path = old_path[len(os.path.sep):]
self.client.rename_torrent_file(torrent_hash, new_name, old_path, new_name)
rename_count += 1
else:
continue

View File

@@ -45,7 +45,7 @@ class RePath:
path = rules.get(rule).savePath
must_contain = rules.get(rule).mustContain
season, folder_name = self.analyse_path(path)
new_path = PurePath(settings.download_path, folder_name, f"Season {season}").__str__()
new_path = PurePath(settings.downloader.path, folder_name, f"Season {season}").__str__()
all_rule.append(RuleInfo(rule, must_contain, season, folder_name, new_path))
return all_rule

View File

@@ -0,0 +1 @@
from .bangumi import *

19
src/module/models/api.py Normal file
View File

@@ -0,0 +1,19 @@
from pydantic import BaseModel
class RssLink(BaseModel):
    """Request body carrying a single RSS feed URL."""

    # Full URL of the RSS feed to subscribe to / collect from.
    rss_link: str


class AddRule(BaseModel):
    """Request body for creating a rename/download rule."""

    # Bangumi title the rule applies to.
    title: str
    # Season number used when building the rule.
    season: int


class ChangeConfig(BaseModel):
    """Request body wrapping a full configuration dict."""

    # New configuration values; presumably mirrors the app settings schema — verify.
    config: dict


class ChangeRule(BaseModel):
    """Request body wrapping a single rule dict."""

    # Rule payload; exact keys depend on the consumer — verify against caller.
    rule: dict

View File

@@ -0,0 +1,28 @@
from dataclasses import dataclass
@dataclass
class MatchRule:
    """A matching rule tying a keyword and filter terms to an RSS link."""

    keyword: str  # keyword to match — presumably against torrent titles; verify
    filter: list  # filter terms applied alongside the keyword
    rss_link: str  # RSS feed URL the rule applies to


@dataclass
class GroupFilter:
    """Per-group filter definition."""

    name: str  # group name — presumably a subtitle/release group; verify
    filter: list  # filter terms for this group


@dataclass
class Episode:
    """Parsed metadata for a single episode extracted from a raw title."""

    title_en: str | None  # English title, when present in the raw name
    title_zh: str | None  # Chinese title, when present
    title_jp: str | None  # Japanese title, when present
    season: int  # parsed season number
    season_raw: str  # season substring as it appeared in the raw title
    episode: int  # parsed episode number
    sub: str  # subtitle/language tag
    group: str  # release or subtitle group name
    resolution: str  # e.g. "1080p" — assumed from parser context; TODO confirm
    source: str  # source medium (e.g. WebRip) — assumed; TODO confirm

View File

@@ -0,0 +1,2 @@
from .request_contents import RequestContent
from .notification import PostNotification, ServerChanNotification

View File

@@ -0,0 +1,62 @@
import logging
import requests
from module.conf import settings
logger = logging.getLogger(__name__)
class PostNotification:
    """Pushbullet-based notification sender.

    NOTE(review): Pushbullet's documented API is ``POST /v2/pushes`` with an
    ``Access-Token`` header; this URL pattern (token and message in the path)
    looks wrong — verify before relying on it.
    """

    def __init__(self):
        # Token comes from project settings; presumably a Pushbullet access token.
        self.token = settings.notification_token
        # Builds the request URL for a given message.
        self.notification_url = lambda message: f"https://api.pushbullet.com/v2/{self.token}/{message}"

    def ifttt_post(self, message):
        """Send *message*; return True when the service responds 200 OK."""
        url = self.notification_url(message)
        response = requests.get(url)
        return response.status_code == 200
class TelegramNotification:
    """Telegram Bot API notification sender."""

    def __init__(self):
        # Bot token from project settings; presumably a BotFather token — verify.
        self.token = settings.notification_token
        self.notification_url = f"https://api.telegram.org/bot{self.token}/sendMessage"

    def send_msg(self, title: str, desp: str) -> bool:
        """Send *title* and *desp* as one Telegram message.

        Returns True on success, False when notifications are disabled or
        the HTTP request fails.
        """
        if not settings.notification_enable:
            return False
        data = {
            "chat_id": settings.notification_chat_id,
            "text": f"{title}\n{desp}",
        }
        # BUG FIX: the original built `data` but never sent it, and the
        # function implicitly returned None despite its `-> bool` annotation.
        try:
            resp = requests.post(self.notification_url, json=data, timeout=3)
            resp.raise_for_status()
        except requests.RequestException as e:
            logger.error("[TelegramNotification] send fail, error: %s", e)
            return False
        return True
class ServerChanNotification:
    """ServerChan (Server酱) push notification sender."""

    def __init__(self):
        # NOTE(review): reads nested settings.notification.*, unlike the other
        # senders in this module which read flat settings.notification_* — verify.
        self.token = settings.notification.token
        self.notification_url = f"https://sctapi.ftqq.com/{self.token}.send"

    def send_msg(self, title: str, desp: str) -> bool:
        """POST *title*/*desp* to ServerChan; return True on success.

        Returns False when notifications are disabled or the request fails.
        """
        if not settings.notification.enable:
            return False
        data = {
            "title": title,
            "desp": desp,
        }
        try:
            resp = requests.post(self.notification_url, json=data, timeout=3)
            resp.raise_for_status()
        except requests.RequestException as e:
            # Use the module logger (was the root `logging` logger) so the
            # record carries this module's name; lazy %-args avoid formatting
            # when the level is disabled.
            logger.error("[ServerChanNotification] send fail, error: %s", e)
            return False
        return True
# Manual smoke test: pushes a sample notification (performs network I/O).
if __name__ == '__main__':
    name = "勇者、辞职不干了"
    notification = ServerChanNotification()
    # BUG FIX: the title was missing its opening 《 bracket (the desp string
    # has the matched 《…》 pair).
    notification.send_msg(f"《{name[:10]}》缓存成功", f"[Auto Bangumi]《{name}》缓存成功")

View File

@@ -1,25 +1,26 @@
from dataclasses import dataclass
from bs4 import BeautifulSoup
from .request_url import RequestURL
from module.conf import settings
import re
FILTER = "|".join(settings.rss_parser.filter)
@dataclass
class TorrentInfo:
name: str
torrent_link: str
class RequestContent:
def __init__(self):
self._req = RequestURL()
class RequestContent(RequestURL):
# Mikanani RSS
def get_torrents(self, _url: str) -> [TorrentInfo]:
soup = self._req.get_content(_url)
soup = self.get_xml(_url)
torrent_titles = [item.title.string for item in soup.find_all("item")]
torrent_urls = [item.get("url") for item in soup.find_all("enclosure")]
torrents = []
@@ -29,14 +30,14 @@ class RequestContent:
return torrents
def get_torrent(self, _url) -> TorrentInfo:
soup = self._req.get_content(_url)
soup = self.get_xml(_url)
item = soup.find("item")
enclosure = item.find("enclosure")
return TorrentInfo(item.title.string, enclosure["url"])
def get_xml(self, url):
return BeautifulSoup(self.get_url(url).text, "xml")
# API JSON
def get_json(self, _url) -> dict:
return self._req.get_content(_url, content="json")
def close_session(self):
self._req.close()
return self.get_url(_url).json()

View File

@@ -5,8 +5,6 @@ import socket
import socks
import logging
from bs4 import BeautifulSoup
from module.conf import settings
logger = logging.getLogger(__name__)
@@ -14,7 +12,31 @@ logger = logging.getLogger(__name__)
class RequestURL:
def __init__(self):
    """Create a fresh HTTP session with XML-friendly default headers."""
    self.session = requests.session()
    # Default headers for every request; "Accept: application/xml" because
    # the callers fetch RSS/XML feeds.
    self.header = {
        "user-agent": "Mozilla/5.0",
        "Accept": "application/xml"
    }
def get_url(self, url):
    """GET *url*, retrying up to 5 times on request errors.

    Returns the ``requests.Response`` on success.  Implicitly returns
    ``None`` when all 5 attempts fail or an unexpected exception occurs —
    callers must be prepared for a ``None`` result.
    """
    times = 0
    while times < 5:
        try:
            req = self.session.get(url=url, headers=self.header)
            # Treat HTTP error statuses as failures so they are retried too.
            req.raise_for_status()
            return req
        except requests.RequestException as e:
            logger.debug(f"URL: {url}")
            logger.debug(e)
            logger.warning("ERROR with Connection.Please check DNS/Connection settings")
            # Back off 5 s between attempts.
            time.sleep(5)
            times += 1
        except Exception as e:
            # Unexpected error: log and give up (falls through, returns None).
            logger.debug(f"URL: {url}")
            logger.debug(e)
            break
def __enter__(self):
self.session = requests.Session()
if settings.proxy.enable:
if settings.proxy.type == "http":
url = f"http://{settings.proxy.host}:{settings.proxy.port}"
@@ -26,31 +48,9 @@ class RequestURL:
socks.set_default_proxy(socks.SOCKS5, addr=settings.proxy.host, port=settings.proxy.port, rdns=True,
username=settings.proxy.username, password=settings.proxy.password)
socket.socket = socks.socksocket
self.header = {
"user-agent": "Mozilla/5.0",
"Accept": "application/xml"
}
return self
def get_url(self, url):
times = 0
while times < 5:
try:
req = self.session.get(url=url, headers=self.header)
return req
except Exception as e:
logger.debug(f"URL: {url}")
logger.debug(e)
logger.warning("ERROR with Connection.Please check DNS/Connection settings")
time.sleep(5)
times += 1
def get_content(self, url, content="xml"):
if content == "xml":
return BeautifulSoup(self.get_url(url).text, content)
elif content == "json":
return self.get_url(url).json()
def close(self):
def __exit__(self, exc_type, exc_val, exc_tb):
self.session.close()

View File

@@ -7,11 +7,11 @@ class BgmAPI:
f"https://api.bgm.tv/search/subject/{e}?type=2"
self.info_url = lambda e: \
f"https://api.bgm.tv/subject/{e}"
self._request = RequestContent()
def search(self, title):
url = self.search_url(title)
contents = self._request.get_json(url)["list"]
if contents.__len__() == 0:
return None
return contents[0]["name"], contents[0]["name_cn"]
with RequestContent() as req:
contents = req.get_json(url)["list"]
if contents.__len__() == 0:
return None
return contents[0]["name"], contents[0]["name_cn"]

View File

@@ -2,7 +2,7 @@ import logging
import re
from dataclasses import dataclass
# from parser.episode import Episode
from module.models import Episode
logger = logging.getLogger(__name__)
@@ -30,18 +30,7 @@ CHINESE_NUMBER_MAP = {
}
@dataclass
class Episode:
title_en: str or None
title_zh: str or None
title_jp: str or None
season: int
season_raw: str
episode: int
sub: str
group: str
resolution: str
source: str
class RawParser:

View File

@@ -22,14 +22,14 @@ class TMDBMatcher:
f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API}&page=1&query={e}&include_adult=false"
self.info_url = lambda e: \
f"https://api.themoviedb.org/3/tv/{e}?api_key={TMDB_API}&language=zh-CN"
self._request = RequestContent()
def is_animation(self, tv_id) -> bool:
url_info = self.info_url(tv_id)
type_id = self._request.get_json(url_info)["genres"]
for type in type_id:
if type.get("id") == 16:
return True
with RequestContent() as req:
type_id = req.get_json(url_info)["genres"]
for type in type_id:
if type.get("id") == 16:
return True
return False
# def get_zh_title(self, id):
@@ -51,20 +51,20 @@ class TMDBMatcher:
return int(re.findall(r"\d", season.get("season"))[0])
def tmdb_search(self, title) -> TMDBInfo:
url = self.search_url(title)
contents = self._request.get_json(url).get("results")
if contents.__len__() == 0:
url = self.search_url(title.replace(" ", ""))
contents = self._request.get_json(url).get("results")
# 判断动画
for content in contents:
id = content["id"]
if self.is_animation(id):
break
url_info = self.info_url(id)
info_content = self._request.get_json(url_info)
# 关闭链接
self._request.close()
with RequestContent() as req:
url = self.search_url(title)
contents = req.get_json(url).get("results")
if contents.__len__() == 0:
url = self.search_url(title.replace(" ", ""))
contents = req.get_json(url).get("results")
# 判断动画
for content in contents:
id = content["id"]
if self.is_animation(id):
break
url_info = self.info_url(id)
info_content = req.get_json(url_info)
season = [{"season": s.get("name"), "air_date": s.get("air_date")} for s in info_content.get("seasons")]
last_season = self.get_season(season)
title_jp = info_content.get("original_name")

View File

@@ -1,7 +1,7 @@
from thefuzz import fuzz
import logging
from utils import json_config
from conf import settings
from module.utils import json_config
from module.conf import settings
logger = logging.getLogger(__name__)
@@ -40,13 +40,3 @@ class FuzzMatch:
return max_value, max_info["main"]
# logger.debug(max(value))
if __name__ == "__main__":
from conf.const_dev import DEV_SETTINGS
settings.init(DEV_SETTINGS)
f = FuzzMatch()
name = "勇者、辞职不干了"
value, title = f.find_max_name(name)
print(f"Raw Name: {name} \n"
f"Match Name: {title} \n"
f"Match Value: {value}")

View File

@@ -1,11 +1,13 @@
import logging
from .analyser import RawParser, DownloadParser, TMDBMatcher
from module.conf import settings
logger = logging.getLogger(__name__)
LANGUAGE = settings.rss_parser.language
class TitleParser:
def __init__(self):
self._raw_parser = RawParser()

View File

@@ -0,0 +1 @@
from .rss_analyser import RSSAnalyser

View File

@@ -1,6 +1,5 @@
import re
import logging
from module.network import RequestContent
from module.parser import TitleParser
@@ -14,11 +13,10 @@ logger = logging.getLogger(__name__)
class RSSAnalyser:
def __init__(self):
self._title_analyser = TitleParser()
self._request = RequestContent()
def rss_to_datas(self, bangumi_info: list) -> list:
rss_torrents = self._request.get_torrents(settings.rss_parser.link)
self._request.close_session()
with RequestContent() as req:
rss_torrents = req.get_torrents(settings.rss_parser.link)
for torrent in rss_torrents:
raw_title = torrent.name
extra_add = True
@@ -35,8 +33,8 @@ class RSSAnalyser:
return bangumi_info
def rss_to_data(self, url) -> dict:
rss_torrents = self._request.get_torrents(url)
self._request.close_session()
with RequestContent() as req:
rss_torrents = req.get_torrents(url)
for torrent in rss_torrents:
try:
data = self._title_analyser.return_dict(torrent.name)

View File

@@ -6,6 +6,5 @@ if [ -f /config/bangumi.json ]; then
fi
umask ${UMASK}
python3 main.py