- refactor
- change config from config.ini to config.json
This commit is contained in:
EstrellaXD
2023-03-04 20:18:31 +08:00
parent a7735ac366
commit 131682756e
49 changed files with 241 additions and 454 deletions

View File

@@ -19,8 +19,8 @@ coverage.xml
.pytest_cache
.hypothesis
autobangumi/tests
autobangumi/conf/const_dev.py
module/tests
module/conf/const_dev.py
config/bangumi.json
/docs
/.github

View File

@@ -28,6 +28,4 @@ jobs:
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Test
run: |
cd src
python -m unittest discover tests
run: python -m unittest discover tests

19
.gitignore vendored
View File

@@ -162,19 +162,20 @@ cython_debug/
#.idea/
# Custom
/autobangumi/conf/const_dev.py
/module/conf/const_dev.py
/config
/autobangumi/tester.py
/autobangumi/config
/module/tester.py
/module/config
/autobangumi/parser/analyser/tmdb_parser.py
/module/parser/analyser/tmdb_parser.py
/autobangumi/run_debug.sh
/autobangumi/debug_run.sh
/autobangumi/__version__.py
/module/run_debug.sh
/module/debug_run.sh
/module/__version__.py
/data/
/autobangumi/conf/config_dev.ini
/module/conf/config_dev.ini
test.*
.run
.run
/module/conf/version.py

View File

@@ -17,7 +17,7 @@ ENV TZ=Asia/Shanghai \
WORKDIR /src
COPY --from=build --chmod=777 /install /usr/local
COPY --chmod=755 autobangumi /src
COPY --chmod=755 module /src
RUN apk add --no-cache \
curl \

View File

@@ -1,11 +0,0 @@
from .setting import settings
from .parse import parse
from .log import setup_logger

View File

@@ -1,34 +0,0 @@
import os
from autobangumi.conf import const
class Settings(dict):
    """Dict-backed settings store with attribute-style access.

    Seeded with ``const.DEFAULT_SETTINGS``; ``init()`` overlays values
    read from environment variables (mapped via ``const.ENV_TO_ATTR``)
    and finally any explicit overrides.
    """

    def __getattr__(self, item):
        # Unknown keys read as None instead of raising AttributeError.
        return self.get(item)

    def __setattr__(self, key, value):
        self[key] = value

    def init(self, args=None):
        """Overlay environment-derived settings, then explicit overrides."""
        self.update(self._settings_from_env())
        if args:
            self.update(args)

    def _val_from_env(self, env, attr):
        """Read one env var; apply the converter when attr is (name, func)."""
        raw = os.environ[env]
        if isinstance(attr, tuple):
            raw = attr[1](raw)
        return raw

    def _settings_from_env(self):
        """Collect every mapped environment variable that is actually set."""
        found = {}
        for env, attr in const.ENV_TO_ATTR.items():
            if env not in os.environ:
                continue
            key = attr if isinstance(attr, str) else attr[0]
            found[key] = self._val_from_env(env, attr)
        return found
settings = Settings(const.DEFAULT_SETTINGS)

View File

@@ -1,170 +0,0 @@
# -*- encoding: utf-8 -*-
# Baseline configuration (flat-key layout). Every key can be overridden
# through the environment-variable mappings in ENV_TO_ATTR below; the
# Settings object is seeded with this dict.
DEFAULT_SETTINGS = {
"data_version": 4.0,
"host_ip": "localhost:8080",
"sleep_time": 7200,
"times": 20,
"user_name": "admin",
"password": "adminadmin",
"download_path": "/downloads/Bangumi/",
"method": "pn",
"enable_group_tag": False,
"info_path": "/config/bangumi.json",
"not_contain": r"720|\d+-\d+",
"connect_retry_interval": 5,
"rule_name_re": r"\:|\/|\.",
"debug_mode": False,
"remove_bad_torrent": False,
"dev_debug": False,
"eps_complete": False,
"webui_port": 7892,
"language": "zh",
# NOTE(review): hard-coded TMDB API key shipped in source — consider
# moving it to an environment variable.
"tmdb_api": "32b19d6a05b512190a056fa4e747cbbc",
"enable_tmdb": False,
"socks": None,
"enable_rss_collector": True,
"enable_rename": True,
"reset_folder": False,
"log_path": "/config/log.txt",
"refresh_rss": False,
}
# Environment-variable overrides for DEFAULT_SETTINGS.
# Each value is either a settings key (env string copied verbatim) or a
# (key, converter) tuple used to coerce the raw env string.
ENV_TO_ATTR = {
"AB_DOWNLOADER_HOST": "host_ip",
"AB_INTERVAL_TIME": ("sleep_time", lambda e: float(e)),
"AB_RENAME_FREQ": ("times", lambda e: float(e)),
"AB_DOWNLOADER_USERNAME": "user_name",
"AB_DOWNLOADER_PASSWORD": "password",
"AB_RSS": "rss_link",
"AB_DOWNLOAD_PATH": "download_path",
"AB_METHOD": "method",
"AB_GROUP_TAG": ("enable_group_tag", lambda e: e.lower() in ("true", "1", "t")),
"AB_NOT_CONTAIN": "not_contain",
"AB_DEBUG_MODE": ("debug_mode", lambda e: e.lower() in ("true", "1", "t")),
"AB_EP_COMPLETE": (
"eps_complete",
lambda e: e.lower() in ("true", "1", "t")
),
"AB_REMOVE_BAD_BT": ("remove_bad_torrent", lambda e: e.lower() in ("true", "1", "t")),
"AB_WEBUI_PORT": ("webui_port", lambda e: int(e)),
"AB_HTTP_PROXY": "http_proxy",
"AB_LANGUAGE": "language",
"AB_ENABLE_TMDB": ("enable_tmdb", lambda e: e.lower() in ("true", "1", "t")),
"AB_SOCKS": "socks",
"AB_RENAME": ("enable_rename", lambda e: e.lower() in ("true", "1", "t")),
"AB_RSS_COLLECTOR": ("enable_rss_collector", lambda e: e.lower() in ("true", "1", "t")),
"AB_RESET_FOLDER": ("reset_folder", lambda e: e.lower() in ("true", "1", "t")),
"AB_REFRESH_RSS": ("refresh_rss", lambda e: e.lower() in ("true", "1", "t")),
}
# Per-section defaults and env mappings for the ConfigParser-based config.
# *_ENV maps an environment variable to an ini option name; *_DEFAULT
# supplies the option's fallback value when the env var is unset.
DOWNLOADER_DEFAULT = {
    "Host": "localhost:8080",
    "Username": "admin",
    "Password": "adminadmin",
    "DownloadPath": "/downloads/Bangumi/",
    # NOTE(review): trailing "/" differs from DEFAULT_SETTINGS' "not_contain"
    # pattern — confirm whether it is intentional.
    "Filter": r"720|\d+-\d+/",
}
DOWNLOADER_ENV = {
    "AB_DOWNLOADER_HOST": "Host",
    "AB_DOWNLOADER_USERNAME": "Username",
    "AB_DOWNLOADER_PASSWORD": "Password",
    "AB_DOWNLOAD_PATH": "DownloadPath",
    "AB_NOT_CONTAIN": "Filter",
}
DEFAULT_ENV = {
    "AB_INTERVAL_TIME": "SleepTime",
    "AB_RENAME_FREQ": "RenameFreq",
    "AB_RSS_COLLECTOR": "EnableParser",
    "AB_RENAME": "EnableRenamer",
    "AB_EP_COMPLETE": "SeasonCollect",
}
DEFAULT_DEFAULT = {
    "SleepTime": 7200,
    "RenameFreq": 20,
    "EnableParser": True,
    "EnableRenamer": True,
    "SeasonCollect": False,
}
PARSER_ENV = {
    "AB_RSS": "URL",
    "AB_NOT_CONTAIN": "Filter",
    "AB_LANGUAGE": "Language",
}
PARSER_DEFAULT = {
    "URL": "",
    "Filter": r"720|\d+-\d+/",
    "Language": "zh",
}
TMDB_ENV = {
    "AB_ENABLE_TMDB": "EnableTMDB",
    "AB_LANGUAGE": "Language",
}
TMDB_DEFAULT = {
    "EnableTMDB": False,
    "Language": "zh",
}
RENAME_ENV = {
    "AB_METHOD": "RenameMethod",
}
RENAME_DEFAULT = {
    "RenameMethod": "pn",
}
NETWORK_ENV = {
    "AB_WEBUI_PORT": "WebUIPort",
    "AB_HTTP_PROXY": "HTTPProxy",
    "AB_SOCKS": "Socks",
}
NETWORK_DEFAULT = {
    "WebUIPort": 7892,
    "HTTPProxy": "",
    "Socks": "",
}
# Registries keyed by ini section name; the two dicts share the same keys.
ENV_DICT = {
    "DEFAULT": DEFAULT_ENV,
    "DOWNLOADER": DOWNLOADER_ENV,
    "PARSER": PARSER_ENV,
    "TMDB": TMDB_ENV,
    "RENAME": RENAME_ENV,
    "NETWORK": NETWORK_ENV,
}
DEFAULT_DICT = {
    "DEFAULT": DEFAULT_DEFAULT,
    "DOWNLOADER": DOWNLOADER_DEFAULT,
    "PARSER": PARSER_DEFAULT,
    "TMDB": TMDB_DEFAULT,
    "RENAME": RENAME_DEFAULT,
    "NETWORK": NETWORK_DEFAULT,
}
class BCOLORS:
    """ANSI escape codes for colored terminal output."""

    HEADER = "\033[95m"
    OKBLUE = "\033[94m"
    OKCYAN = "\033[96m"
    OKGREEN = "\033[92m"
    WARNING = "\033[93m"
    FAIL = "\033[91m"
    ENDC = "\033[0m"
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"

    @staticmethod
    def _(color: str, *args: str) -> str:
        """Join args with ', ' and wrap the result in the given color code."""
        return color + ", ".join(str(item) for item in args) + BCOLORS.ENDC

View File

@@ -1,46 +0,0 @@
from pydantic import BaseSettings
from configparser import ConfigParser
import os
from .const import ENV_DICT, DEFAULT_DICT
config = ConfigParser()
def get_attr_from_env(env_dict: dict, default_dict: dict):
    """Build one config section: env var values win, defaults fill the gaps.

    ``env_dict`` maps environment variable names to option names (or
    ``(option, converter)`` tuples — only the option name is used here;
    the raw env string is stored unconverted). ``default_dict`` supplies
    fallback values for options whose env var is unset.
    """
    section = {}
    for env_name, attr in env_dict.items():
        if env_name not in os.environ:
            continue
        key = attr if isinstance(attr, str) else attr[0]
        section[key] = os.environ[env_name]
    for key, value in default_dict.items():
        section.setdefault(key, value)
    return section
def init_config():
# Build every section from env vars + defaults and persist it to disk so
# later runs read the same config.ini.
# NOTE(review): assumes the "config" directory exists — open() raises
# FileNotFoundError otherwise; confirm the caller creates it.
for section, env_dict in ENV_DICT.items():
config[section] = get_attr_from_env(env_dict, DEFAULT_DICT[section])
with open("config/config.ini", "w") as f:
config.write(f)
# Import-time side effect: prefer a dev config, then the regular one,
# otherwise generate a fresh config.ini from environment/defaults.
if os.path.isfile("config/config_dev.ini"):
config.read("config/config_dev.ini")
elif os.path.isfile("config/config.ini"):
config.read("config/config.ini")
else:
init_config()
class Setting(BaseSettings):
# Sections exposed as class attributes (configparser SectionProxy objects).
# NOTE(review): the TMDB section present in ENV_DICT is not exposed here —
# confirm that is intentional.
DEFAULT = config["DEFAULT"]
DOWNLOADER = config["DOWNLOADER"]
PARSER = config["PARSER"]
RENAME = config["RENAME"]
NETWORK = config["NETWORK"]
# Singleton consumed by the rest of the application.
settings = Setting()

View File

@@ -1,11 +0,0 @@
from conf import settings
def getClient():
    """Build and return the configured download client.

    Only qBittorrent is supported for now.
    TODO: multi-downloader support — read the downloader type from
    settings and return the matching client class.
    """
    from downloader.qb_downloader import QbDownloader

    return QbDownloader(settings.host_ip, settings.user_name, settings.password)

View File

@@ -1,9 +1,5 @@
from autobangumi.conf.setting import setting
def main():
pass
from module.app import run
if __name__ == "__main__":
print(setting.DOWNLOADER["HOST"])
run()

View File

@@ -2,36 +2,31 @@ import os
import time
import logging
from __version__ import version
from conf import settings, parse
from conf.log import setup_logger
from utils import json_config
from module.conf import settings, setup_logger, LOG_PATH, DATA_PATH, VERSION
from module.utils import json_config
from core import RSSAnalyser, DownloadClient, Renamer, FullSeasonGet
from module.core import RSSAnalyser, DownloadClient, Renamer, FullSeasonGet
logger = logging.getLogger(__name__)
def reset_log():
try:
os.remove(settings.log_path)
except FileNotFoundError:
pass
if os.path.exists(LOG_PATH):
os.remove(LOG_PATH)
def load_data_file():
info_path = settings.info_path
if not os.path.exists(info_path):
if not os.path.exists(DATA_PATH):
bangumi_data = {
"rss_link": settings.rss_link,
"data_version": settings.data_version,
"rss_link": settings.rss_parser.link,
"data_version": settings.program.data_version,
"bangumi_info": []
}
logger.info("Building data information...")
else:
bangumi_data = json_config.load(info_path)
if bangumi_data["data_version"] != settings.data_version or bangumi_data["rss_link"] != settings.rss_link:
bangumi_data = json_config.load(DATA_PATH)
if bangumi_data["data_version"] != settings.program.data_version or bangumi_data["rss_link"] != settings.rss_parser.link:
bangumi_data = {
"rss_link": settings.rss_link,
"data_version": settings.data_version,
@@ -42,8 +37,7 @@ def load_data_file():
def save_data_file(bangumi_data):
info_path = settings.info_path
json_config.save(info_path, bangumi_data)
json_config.save(DATA_PATH, bangumi_data)
logger.debug("Saved")
@@ -56,56 +50,40 @@ def show_info():
logger.info(r" /_/ \_\__,_|\__\___/|____/ \__,_|_| |_|\__, |\__,_|_| |_| |_|_|")
logger.info(" __/ | ")
logger.info(" |___/ ")
logger.info(f"Version {version} Author: EstrellaXD Twitter: https://twitter.com/Estrella_Pan")
logger.info(f"Version {VERSION} Author: EstrellaXD Twitter: https://twitter.com/Estrella_Pan")
logger.info("GitHub: https://github.com/EstrellaXD/Auto_Bangumi/")
logger.info("Starting AutoBangumi...")
def main_process(bangumi_data, download_client: DownloadClient):
rename = Renamer(download_client)
if settings.reset_folder:
rename.set_folder()
rss_analyser = RSSAnalyser()
while True:
times = 0
if settings.enable_rss_collector:
if settings.rss_parser.enable:
rss_analyser.run(bangumi_data["bangumi_info"], download_client)
if settings.eps_complete and bangumi_data["bangumi_info"] != []:
if settings.bangumi_manage.eps_complete and bangumi_data["bangumi_info"] != []:
FullSeasonGet().eps_complete(bangumi_data["bangumi_info"], download_client)
logger.info("Running....")
save_data_file(bangumi_data)
while times < settings.times:
if settings.enable_rename:
while times < settings.program.times:
if settings.bangumi_manage.enable:
rename.run()
times += 1
time.sleep(settings.sleep_time/settings.times)
time.sleep(settings.program.sleep_time/settings.program.times)
def run():
# DEBUG 模式初始化
args = parse()
if args.debug:
try:
from conf.const_dev import DEV_SETTINGS
settings.init(DEV_SETTINGS)
except ModuleNotFoundError:
logger.debug("Please copy `const_dev.py` to `const_dev.py` to use custom settings")
else:
settings.init()
# 初始化
reset_log()
setup_logger()
show_info()
download_client = DownloadClient()
download_client.init_downloader()
if settings.rss_link is None:
if settings.rss_parser.link is None:
logger.error("Please add RIGHT RSS url.")
quit()
download_client.rss_feed()
bangumi_data = load_data_file()
# 主程序循环
main_process(bangumi_data, download_client)
if __name__ == "__main__":
run()
main_process(bangumi_data, download_client)

6
module/conf/__init__.py Normal file
View File

@@ -0,0 +1,6 @@
from .config import settings
from .log import setup_logger, LOG_PATH
from .version import VERSION
TMDB_API = "32b19d6a05b512190a056fa4e747cbbc"
DATA_PATH = "data/data.json"

106
module/conf/const.py Normal file
View File

@@ -0,0 +1,106 @@
# -*- encoding: utf-8 -*-
# Baseline configuration, grouped by section (the new nested layout that
# replaces the flat config.ini keys). Environment variables mapped in
# ENV_TO_ATTR below override individual entries.
DEFAULT_SETTINGS = {
# Scheduling / web UI basics.
"program": {
"sleep_time": 7200,
"times": 20,
"webui_port": 7892,
"data_version": 4.0
},
# Torrent client connection settings.
"downloader": {
"type": "qbittorrent",
"host": "127.0.0.1:8080",
"username": "admin",
"password": "adminadmin",
"path": "/downloads/Bangumi",
"ssl": False
},
# RSS source and title-filter settings.
"rss_parser": {
"enable": True,
"type": "mikan",
"link": "",
"enable_tmdb": False,
"filter": ["720", "\\d+-\\d+"],
"language": "zh"
},
# Renaming / library-management behavior.
"bangumi_manage": {
"enable": True,
"eps_complete": False,
"rename_method": "pn",
"group_tag": False,
"remove_bad_torrent": False
},
# Logging / debug switches.
"debug": {
"enable": False,
"level": "info",
"file": "bangumi.log",
"dev_debug": False
},
# Outbound proxy ("http" or "socks5").
"proxy": {
"enable": False,
"type": "http",
"host": "",
"port": 1080,
"username": "",
"password": ""
},
# Push notifications on new episodes.
"notification": {
"enable": False,
"type": "telegram",
"token": "",
"chat_id": ""
}
}
def _to_bool(e: str) -> bool:
    """Interpret common truthy env strings ("true", "1", "t", any case)."""
    # Bug fix: the previous inline lambdas compared e.lower() against
    # ("True", "1", "t") — lower() output can never equal "True", so
    # AB_DEBUG_MODE=true (etc.) silently evaluated to False.
    return e.lower() in ("true", "1", "t")


# Environment-variable overrides for DEFAULT_SETTINGS, grouped by section.
# Each value is either a settings key (env string copied verbatim) or a
# (key, converter) tuple used to coerce the raw env string.
ENV_TO_ATTR = {
    "program": {
        "AB_INTERVAL_TIME": ("sleep_time", lambda e: float(e)),
        "AB_RENAME_FREQ": ("times", lambda e: float(e)),
        "AB_WEBUI_PORT": ("webui_port", lambda e: int(e)),
    },
    "downloader": {
        "AB_DOWNLOADER_HOST": "host",
        "AB_DOWNLOADER_USERNAME": "username",
        "AB_DOWNLOADER_PASSWORD": "password",
        "AB_DOWNLOAD_PATH": "path",
    },
    "rss_parser": {
        "AB_RSS_COLLECTOR": ("enable", _to_bool),
        "AB_RSS": "link",
        "AB_NOT_CONTAIN": ("filter", lambda e: e.split("|")),
        "AB_LANGUAGE": "language",
        "AB_ENABLE_TMDB": ("enable_tmdb", _to_bool),
    },
    "bangumi_manage": {
        "AB_RENAME": ("enable", _to_bool),
        # Bug fix: DEFAULT_SETTINGS' key is "rename_method", not "method".
        "AB_METHOD": "rename_method",
        "AB_GROUP_TAG": ("group_tag", _to_bool),
        "AB_EP_COMPLETE": ("eps_complete", _to_bool),
        "AB_REMOVE_BAD_BT": ("remove_bad_torrent", _to_bool),
    },
    "debug": {
        "AB_DEBUG_MODE": ("enable", _to_bool),
    },
    "proxy": {
        "AB_HTTP_PROXY": "http",
        "AB_SOCKS": "socks",
    },
}
class BCOLORS:
    """ANSI escape sequences for colorizing console messages."""

    HEADER = "\033[95m"
    OKBLUE = "\033[94m"
    OKCYAN = "\033[96m"
    OKGREEN = "\033[92m"
    WARNING = "\033[93m"
    FAIL = "\033[91m"
    ENDC = "\033[0m"
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"

    @staticmethod
    def _(color: str, *args: str) -> str:
        """Render args as a ', '-separated string wrapped in the color code."""
        parts = [str(a) for a in args]
        joined = ", ".join(parts)
        return f"{color}{joined}{BCOLORS.ENDC}"

View File

@@ -1,9 +1,11 @@
import logging
from autobangumi.conf import settings
from module.conf import settings
LOG_PATH = "data/log.txt"
def setup_logger():
level = logging.DEBUG if settings.debug_mode else logging.INFO
level = logging.DEBUG if settings.debug.enable else logging.INFO
logging.addLevelName(logging.DEBUG, 'DEBUG:')
logging.addLevelName(logging.INFO, 'INFO:')
logging.addLevelName(logging.WARNING, 'WARNING:')
@@ -13,7 +15,7 @@ def setup_logger():
format=LOGGING_FORMAT,
encoding="utf-8",
handlers=[
logging.FileHandler(settings.log_path),
logging.FileHandler(LOG_PATH, encoding="utf-8"),
logging.StreamHandler(),
]
)

View File

@@ -1,10 +1,10 @@
import re
from core import FullSeasonGet, DownloadClient, RSSAnalyser
from utils import json_config
from conf import settings
from module.core import FullSeasonGet, DownloadClient, RSSAnalyser
from module.utils import json_config
from module.conf import DATA_PATH
from ab_decorator import api_failed
from module.ab_decorator import api_failed
class APIProcess:
@@ -31,24 +31,24 @@ class APIProcess:
@staticmethod
def reset_rule():
data = json_config.load(settings.info_path)
data = json_config.load(DATA_PATH)
data["bangumi_info"] = []
json_config.save(settings.info_path, data)
json_config.save(DATA_PATH, data)
return "Success"
@staticmethod
def remove_rule(name):
datas = json_config.load(settings.info_path)["bangumi_info"]
datas = json_config.load(DATA_PATH)["bangumi_info"]
for data in datas:
if re.search(name.lower(), data["title_raw"].lower()):
datas.remove(data)
json_config.save(settings.info_path, datas)
json_config.save(DATA_PATH, datas)
return "Success"
return "Not matched"
@staticmethod
def add_rule(title, season):
data = json_config.load(settings.info_path)
data = json_config.load(DATA_PATH)
extra_data = {
"official_title": title,
"title_raw": title,
@@ -60,13 +60,5 @@ class APIProcess:
"added": False,
}
data["bangumi_info"].append(extra_data)
json_config.save(settings.info_path, data)
return "Success"
if __name__ == '__main__':
from conf.const_dev import DEV_SETTINGS
settings.init(DEV_SETTINGS)
url = "https://mikanani.me/RSS/Bangumi?bangumiId=2621&subgroupid=382"
API = APIProcess()
API.download_collection(url)
json_config.save(DATA_PATH, data)
return "Success"

View File

@@ -2,18 +2,14 @@ import re
import logging
import os
from autobangumi.downloader import getClient
from autobangumi.downloader.exceptions import ConflictError
from module.downloader import getClient
from module.downloader.exceptions import ConflictError
from autobangumi.conf import settings
from module.conf import settings
logger = logging.getLogger(__name__)
DOWNLOADER = settings.DOWNLOADER
DEBUG = settings.DEBUG
class DownloadClient:
def __init__(self):
self.client = getClient()
@@ -26,16 +22,16 @@ class DownloadClient:
"rss_refresh_interval": 30,
}
self.client.prefs_init(prefs=prefs)
if DOWNLOADER["DownloadPath"] == "":
if settings.downloader.download_path == "":
prefs = self.client.get_app_prefs()
DOWNLOADER["DownloadPath"] = os.path.join(prefs["save_path"], "Bangumi")
settings.downloader.path = os.path.join(prefs["save_path"], "Bangumi")
def set_rule(self, info: dict, rss_link):
official_name, raw_name, season, group = info["official_title"], info["title_raw"], info["season"], info["group"]
rule = {
"enable": True,
"mustContain": raw_name,
"mustNotContain": DOWNLOADER["Filter"],
"mustNotContain": "|".join(settings.rss_parser.filter),
"useRegex": True,
"episodeFilter": "",
"smartFilter": False,
@@ -43,47 +39,34 @@ class DownloadClient:
"affectedFeeds": [rss_link],
"ignoreDays": 0,
"lastMatch": "",
"addPaused": DEBUG["Enable"],
"addPaused": settings.debug.dev_debug,
"assignedCategory": "Bangumi",
"savePath": str(
os.path.join(
DOWNLOADER["Path"],
re.sub(settings.rule_name_re, " ", official_name).strip(),
settings.downloader.path,
re.sub(r"\:|\/|\.", " ", official_name).strip(),
f"Season {season}",
)
),
}
rule_name = f"[{group}] {official_name}" if settings.enable_group_tag else official_name
rule_name = f"[{group}] {official_name}" if settings.bangumi_manage.group_tag else official_name
self.client.rss_set_rule(rule_name=f"{rule_name} S{season}", rule_def=rule)
logger.info(f"Add {official_name} Season {season}")
def rss_feed(self):
if not settings.refresh_rss:
if self.client.get_rss_info() == settings.rss_link:
logger.info("RSS Already exists.")
else:
logger.info("No feed exists, start adding feed.")
self.client.rss_add_feed(url=settings.rss_link, item_path="Mikan_RSS")
logger.info("Add RSS Feed successfully.")
# TODO: 定时刷新 RSS
if self.client.get_rss_info() == settings.rss_parser.link:
logger.info("RSS Already exists.")
else:
try:
self.client.rss_remove_item(item_path="Mikan_RSS")
logger.info("Remove RSS Feed successfully.")
except ConflictError:
logger.info("No feed exists, start adding feed.")
try:
self.client.rss_add_feed(url=settings.rss_link, item_path="Mikan_RSS")
logger.info("Add RSS Feed successfully.")
except ConnectionError:
logger.warning("Error with adding RSS Feed.")
except ConflictError:
logger.info("RSS Already exists.")
logger.info("No feed exists, start adding feed.")
self.client.rss_add_feed(url=settings.rss_parser.link, item_path="Mikan_RSS")
logger.info("Add RSS Feed successfully.")
def add_collection_feed(self, rss_link, item_path):
self.client.rss_add_feed(url=rss_link, item_path=item_path)
logger.info("Add RSS Feed successfully.")
def add_rules(self, bangumi_info, rss_link=settings.rss_link):
def add_rules(self, bangumi_info, rss_link=settings.rss_parser.link):
logger.debug("Start adding rules.")
for info in bangumi_info:
if not info["added"]:

View File

@@ -2,8 +2,8 @@ import os.path
import re
import logging
from autobangumi.conf import settings
from autobangumi.network import RequestContent
from module.conf import settings
from module.network import RequestContent
from .download_client import DownloadClient
logger = logging.getLogger(__name__)

View File

@@ -6,8 +6,8 @@ from pathlib import PurePath, PureWindowsPath
from .download_client import DownloadClient
from autobangumi.conf import settings
from autobangumi.parser import TitleParser
from module.conf import settings
from module.parser import TitleParser
logger = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ class Renamer:
@staticmethod
def split_path(path: str):
suffix = os.path.splitext(path)[-1]
path = path.replace(settings.download_path, "")
path = path.replace(settings.downloader.path, "")
path_parts = PurePath(path).parts \
if PurePath(path).name != path \
else PureWindowsPath(path).parts
@@ -63,7 +63,7 @@ class Renamer:
logger.warning("Wrong bangumi path, please check your qbittorrent settings.")
else:
try:
new_name = self._renamer.download_parser(name, folder_name, season, suffix, settings.method)
new_name = self._renamer.download_parser(name, folder_name, season, suffix, settings.bangumi_manage.rename_method)
if path_name != new_name:
self.client.rename_torrent_file(torrent_hash, path_name, new_name)
rename_count += 1
@@ -73,7 +73,7 @@ class Renamer:
logger.warning(f"{path_name} rename failed")
logger.warning(f"Folder name: {folder_name}, Season: {season}, Suffix: {suffix}")
logger.debug(e)
if settings.remove_bad_torrent:
if settings.bangumi_manage.remove_bad_torrent:
self.client.delete_torrent(torrent_hash)
self.print_result(torrent_count, rename_count)
@@ -82,7 +82,7 @@ class Renamer:
for info in recent_info:
torrent_hash = info.hash
_, season, folder_name, _, download_path = self.split_path(info.content_path)
new_path = os.path.join(settings.download_path, folder_name, f"Season {season}")
new_path = os.path.join(settings.downloader.path, folder_name, f"Season {season}")
# print(new_path)
self.client.move_torrent(torrent_hash, new_path)

View File

@@ -1,12 +1,12 @@
import re
import logging
from network import RequestContent
from parser import TitleParser
from module.network import RequestContent
from module.parser import TitleParser
from conf import settings
from module.conf import settings
from core import DownloadClient
from module.core import DownloadClient
logger = logging.getLogger(__name__)
@@ -17,8 +17,8 @@ class RSSAnalyser:
self._request = RequestContent()
def rss_to_datas(self, bangumi_info: list) -> list:
rss_torrents = self._request.get_torrents(settings.rss_link)
self._request.close_session()
rss_torrents = self._request.get_torrents(settings.rss_parser.link)
self._request.close()
for torrent in rss_torrents:
raw_title = torrent.name
extra_add = True
@@ -36,7 +36,7 @@ class RSSAnalyser:
def rss_to_data(self, url) -> dict:
rss_torrents = self._request.get_torrents(url)
self._request.close_session()
self._request.close()
for torrent in rss_torrents:
try:
data = self._title_analyser.return_dict(torrent.name)
@@ -48,7 +48,7 @@ class RSSAnalyser:
logger.info("Start collecting RSS info.")
try:
self.rss_to_datas(bangumi_info)
download_client.add_rules(bangumi_info, rss_link=settings.rss_link)
download_client.add_rules(bangumi_info, rss_link=settings.rss_parser.link)
except Exception as e:
logger.debug(e)
logger.info("Finished")

View File

@@ -0,0 +1,11 @@
from module.conf import settings
def getClient():
    """Return a download client built from the downloader settings section.

    Only qBittorrent is supported for now.
    TODO: multi-downloader support — pick the client class based on the
    downloader type configured in settings.
    """
    from module.downloader.qb_downloader import QbDownloader

    return QbDownloader(
        settings.downloader.host,
        settings.downloader.username,
        settings.downloader.password,
    )

View File

@@ -4,8 +4,8 @@ import time
from qbittorrentapi import Client, LoginFailed
from qbittorrentapi.exceptions import Conflict409Error
from autobangumi.conf import settings
from autobangumi.ab_decorator import qb_connect_failed_wait
from module.conf import settings
from module.ab_decorator import qb_connect_failed_wait
from .exceptions import ConflictError

View File

@@ -1,6 +1,6 @@
import requests
from autobangumi.conf import settings
from module.conf import settings
class PostNotification:

View File

@@ -7,7 +7,7 @@ import logging
from bs4 import BeautifulSoup
from autobangumi.conf import settings
from module.conf import settings
logger = logging.getLogger(__name__)
@@ -15,16 +15,17 @@ logger = logging.getLogger(__name__)
class RequestURL:
def __init__(self):
self.session = requests.session()
if settings.NETWORK["HTTP"] is not None:
self.session.proxies = {
"https": settings.NETWORK["HTTP"],
"http": settings.NETWORK["HTTP"],
}
elif settings.NETWORK["Socks"] is not None:
socks_info = settings.NETWORK["Socks"].split(",")
socks.set_default_proxy(socks.SOCKS5, addr=socks_info[0], port=int(socks_info[1]), rdns=True,
username=socks_info[2], password=socks_info[3])
socket.socket = socks.socksocket
if settings.proxy.enable:
if settings.proxy.type == "http":
url = f"http://{settings.proxy.host}:{settings.proxy.port}"
self.session.proxies = {
"https": url,
"http": url,
}
elif settings.proxy.type == "socks5":
socks.set_default_proxy(socks.SOCKS5, addr=settings.proxy.host, port=settings.proxy.port, rdns=True,
username=settings.proxy.username, password=settings.proxy.password)
socket.socket = socks.socksocket
self.header = {
"user-agent": "Mozilla/5.0",
"Accept": "application/xml"

View File

@@ -1,4 +1,4 @@
from autobangumi.network import RequestContent
from module.network import RequestContent
class BgmAPI:

View File

@@ -81,7 +81,8 @@ class RawParser:
def name_process(name: str):
name_en, name_zh, name_jp = None, None, None
name = name.strip()
split = re.split("/|\s{2}|-\s{2}", name.replace("仅限港澳台地区", ""))
name = re.sub(r"[(]仅限港澳台地区[)]", "", name)
split = re.split("/|\s{2}|-\s{2}", name)
while "" in split:
split.remove("")
if len(split) == 1:

View File

@@ -2,8 +2,8 @@ import re
import time
from dataclasses import dataclass
from network import RequestContent
from conf import settings
from module.network import RequestContent
from module.conf import TMDB_API
@dataclass
@@ -19,9 +19,9 @@ class TMDBInfo:
class TMDBMatcher:
def __init__(self):
self.search_url = lambda e: \
f"https://api.themoviedb.org/3/search/tv?api_key={settings.tmdb_api}&page=1&query={e}&include_adult=false"
f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API}&page=1&query={e}&include_adult=false"
self.info_url = lambda e: \
f"https://api.themoviedb.org/3/tv/{e}?api_key={settings.tmdb_api}&language=zh-CN"
f"https://api.themoviedb.org/3/tv/{e}?api_key={TMDB_API}&language=zh-CN"
self._request = RequestContent()
def is_animation(self, tv_id) -> bool:
@@ -70,10 +70,4 @@ class TMDBMatcher:
title_jp = info_content.get("original_name")
title_zh = info_content.get("name")
year_number = info_content.get("first_air_date").split("-")[0]
return TMDBInfo(id, title_jp, title_zh, season, last_season, year_number)
if __name__ == "__main__":
test = "辉夜大小姐"
info = TMDBMatcher().tmdb_search(test)
print(f"{info.title_zh}({info.year_number})")
return TMDBInfo(id, title_jp, title_zh, season, last_season, year_number)

View File

@@ -1,10 +1,10 @@
import logging
from parser.analyser import RawParser, DownloadParser, TMDBMatcher
from conf import settings
from .analyser import RawParser, DownloadParser, TMDBMatcher
from module.conf import settings
logger = logging.getLogger(__name__)
LANGUAGE = settings.rss_parser.language
class TitleParser:
def __init__(self):
@@ -15,10 +15,11 @@ class TitleParser:
def raw_parser(self, raw: str):
return self._raw_parser.analyse(raw)
def download_parser(self, download_raw, folder_name, season, suffix, method=settings.method):
def download_parser(self, download_raw, folder_name, season, suffix, method=settings.bangumi_manage.method):
return self._download_parser.download_rename(download_raw, folder_name, season, suffix, method)
def tmdb_parser(self, title: str, season: int):
official_title, tmdb_season = None, None
try:
tmdb_info = self._tmdb_parser.tmdb_search(title)
@@ -27,9 +28,9 @@ class TitleParser:
logger.debug(e)
logger.warning("Not Matched with TMDB")
return title, season
if settings.language == "zh":
if LANGUAGE == "zh":
official_title = f"{tmdb_info.title_zh} ({tmdb_info.year_number})"
elif settings.language == "jp":
elif LANGUAGE == "jp":
official_title = f"{tmdb_info.title_jp} ({tmdb_info.year_number})"
tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season
official_title = official_title if official_title else title
@@ -40,10 +41,10 @@ class TitleParser:
episode = self.raw_parser(_raw)
title_search = episode.title_zh if episode.title_zh else episode.title_en
title_raw = episode.title_en if episode.title_en else episode.title_zh
if settings.enable_tmdb:
if settings.rss_parser.enable_tmdb:
official_title, _season = self.tmdb_parser(title_search, episode.season)
else:
official_title = title_search if settings.language == "zh" else title_raw
official_title = title_search if LANGUAGE == "zh" else title_raw
_season = episode.season
data = {
"official_title": official_title,
@@ -60,15 +61,4 @@ class TitleParser:
logger.debug(f"RAW:{_raw} >> {episode.title_en}")
return data
except Exception as e:
logger.debug(e)
if __name__ == '__main__':
import re
from conf.const_dev import DEV_SETTINGS
settings.init(DEV_SETTINGS)
T = TitleParser()
raw = "[梦蓝字幕组]New Doraemon 哆啦A梦新番[716][2022.07.23][AVC][10080P][GB_JP]"
season = int(re.search(r"\d{1,2}", "S02").group())
_dict = T.return_dict(raw)
print(_dict)
logger.debug(e)

View File

@@ -1,7 +1,7 @@
import unittest
from random import randrange
from parser.analyser import RawParser
from module.parser.analyser import RawParser
class TestRawParser(unittest.TestCase):