diff --git a/README.md b/README.md index 27f99ea3..c26612e0 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@
-
-
+
+
diff --git a/src/main.py b/src/main.py
index b5e5d620..435389d2 100644
--- a/src/main.py
+++ b/src/main.py
@@ -16,7 +16,7 @@ from module.conf import VERSION, settings
logger = logging.getLogger(__name__)
-main_process = multiprocessing.Process(target=app.run)
+main_process = multiprocessing.Process(target=app.run, args=(settings,))
@router.get("/api/v1/restart", tags=["program"])
@@ -27,7 +27,8 @@ async def restart():
logger.info("Restarting...")
else:
logger.info("Starting...")
- main_process = multiprocessing.Process(target=app.run)
+ settings.reload()
+ main_process = multiprocessing.Process(target=app.run, args=(settings,))
main_process.start()
logger.info("Restarted")
return {"status": "success"}
@@ -50,7 +51,8 @@ async def start():
if main_process.is_alive():
return {"status": "failed", "reason": "Already started"}
logger.info("Starting...")
- main_process = multiprocessing.Process(target=app.run)
+ settings.reload()
+ main_process = multiprocessing.Process(target=app.run, args=(settings,))
main_process.start()
logger.info("Started")
return {"status": "success"}
diff --git a/src/module/api.py b/src/module/api.py
index b0664dd5..48c66129 100644
--- a/src/module/api.py
+++ b/src/module/api.py
@@ -12,7 +12,7 @@ from module.models import Config
router = FastAPI()
-api_func = APIProcess()
+api_func = APIProcess(settings)
@router.on_event("startup")
diff --git a/src/module/app.py b/src/module/app.py
index e6f1c1ee..34540dd9 100644
--- a/src/module/app.py
+++ b/src/module/app.py
@@ -2,13 +2,13 @@ import os
import time
import logging
-from module.conf import settings, setup_logger, LOG_PATH, DATA_PATH, RSSLink, VERSION
+from module.conf import setup_logger, LOG_PATH, DATA_PATH, RSSLink, VERSION
from module.utils import load_program_data, save_program_data
from module.core import DownloadClient
from module.manager import Renamer, FullSeasonGet
from module.rss import RSSAnalyser
-from module.models import ProgramData
+from module.models import ProgramData, Config
logger = logging.getLogger(__name__)
@@ -19,10 +19,10 @@ def reset_log():
os.remove(LOG_PATH)
-def load_data_file(rss_link: str) -> ProgramData:
+def load_data_file(rss_link: str, data_version) -> ProgramData:
empty_data = ProgramData(
rss_link=rss_link,
- data_version=settings.data_version,
+ data_version=data_version,
)
if not os.path.exists(DATA_PATH):
program_data = empty_data
@@ -30,28 +30,29 @@ def load_data_file(rss_link: str) -> ProgramData:
logger.info("Building data information...")
else:
program_data = load_program_data(DATA_PATH)
- if program_data.rss_link != rss_link or program_data.data_version != settings.data_version:
+ if program_data.rss_link != rss_link or program_data.data_version != data_version:
program_data = empty_data
logger.info("Rebuilding data information...")
return program_data
-def main_process(program_data: ProgramData, download_client: DownloadClient):
- rename = Renamer(download_client, settings)
- rss_analyser = RSSAnalyser(settings)
+def main_process(program_data: ProgramData, download_client: DownloadClient, _settings: Config):
+ rename = Renamer(download_client, _settings)
+ rss_analyser = RSSAnalyser(_settings)
while True:
times = 0
- if settings.rss_parser.enable:
- rss_analyser.run(program_data.bangumi_info, download_client, program_data.rss_link)
- if settings.bangumi_manage.eps_complete and program_data.bangumi_info != []:
- FullSeasonGet().eps_complete(program_data.bangumi_info, download_client)
+ if _settings.rss_parser.enable:
+ rss_analyser.run(program_data.bangumi_info, program_data.rss_link)
+ download_client.add_rules(program_data.bangumi_info, program_data.rss_link)
+ if _settings.bangumi_manage.eps_complete and program_data.bangumi_info != []:
+ FullSeasonGet(settings=_settings).eps_complete(program_data.bangumi_info, download_client)
logger.info("Running....")
save_program_data(DATA_PATH, program_data)
- while times < settings.program.rename_times:
- if settings.bangumi_manage.enable:
+ while times < _settings.program.rename_times:
+ if _settings.bangumi_manage.enable:
rename.rename()
times += 1
- time.sleep(settings.program.sleep_time / settings.program.rename_times)
+ time.sleep(_settings.program.sleep_time / _settings.program.rename_times)
def show_info():
@@ -63,9 +64,8 @@ def show_info():
logger.info("Starting AutoBangumi...")
-def run():
+def run(settings: Config):
# 初始化
- settings.reload()
rss_link = RSSLink()
reset_log()
setup_logger()
@@ -73,10 +73,10 @@ def run():
if settings.rss_parser.token in ["", "token", None]:
logger.error("Please set your RSS token in config file.")
exit(1)
- download_client = DownloadClient()
+ download_client = DownloadClient(settings)
download_client.auth()
download_client.init_downloader()
download_client.rss_feed(rss_link)
- bangumi_data = load_data_file(rss_link)
+ bangumi_data = load_data_file(rss_link, settings.data_version)
# 主程序循环
- main_process(bangumi_data, download_client)
+ main_process(bangumi_data, download_client, settings)
diff --git a/src/module/core/api_func.py b/src/module/core/api_func.py
index 4374cfe9..071fe083 100644
--- a/src/module/core/api_func.py
+++ b/src/module/core/api_func.py
@@ -5,7 +5,7 @@ from module.core import DownloadClient
from module.manager import FullSeasonGet
from module.rss import RSSAnalyser
from module.utils import json_config
-from module.conf import DATA_PATH, settings
+from module.conf import DATA_PATH
from module.conf.config import save_config_to_file, CONFIG_PATH
from module.models import Config
from module.network import RequestContent
@@ -16,13 +16,14 @@ logger = logging.getLogger(__name__)
class APIProcess:
- def __init__(self):
- self._rss_analyser = RSSAnalyser()
- self._client = DownloadClient()
- self._full_season_get = FullSeasonGet()
+ def __init__(self, settings: Config):
+ self._rss_analyser = RSSAnalyser(settings)
+ self._client = DownloadClient(settings)
+ self._full_season_get = FullSeasonGet(settings)
+ self._custom_url = settings.rss_parser.custom_url
def link_process(self, link):
- return self._rss_analyser.rss_to_data(link, filter=False)
+ return self._rss_analyser.rss_to_data(link, _filter=False)
@api_failed
def download_collection(self, link):
@@ -85,10 +86,9 @@ class APIProcess:
def get_config() -> dict:
return json_config.load(CONFIG_PATH)
- @staticmethod
- def get_rss(full_path: str):
+ def get_rss(self, full_path: str):
url = f"https://mikanani.me/RSS/{full_path}"
- custom_url = settings.rss_parser.custom_url
+ custom_url = self._custom_url
if "://" not in custom_url:
custom_url = f"https://{custom_url}"
with RequestContent() as request:
diff --git a/src/module/core/download_client.py b/src/module/core/download_client.py
index eb4c376a..2d05a111 100644
--- a/src/module/core/download_client.py
+++ b/src/module/core/download_client.py
@@ -3,25 +3,22 @@ import logging
import os
from module.downloader import getClient
-from module.conf import settings
-from module.models import BangumiData
+from module.models import BangumiData, Config
logger = logging.getLogger(__name__)
class DownloadClient:
- def __init__(self):
- self.client = getClient()
+ def __init__(self, settings: Config):
+ self.client = getClient(settings)
self.authed = False
+ self.download_path = settings.downloader.path
+ self.group_tag = settings.bangumi_manage.group_tag
def auth(self):
- host, username, password = settings.downloader.host, settings.downloader.username, settings.downloader.password
- try:
- self.client.auth(host, username, password)
- self.authed = True
- except Exception as e:
- logger.error(f"Can't login {host} by {username}, {e}")
+ self.client.auth()
+ self.authed = True
def init_downloader(self):
prefs = {
@@ -36,16 +33,16 @@ class DownloadClient:
except Exception as e:
logger.warning("Cannot add new category, maybe already exists.")
logger.debug(e)
- if settings.downloader.path == "":
+ if self.download_path == "":
prefs = self.client.get_app_prefs()
- settings.downloader.path = os.path.join(prefs["save_path"], "Bangumi")
+ self.download_path = os.path.join(prefs["save_path"], "Bangumi")
def set_rule(self, info: BangumiData, rss_link):
official_name, raw_name, season, group = info.official_title, info.title_raw, info.season, info.group
rule = {
"enable": True,
"mustContain": raw_name,
- "mustNotContain": "|".join(settings.rss_parser.filter),
+ "mustNotContain": "|".join(info.filter),
"useRegex": True,
"episodeFilter": "",
"smartFilter": False,
@@ -57,28 +54,23 @@ class DownloadClient:
"assignedCategory": "Bangumi",
"savePath": str(
os.path.join(
- settings.downloader.path,
+ self.download_path,
re.sub(r"[:/.]", " ", official_name).strip(),
f"Season {season}",
)
),
}
- rule_name = f"[{group}] {official_name}" if settings.bangumi_manage.group_tag else official_name
+ rule_name = f"[{group}] {official_name}" if self.group_tag else official_name
self.client.rss_set_rule(rule_name=f"{rule_name} S{season}", rule_def=rule)
logger.info(f"Add {official_name} Season {season}")
def rss_feed(self, rss_link, item_path="Mikan_RSS"):
# TODO: 定时刷新 RSS
- if self.client.get_rss_info(rss_link):
- logger.info("RSS Already exists.")
- else:
- logger.info("No feed exists, start adding feed.")
- self.client.rss_add_feed(url=rss_link, item_path="Mikan_RSS")
- logger.info("Add RSS Feed successfully.")
+ self.client.rss_add_feed(url=rss_link, item_path=item_path)
def add_collection_feed(self, rss_link, item_path):
self.client.rss_add_feed(url=rss_link, item_path=item_path)
- logger.info("Add RSS Feed successfully.")
+ logger.info("Add Collection RSS Feed successfully.")
def add_rules(self, bangumi_info: list[BangumiData], rss_link: str):
logger.debug("Start adding rules.")
diff --git a/src/module/downloader/__init__.py b/src/module/downloader/__init__.py
index a7612e6c..b7b33989 100644
--- a/src/module/downloader/__init__.py
+++ b/src/module/downloader/__init__.py
@@ -1,11 +1,15 @@
-from module.conf import settings
+from module.models import Config
-def getClient():
+def getClient(settings: Config):
# TODO 多下载器支持
- # 从 settings 里读取下载器名称,然后返回对应 Client
- if settings.downloader.type == "qbittorrent":
+ type = settings.downloader.type
+ host = settings.downloader.host
+ username = settings.downloader.username
+ password = settings.downloader.password
+ ssl = settings.downloader.ssl
+ if type == "qbittorrent":
from .qb_downloader import QbDownloader
- return QbDownloader()
+ return QbDownloader(host, username, password, ssl)
else:
- raise Exception(f"Unsupported downloader type: {settings.downloader.type}")
+ raise Exception(f"Unsupported downloader type: {type}")
diff --git a/src/module/downloader/qb_downloader.py b/src/module/downloader/qb_downloader.py
index d25ee2e6..9074c916 100644
--- a/src/module/downloader/qb_downloader.py
+++ b/src/module/downloader/qb_downloader.py
@@ -4,33 +4,32 @@ import time
from qbittorrentapi import Client, LoginFailed
from qbittorrentapi.exceptions import Conflict409Error
-from module.conf import settings
from module.ab_decorator import qb_connect_failed_wait
-
from module.downloader.exceptions import ConflictError
logger = logging.getLogger(__name__)
class QbDownloader:
- def __init__(self):
- self._client: Client | None = None
-
- @qb_connect_failed_wait
- def auth(self, host, username, password):
- self._client = Client(
+ def __init__(self, host: str, username: str, password: str, ssl: bool):
+ self._client: Client = Client(
host=host,
username=username,
password=password,
- VERIFY_WEBUI_CERTIFICATE=settings.downloader.ssl
+ VERIFY_WEBUI_CERTIFICATE=ssl
)
+ self.host = host
+ self.username = username
+
+ @qb_connect_failed_wait
+ def auth(self):
while True:
try:
self._client.auth_log_in()
break
except LoginFailed:
- logger.warning(
- f"Can't login qBittorrent Server {host} by {username}, retry in {5} seconds."
+ logger.error(
+ f"Can't login qBittorrent Server {self.host} by {self.username}, retry in {5} seconds."
)
time.sleep(5)
@@ -66,21 +65,35 @@ class QbDownloader:
def torrents_rename_file(self, torrent_hash, old_path, new_path):
self._client.torrents_rename_file(torrent_hash=torrent_hash, old_path=old_path, new_path=new_path)
- def get_rss_info(self, url) -> str | None:
+ def check_rss(self, url, item_path) -> tuple[str | None, bool]:
items = self._client.rss_items()
- for item in items.items():
- if item[1].url == url:
- return item[0]
- return None
+ for key, value in items.items():
+ rss_url = value.get("url")
+ if key == item_path:
+ if rss_url != url:
+ return key, False
+ return None, True
+ else:
+ if rss_url == url:
+ return key, True
+ return None, False
def rss_add_feed(self, url, item_path):
- try:
- path = self.get_rss_info(url)
- if path:
- self.rss_remove_item(path)
- self._client.rss_add_feed(url, item_path)
- except Conflict409Error:
- logger.exception("RSS Exist.")
+ path, added = self.check_rss(url, item_path)
+ if path:
+ if not added:
+ logger.info("RSS Exist, Update URL.")
+ self._client.rss_remove_item(path)
+ self._client.rss_add_feed(url, item_path)
+ else:
+ logger.info("RSS Exist.")
+ else:
+ if added:
+ logger.info("RSS Exist.")
+ else:
+ logger.info("Add new RSS")
+ self._client.rss_add_feed(url, item_path)
+ logger.info("Successfully added RSS")
def rss_remove_item(self, item_path):
try:
diff --git a/src/module/manager/eps_complete.py b/src/module/manager/eps_complete.py
index 5f3cf412..1ebb6794 100644
--- a/src/module/manager/eps_complete.py
+++ b/src/module/manager/eps_complete.py
@@ -2,29 +2,27 @@ import os.path
import re
import logging
-from module.conf import settings
from module.network import RequestContent
from module.core import DownloadClient
-from module.models import BangumiData
+from module.models import BangumiData, Config
logger = logging.getLogger(__name__)
-SEARCH_KEY = ["group", "title_raw", "season_raw", "subtitle", "source", "dpi"]
-CUSTOM_URL = "https://mikanani.me" if settings.rss_parser.custom_url == "" else settings.rss_parser.custom_url
-if "://" not in CUSTOM_URL:
- if re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", CUSTOM_URL):
- CUSTOM_URL = f"http://{CUSTOM_URL}"
- CUSTOM_URL = f"https://{CUSTOM_URL}"
class FullSeasonGet:
- def __init__(self):
- pass
+ def __init__(self, settings: Config):
+ self.SEARCH_KEY = ["group", "title_raw", "season_raw", "subtitle", "source", "dpi"]
+ self.CUSTOM_URL = "https://mikanani.me" if settings.rss_parser.custom_url == "" else settings.rss_parser.custom_url
+ if "://" not in self.CUSTOM_URL:
+ if re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", self.CUSTOM_URL):
+ self.CUSTOM_URL = f"http://{self.CUSTOM_URL}"
+ self.CUSTOM_URL = f"https://{self.CUSTOM_URL}"
+ self.save_path = settings.downloader.path
- @staticmethod
- def init_eps_complete_search_str(data: BangumiData):
+ def init_eps_complete_search_str(self, data: BangumiData):
test = []
- for key in SEARCH_KEY:
+ for key in self.SEARCH_KEY:
data_dict = data.dict()
if data_dict[key] is not None:
test.append(data_dict[key])
@@ -35,17 +33,16 @@ class FullSeasonGet:
def get_season_torrents(self, data: BangumiData):
keyword = self.init_eps_complete_search_str(data)
with RequestContent() as req:
- torrents = req.get_torrents(f"{CUSTOM_URL}/RSS/Search?searchstr={keyword}")
+ torrents = req.get_torrents(f"{self.CUSTOM_URL}/RSS/Search?searchstr={keyword}")
return [torrent for torrent in torrents if data.title_raw in torrent.name]
- @staticmethod
- def collect_season_torrents(data: BangumiData, torrents):
+ def collect_season_torrents(self, data: BangumiData, torrents):
downloads = []
for torrent in torrents:
download_info = {
"url": torrent.torrent_link,
"save_path": os.path.join(
- settings.downloader.path,
+ self.save_path,
data.official_title,
f"Season {data.season}")
}
diff --git a/src/module/rss/__init__.py b/src/module/rss/__init__.py
index 9650cb6c..f7c3f4ee 100644
--- a/src/module/rss/__init__.py
+++ b/src/module/rss/__init__.py
@@ -1 +1 @@
-from .rss_analyser import RSSAnalyser
\ No newline at end of file
+from .rss_analyser import RSSAnalyser
diff --git a/src/module/rss/rss_analyser.py b/src/module/rss/rss_analyser.py
index 531b91a1..b1174cbe 100644
--- a/src/module/rss/rss_analyser.py
+++ b/src/module/rss/rss_analyser.py
@@ -46,9 +46,9 @@ class RSSAnalyser:
bangumi_info.append(data)
return bangumi_info
- def rss_to_data(self, url, filter: bool = True) -> BangumiData:
+ def rss_to_data(self, url, _filter: bool = True) -> BangumiData:
with RequestContent() as req:
- rss_torrents = req.get_torrents(url, filter)
+ rss_torrents = req.get_torrents(url, _filter)
for torrent in rss_torrents:
try:
data = self._title_analyser.raw_parser(
@@ -59,11 +59,10 @@ class RSSAnalyser:
except Exception as e:
logger.debug(e)
- def run(self, bangumi_info: list[BangumiData], download_client: DownloadClient, rss_link: str):
+ def run(self, bangumi_info: list[BangumiData], rss_link: str):
logger.info("Start collecting RSS info.")
try:
self.rss_to_datas(bangumi_info, rss_link)
- download_client.add_rules(bangumi_info, rss_link=rss_link)
except Exception as e:
logger.debug(e)
logger.info("Finished")
diff --git a/src/test/test_eps_complete.py b/src/test/test_eps_complete.py
new file mode 100644
index 00000000..7cb6cc30
--- /dev/null
+++ b/src/test/test_eps_complete.py
@@ -0,0 +1,9 @@
+from module.manager import FullSeasonGet
+from module.models import Config
+
+
+def test_full_season_get():
+ settings = Config()
+ fsg = FullSeasonGet(settings)
+
+ fsg.get_season_torrents()
\ No newline at end of file
diff --git a/src/test/test_rss_parser.py b/src/test/test_rss_parser.py
new file mode 100644
index 00000000..3aaf4c6a
--- /dev/null
+++ b/src/test/test_rss_parser.py
@@ -0,0 +1,14 @@
+from module.models import Config
+from module.rss import RSSAnalyser
+
+
+def test_rss_analyser():
+ settings = Config()
+ rss_analyser = RSSAnalyser(settings)
+ url = "https://mikanani.me/RSS/Bangumi?bangumiId=2966&subgroupid=552"
+
+ data = rss_analyser.rss_to_data(url=url)
+
+ assert data.title_raw == "Yamada-kun to Lv999 no Koi wo Suru"
+ assert data.official_title == "和山田谈场 Lv999 的恋爱"
+ assert data.season == 1
\ No newline at end of file