mirror of
https://github.com/EstrellaXD/Auto_Bangumi.git
synced 2026-05-11 10:35:50 +08:00
refactor
- fix rename bug - temp save
This commit is contained in:
2
src/module/core/__init__.py
Normal file
2
src/module/core/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
from .download_client import DownloadClient
|
||||
from .api_func import APIProcess
|
||||
66
src/module/core/api_func.py
Normal file
66
src/module/core/api_func.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import re
|
||||
|
||||
from module.core import DownloadClient
|
||||
from module.manager import FullSeasonGet
|
||||
from module.rss import RSSAnalyser
|
||||
from module.utils import json_config
|
||||
from module.conf import DATA_PATH
|
||||
|
||||
from module.ab_decorator import api_failed
|
||||
|
||||
|
||||
class APIProcess:
    """Facade for the web API.

    Wires the RSS analyser, the download client and the full-season
    fetcher together, and manages the saved bangumi rules stored in the
    JSON data file at ``DATA_PATH``.
    """

    def __init__(self):
        self._rss_analyser = RSSAnalyser()
        self._download_client = DownloadClient()
        self._full_season_get = FullSeasonGet()

    def link_process(self, link):
        """Parse an RSS *link* into bangumi metadata via the analyser."""
        return self._rss_analyser.rss_to_data(link)

    @api_failed
    def download_collection(self, link):
        """Parse *link* and download the whole collection it points to."""
        data = self.link_process(link)
        self._full_season_get.download_collection(data, link, self._download_client)
        return data

    @api_failed
    def add_subscribe(self, link):
        """Parse *link*, register it as an RSS feed and add a download rule."""
        data = self.link_process(link)
        self._download_client.add_rss_feed(link, data.get("official_title"))
        self._download_client.set_rule(data, link)
        return data

    @staticmethod
    def reset_rule():
        """Clear every saved bangumi rule in the data file."""
        data = json_config.load(DATA_PATH)
        data["bangumi_info"] = []
        json_config.save(DATA_PATH, data)
        return "Success"

    @staticmethod
    def remove_rule(name):
        """Remove the first saved rule whose raw title matches *name*.

        Returns ``"Success"`` when a rule was removed, ``"Not matched"``
        otherwise.
        """
        # Load the whole config dict so it can be saved back intact.
        # The previous implementation saved only the ``bangumi_info``
        # list to DATA_PATH, destroying the data file's structure
        # (inconsistent with reset_rule/add_rule, which save the dict).
        data = json_config.load(DATA_PATH)
        infos = data["bangumi_info"]
        for info in infos:
            # NOTE(review): *name* is treated as a regex pattern here, so
            # special characters in a plain title may mis-match — confirm
            # whether callers intend regex semantics.
            if re.search(name.lower(), info["title_raw"].lower()):
                infos.remove(info)
                json_config.save(DATA_PATH, data)
                return "Success"
        return "Not matched"

    @staticmethod
    def add_rule(title, season):
        """Append a hand-made rule for *title*/*season* to the data file."""
        data = json_config.load(DATA_PATH)
        extra_data = {
            "official_title": title,
            "title_raw": title,
            "season": season,
            "season_raw": "",
            "dpi": "",
            "group": "",
            "eps_complete": False,
            "added": False,
        }
        data["bangumi_info"].append(extra_data)
        json_config.save(DATA_PATH, data)
        return "Success"
|
||||
116
src/module/core/download_client.py
Normal file
116
src/module/core/download_client.py
Normal file
@@ -0,0 +1,116 @@
|
||||
import re
|
||||
import logging
|
||||
import os
|
||||
|
||||
from module.downloader import getClient
|
||||
|
||||
from module.conf import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DownloadClient:
    """Thin wrapper around the configured torrent client.

    Covers client initialisation, RSS feed/rule management, and torrent
    bookkeeping (query, add, move, rename, delete).
    """

    def __init__(self):
        self.client = getClient()

    def init_downloader(self):
        """Push the RSS preferences into the client and, when no download
        path is configured, derive one from the client's default save path."""
        prefs = {
            "rss_auto_downloading_enabled": True,
            "rss_max_articles_per_feed": 500,
            "rss_processing_enabled": True,
            "rss_refresh_interval": 30,
        }
        self.client.prefs_init(prefs=prefs)
        if settings.downloader.download_path == "":
            # NOTE(review): the check reads ``download_path`` but the
            # assignment writes ``path`` — confirm these are meant to be
            # different settings keys.
            prefs = self.client.get_app_prefs()
            settings.downloader.path = os.path.join(prefs["save_path"], "Bangumi")

    def set_rule(self, info: dict, rss_link):
        """Create an auto-download rule in the client for one bangumi.

        ``info`` must carry the ``official_title``, ``title_raw``,
        ``season`` and ``group`` keys produced by the RSS analyser.
        """
        official_name = info["official_title"]
        raw_name = info["title_raw"]
        season = info["season"]
        group = info["group"]
        rule = {
            "enable": True,
            "mustContain": raw_name,
            "mustNotContain": "|".join(settings.rss_parser.filter),
            "useRegex": True,
            "episodeFilter": "",
            "smartFilter": False,
            "previouslyMatchedEpisodes": [],
            "affectedFeeds": [rss_link],
            "ignoreDays": 0,
            "lastMatch": "",
            # In dev-debug mode new torrents start paused so nothing downloads.
            "addPaused": settings.debug.dev_debug,
            "assignedCategory": "Bangumi",
            "savePath": str(
                os.path.join(
                    settings.downloader.path,
                    # Replace characters that are unsafe in folder names.
                    re.sub(r"[:/.]", " ", official_name).strip(),
                    f"Season {season}",
                )
            ),
        }
        rule_name = f"[{group}] {official_name}" if settings.bangumi_manage.group_tag else official_name
        self.client.rss_set_rule(rule_name=f"{rule_name} S{season}", rule_def=rule)
        logger.info(f"Add {official_name} Season {season}")

    def rss_feed(self):
        # TODO: refresh the RSS feed on a schedule
        if self.client.get_rss_info() == settings.rss_parser.link:
            logger.info("RSS Already exists.")
        else:
            logger.info("No feed exists, start adding feed.")
            self.client.rss_add_feed(url=settings.rss_parser.link, item_path="Mikan_RSS")
            logger.info("Add RSS Feed successfully.")

    def add_collection_feed(self, rss_link, item_path):
        """Register a collection RSS feed (delegates to add_rss_feed)."""
        # Previously a verbatim copy of add_rss_feed; delegate instead.
        self.add_rss_feed(rss_link, item_path)

    def add_rules(self, bangumi_info, rss_link=None):
        """Add a download rule for every entry not yet marked ``added``.

        ``rss_link`` defaults to the configured RSS link.  The old
        signature used ``rss_link=settings.rss_parser.link`` as the
        default, which was evaluated once at class-definition time and
        froze the value; ``None`` defers the lookup to call time.
        """
        if rss_link is None:
            rss_link = settings.rss_parser.link
        logger.debug("Start adding rules.")
        for info in bangumi_info:
            if not info["added"]:
                self.set_rule(info, rss_link)
                info["added"] = True
        logger.debug("Finished.")

    def get_torrent_info(self):
        """Return completed torrents in the ``Bangumi`` category."""
        return self.client.torrents_info(
            status_filter="completed", category="Bangumi"
        )

    def rename_torrent_file(self, hash, new_file_name, old_path, new_path):
        """Rename a file inside the torrent identified by *hash*.

        (``hash`` shadows the builtin but is kept for caller compatibility.)
        """
        self.client.torrents_rename_file(
            torrent_hash=hash, new_file_name=new_file_name, old_path=old_path, new_path=new_path
        )
        logger.info(f"{old_path} >> {new_path}, new name {new_file_name}")

    def delete_torrent(self, hashes):
        """Delete the torrents identified by *hashes* from the client."""
        self.client.torrents_delete(
            hashes
        )
        logger.info("Remove bad torrents.")

    def add_torrent(self, torrent: dict):
        """Add a torrent (``url``/``save_path`` keys) under ``Bangumi``."""
        self.client.torrents_add(
            urls=torrent["url"],
            save_path=torrent["save_path"],
            category="Bangumi"
        )

    def move_torrent(self, hashes, location):
        """Move the torrents identified by *hashes* to *location*."""
        self.client.move_torrent(
            hashes=hashes,
            new_location=location
        )

    def add_rss_feed(self, rss_link, item_path):
        """Register *rss_link* as an RSS feed under *item_path*."""
        self.client.rss_add_feed(url=rss_link, item_path=item_path)
        logger.info("Add RSS Feed successfully.")

    def get_download_rules(self):
        """Return the client's current auto-download rules."""
        return self.client.get_download_rule()

    def get_torrent_path(self, hashes):
        """Return the save path of the torrent identified by *hashes*."""
        return self.client.get_torrent_path(hashes)
|
||||
|
||||
26
src/module/core/download_fliter.py
Normal file
26
src/module/core/download_fliter.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import re
|
||||
import logging
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from module.conf import settings
|
||||
from module.utils import json_config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RSSFilter:
    """Match RSS items against the user-defined include/exclude rules."""

    def __init__(self):
        # Rules are loaded once, from the path configured in settings.
        self.filter_rule = json_config.load(settings.filter_rule)

    def filter(self, item: BeautifulSoup):
        """Check *item*'s title against every rule.

        Returns ``(download, torrent)`` where *download* is True when at
        least one rule's include pattern matches the title without its
        exclude pattern also matching, and *torrent* is the item's
        ``enclosure`` tag.
        """
        title = item.title.string
        torrent = item.find("enclosure")
        download = False
        # Deliberately no early exit: every accepting rule logs a debug line.
        for rule in self.filter_rule:
            wanted = re.search(rule["include"], title)
            if wanted and not re.search(rule["exclude"], title):
                download = True
                logger.debug(f"{title} added")
        return download, torrent
|
||||
|
||||
Reference in New Issue
Block a user