fix: fix bugs in the RSS engine and database refactor.

This commit is contained in:
EstrellaXD
2023-08-06 16:04:38 +08:00
parent 94579d6b12
commit d768299c7f
16 changed files with 108 additions and 119 deletions

View File

@@ -2,11 +2,11 @@ import threading
import time
from module.conf import settings
from module.database import BangumiDatabase
from module.database import Database
from module.downloader import DownloadClient
from module.manager import Renamer, eps_complete
from module.notification import PostNotification
from module.rss import analyser
from module.rss import RSSAnalyser, RSSEngine
from .status import ProgramStatus
@@ -17,21 +17,19 @@ class RSSThread(ProgramStatus):
self._rss_thread = threading.Thread(
target=self.rss_loop,
)
self.analyser = RSSAnalyser()
def rss_loop(self):
with DownloadClient() as client:
client.init_downloader()
while not self.stop_event.is_set():
# Analyse RSS
with BangumiDatabase() as db:
new_data = analyser.rss_to_data(rss_link=settings.rss_link, database=db)
if new_data:
db.insert_list(new_data)
bangumi_list = db.not_added()
if bangumi_list:
with DownloadClient() as client:
client.set_rules(bangumi_list)
db.update_list(bangumi_list)
with DownloadClient() as client, RSSEngine() as engine:
# Analyse RSS
rss_list = engine.rss.search_combine()
for rss in rss_list:
self.analyser.rss_to_data(rss_link=rss.url, engine=engine)
# Run RSS Engine
engine.run(client)
if settings.bangumi_manage.eps_complete:
eps_complete()
self.stop_event.wait(settings.program.rss_time)

View File

@@ -3,10 +3,11 @@ from sqlmodel import Session, SQLModel
from .rss import RSSDatabase
from .torrent import TorrentDatabase
from .bangumi import BangumiDatabase
from .engine import engine as e
class Database(Session):
def __init__(self, engine):
def __init__(self, engine=e):
self.engine = engine
super().__init__(engine)
self.rss = RSSDatabase(self)

View File

@@ -36,6 +36,9 @@ class RSSDatabase:
def search_active(self) -> list[RSSItem]:
    """Return every RSSItem row whose `enabled` flag is truthy."""
    stmt = select(RSSItem).where(RSSItem.enabled)
    return self.session.exec(stmt).all()
def search_combine(self) -> list[RSSItem]:
    """Return every RSSItem row whose `combine` flag is truthy."""
    stmt = select(RSSItem).where(RSSItem.combine)
    return self.session.exec(stmt).all()
def delete(self, _id: int):
    """Delete the RSSItem row whose primary key equals `_id`."""
    # `delete(...)` here resolves to the module-level SQL delete helper,
    # not this method (method names are not in scope inside the body).
    self.session.exec(delete(RSSItem).where(RSSItem.id == _id))

View File

@@ -82,10 +82,9 @@ class QbDownloader:
status_filter=status_filter, category=category, tag=tag
)
def torrents_add(self, urls, save_path, category, torrent_files=None):
def torrents_add(self, torrent_files, save_path, category):
resp = self._client.torrents_add(
is_paused=False,
urls=urls,
torrent_files=torrent_files,
save_path=save_path,
category=category,

View File

@@ -1,7 +1,8 @@
import logging
from module.conf import settings
from module.models import Bangumi
from module.models import Bangumi, Torrent
from module.network import RequestContent, TorrentInfo
from .path import TorrentPath
@@ -113,17 +114,25 @@ class DownloadClient(TorrentPath):
self.client.torrents_delete(hashes)
logger.info("[Downloader] Remove torrents.")
def add_torrent(self, torrent: dict):
def add_torrent(
self, torrent: Torrent | TorrentInfo | list, bangumi: Bangumi
) -> bool:
if not bangumi.save_path:
bangumi.save_path = self._gen_save_path(bangumi)
with RequestContent() as req:
if isinstance(torrent, list):
torrent_file = [req.get_content(t.url) for t in torrent]
else:
torrent_file = req.get_content(torrent.url)
if self.client.torrents_add(
urls=torrent.get("urls"),
torrent_files=torrent.get("torrent_files"),
save_path=torrent.get("save_path"),
torrent_files=torrent_file,
save_path=bangumi.save_path,
category="Bangumi",
):
logger.debug(f"[Downloader] Add torrent: {torrent.get('save_path')}")
logger.debug(f"[Downloader] Add torrent: {bangumi.official_title}")
return True
else:
logger.error(f"[Downloader] Add torrent failed: {torrent.get('save_path')}")
logger.error(f"[Downloader] Add torrent failed: {bangumi.official_title}")
return False
def move_torrent(self, hashes, location):

View File

@@ -1,56 +1,43 @@
import logging
from module.database import BangumiDatabase
from module.downloader import DownloadClient
from module.models import Bangumi
from module.searcher import SearchTorrent
from module.rss import RSSEngine
logger = logging.getLogger(__name__)
class SeasonCollector(DownloadClient):
def add_season_torrents(self, data: Bangumi, torrents, torrent_files=None):
if torrent_files:
download_info = {
"torrent_files": torrent_files,
"save_path": self._gen_save_path(data),
}
return self.add_torrent(download_info)
else:
download_info = {
"urls": [torrent.torrent_link for torrent in torrents],
"save_path": self._gen_save_path(data),
}
return self.add_torrent(download_info)
def add_season_torrents(self, bangumi: Bangumi, torrents: list):
    """Hand the whole season's torrent list to the download client for `bangumi`."""
    return self.add_torrent(torrent=torrents, bangumi=bangumi)
def collect_season(self, data: Bangumi, link: str = None, proxy: bool = False):
logger.info(f"Start collecting {data.official_title} Season {data.season}...")
def collect_season(self, bangumi: Bangumi, link: str = None):
logger.info(
f"Start collecting {bangumi.official_title} Season {bangumi.season}..."
)
with SearchTorrent() as st:
if not link:
torrents = st.search_season(data)
torrents = st.search_season(bangumi)
else:
torrents = st.get_torrents(link, _filter="|".join(data.filter))
torrents = st.get_torrents(link, _filter="|".join(bangumi.filter))
torrent_files = None
if proxy:
torrent_files = [
st.get_content(torrent.torrent_link) for torrent in torrents
]
return self.add_season_torrents(
data=data, torrents=torrents, torrent_files=torrent_files
)
return self.add_season_torrents(bangumi=bangumi, torrents=torrents)
def subscribe_season(self, data: Bangumi):
with BangumiDatabase() as db:
@staticmethod
def subscribe_season(data: Bangumi):
with RSSEngine() as engine:
data.added = True
data.eps_collect = True
self.set_rule(data)
db.insert(data)
self.add_rss_feed(data.rss_link[0], item_path=data.official_title)
engine.add_rss(
rss_link=data.rss_link, name=data.official_title, combine=False
)
engine.bangumi.add(data)
def eps_complete():
with BangumiDatabase() as bd:
datas = bd.not_complete()
with RSSEngine() as engine:
datas = engine.bangumi.not_complete()
if datas:
logger.info("Start collecting full season...")
for data in datas:
@@ -58,4 +45,4 @@ def eps_complete():
with SeasonCollector() as sc:
sc.collect_season(data)
data.eps_collect = True
bd.update_list(datas)
engine.bangumi.update_all(datas)

View File

@@ -2,14 +2,14 @@ import logging
from fastapi.responses import JSONResponse
from module.database import BangumiDatabase
from module.database import Database
from module.downloader import DownloadClient
from module.models import Bangumi
logger = logging.getLogger(__name__)
class TorrentManager(BangumiDatabase):
class TorrentManager(Database):
@staticmethod
def __match_torrents_list(data: Bangumi) -> list:
with DownloadClient() as client:
@@ -28,12 +28,12 @@ class TorrentManager(BangumiDatabase):
return f"Can't find {data.official_title} torrents."
def delete_rule(self, _id: int | str, file: bool = False):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi):
with DownloadClient() as client:
client.remove_rule(data.rule_name)
client.remove_rss_feed(data.official_title)
self.delete_one(int(_id))
self.bangumi.delete_one(int(_id))
if file:
torrent_message = self.delete_torrents(data, client)
return JSONResponse(
@@ -53,12 +53,12 @@ class TorrentManager(BangumiDatabase):
)
def disable_rule(self, _id: str | int, file: bool = False):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi):
with DownloadClient() as client:
client.remove_rule(data.rule_name)
data.deleted = True
self.update_one(data)
self.bangumi.update(data)
if file:
torrent_message = self.delete_torrents(data, client)
return JSONResponse(
@@ -80,10 +80,10 @@ class TorrentManager(BangumiDatabase):
)
def enable_rule(self, _id: str | int):
data = self.search_id(int(_id))
data = self.bangumi.search(int(_id))
if isinstance(data, Bangumi):
data.deleted = False
self.update_one(data)
self.bangumi.update(data)
with DownloadClient() as client:
client.set_rule(data)
logger.info(f"[Manager] Enable rule for {data.official_title}")
@@ -99,7 +99,7 @@ class TorrentManager(BangumiDatabase):
)
def update_rule(self, data: Bangumi):
old_data = self.search_id(data.id)
old_data = self.bangumi.search_id(data.id)
if not old_data:
logger.error(f"[Manager] Can't find data with {data.id}")
return JSONResponse(
@@ -115,7 +115,7 @@ class TorrentManager(BangumiDatabase):
# Set new download rule
client.remove_rule(data.rule_name)
client.set_rule(data)
self.update_one(data)
self.bangumi.update(data)
return JSONResponse(
status_code=200,
content={
@@ -124,13 +124,13 @@ class TorrentManager(BangumiDatabase):
)
def search_all_bangumi(self):
datas = self.search_all()
datas = self.bangumi.search_all()
if not datas:
return []
return [data for data in datas if not data.deleted]
def search_one(self, _id: int | str):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if not data:
logger.error(f"[Manager] Can't find data with {_id}")
return {"status": "error", "msg": f"Can't find data with {_id}"}

View File

@@ -1,4 +1,4 @@
from .bangumi import Bangumi, Episode, BangumiUpdate
from .bangumi import Bangumi, Episode, BangumiUpdate, Notification
from .config import Config
from .rss import RSSItem, RSSUpdate
from .torrent import EpisodeFile, SubtitleFile, Torrent, TorrentUpdate

View File

@@ -13,7 +13,7 @@ from .site import mikan_parser
@dataclass
class TorrentInfo:
name: str
torrent_link: str
url: str
homepage: str
_poster_link: str | None = None
_official_title: str | None = None
@@ -52,9 +52,7 @@ class RequestContent(RequestURL):
):
if re.search(_filter, _title) is None:
torrents.append(
TorrentInfo(
name=_title, torrent_link=torrent_url, homepage=homepage
)
TorrentInfo(name=_title, url=torrent_url, homepage=homepage)
)
return torrents
except ConnectionError:

View File

@@ -1,7 +1,7 @@
import logging
from module.conf import settings
from module.database import BangumiDatabase
from module.database import Database
from module.models import Notification
from .plugin import (
@@ -36,8 +36,8 @@ class PostNotification:
@staticmethod
def _get_poster(notify: Notification):
with BangumiDatabase() as db:
poster_path = db.match_poster(notify.official_title)
with Database() as db:
poster_path = db.bangumi.match_poster(notify.official_title)
if poster_path:
poster_link = "https://mikanani.me" + poster_path
else:

View File

@@ -39,7 +39,7 @@ class TitleParser:
return official_title, tmdb_season, year
@staticmethod
def raw_parser(raw: str, rss_link: str) -> Bangumi | None:
def raw_parser(raw: str) -> Bangumi | None:
language = settings.rss_parser.language
try:
episode = raw_parser(raw)
@@ -72,7 +72,6 @@ class TitleParser:
eps_collect=False if episode.episode > 1 else True,
offset=0,
filter=settings.rss_parser.filter,
rss_link=[rss_link],
)
logger.debug(f"RAW:{raw} >> {title_raw}")
return data

View File

@@ -1,3 +1,2 @@
from .analyser import RSSAnalyser
analyser = RSSAnalyser()
from .engine import RSSEngine

View File

@@ -1,24 +1,22 @@
import logging
import re
from .engine import RSSEngine
from module.conf import settings
from module.models import Bangumi
from module.database import Database
from module.network import RequestContent, TorrentInfo
from module.parser import TitleParser
logger = logging.getLogger(__name__)
class RSSAnalyser:
def __init__(self):
self._title_analyser = TitleParser()
class RSSAnalyser(TitleParser):
def official_title_parser(self, data: Bangumi, mikan_title: str):
if settings.rss_parser.parser_type == "mikan":
data.official_title = mikan_title if mikan_title else data.official_title
elif settings.rss_parser.parser_type == "tmdb":
tmdb_title, season, year = self._title_analyser.tmdb_parser(
tmdb_title, season, year = self.tmdb_parser(
data.official_title, data.season, settings.rss_parser.language
)
data.official_title = tmdb_title
@@ -29,7 +27,7 @@ class RSSAnalyser:
data.official_title = re.sub(r"[/:.\\]", " ", data.official_title)
@staticmethod
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list:
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[TorrentInfo]:
with RequestContent() as req:
if full_parse:
rss_torrents = req.get_torrents(rss_link)
@@ -42,7 +40,7 @@ class RSSAnalyser:
) -> list:
new_data = []
for torrent in torrents:
data = self._title_analyser.raw_parser(raw=torrent.name, rss_link=rss_link)
data = self.raw_parser(raw=torrent.name)
if data and data.title_raw not in [i.title_raw for i in new_data]:
try:
poster_link, mikan_title = (
@@ -52,6 +50,7 @@ class RSSAnalyser:
except AttributeError:
poster_link, mikan_title = None, None
data.poster_link = poster_link
data.rss_link = rss_link
self.official_title_parser(data, mikan_title)
if not full_parse:
return [data]
@@ -59,10 +58,8 @@ class RSSAnalyser:
logger.debug(f"[RSS] New title found: {data.official_title}")
return new_data
def torrent_to_data(
self, torrent: TorrentInfo, rss_link: str | None = None
) -> Bangumi:
data = self._title_analyser.raw_parser(raw=torrent.name, rss_link=rss_link)
def torrent_to_data(self, torrent: TorrentInfo) -> Bangumi:
data = self.raw_parser(raw=torrent.name)
if data:
try:
poster_link, mikan_title = (
@@ -76,16 +73,18 @@ class RSSAnalyser:
return data
def rss_to_data(
self, rss_link: str, database: Database, full_parse: bool = True
self, rss_link: str, engine: RSSEngine, full_parse: bool = True
) -> list[Bangumi]:
rss_torrents = self.get_rss_torrents(rss_link, full_parse)
torrents_to_add = database.bangumi.match_list(rss_torrents, rss_link)
torrents_to_add = engine.bangumi.match_list(rss_torrents, rss_link)
if not torrents_to_add:
logger.debug("[RSS] No new title has been found.")
return []
# New List
new_data = self.torrents_to_data(torrents_to_add, rss_link, full_parse)
if new_data:
# Add to database
engine.bangumi.add_all(new_data)
return new_data
else:
return []
@@ -93,6 +92,6 @@ class RSSAnalyser:
def link_to_data(self, link: str) -> Bangumi:
torrents = self.get_rss_torrents(link, False)
for torrent in torrents:
data = self.torrent_to_data(torrent, link)
data = self.torrent_to_data(torrent)
if data:
return data

View File

@@ -1,6 +1,8 @@
import re
import logging
from typing import Optional
from module.models import Bangumi, RSSItem, Torrent
from module.network import RequestContent
from module.downloader import DownloadClient
@@ -23,12 +25,15 @@ class RSSEngine(Database):
torrents.append(
Torrent(
name=torrent_info.name,
url=torrent_info.torrent_link,
url=torrent_info.url,
homepage=torrent_info.homepage,
)
)
return torrents
def get_combine_rss(self) -> list[RSSItem]:
return self.rss.get_combine()
def add_rss(self, rss_link: str, name: str | None = None, combine: bool = True):
if not name:
with RequestContent() as req:
@@ -41,19 +46,15 @@ class RSSEngine(Database):
new_torrents = self.torrent.check_new(torrents)
return new_torrents
def match_torrent(self, torrent: Torrent):
def match_torrent(self, torrent: Torrent) -> Optional[Bangumi]:
matched: Bangumi = self.bangumi.match_torrent(torrent.name)
if matched:
_filter = matched.filter.replace(",", "|")
if re.search(_filter, torrent.name, re.IGNORECASE):
if not re.search(_filter, torrent.name, re.IGNORECASE):
torrent.refer_id = matched.id
torrent.save_path = matched.save_path
with RequestContent() as req:
torrent_file = req.get_content(torrent.url)
return {
"torrent_files": torrent_file,
"save_path": torrent.save_path,
}
return matched
return None
def run(self, client: DownloadClient):
# Get All RSS Items
@@ -63,13 +64,9 @@ class RSSEngine(Database):
new_torrents = self.pull_rss(rss_item)
# Get all enabled bangumi data
for torrent in new_torrents:
download_info = self.match_torrent(torrent)
client.add_torrent(download_info)
torrent.downloaded = True
matched_data = self.match_torrent(torrent)
if matched_data:
if client.add_torrent(torrent, matched_data):
torrent.downloaded = True
# Add all torrents to database
self.torrent.add_all(new_torrents)
if __name__ == "__main__":
with RSSEngine() as engine:
engine.run()

View File

@@ -1,4 +1,4 @@
from module.models import Bangumi, TorrentBase
from module.models import Bangumi, Torrent
from module.network import RequestContent
from module.searcher.plugin import search_url
@@ -15,7 +15,7 @@ SEARCH_KEY = [
class SearchTorrent(RequestContent):
def search_torrents(
self, keywords: list[str], site: str = "mikan"
) -> list[TorrentBase]:
) -> list[Torrent]:
url = search_url(site, keywords)
# TorrentInfo to Torrent
torrents = self.get_torrents(url)
@@ -24,11 +24,11 @@ class SearchTorrent(RequestContent):
for torrent in torrents:
yield {
"name": torrent.name,
"torrent_link": torrent.torrent_link,
"torrent_link": torrent.url,
"homepage": torrent.homepage,
}
return [TorrentBase(**d) for d in to_dict()]
return [Torrent(**d) for d in to_dict()]
def search_season(self, data: Bangumi):
keywords = [getattr(data, key) for key in SEARCH_KEY if getattr(data, key)]

View File

@@ -1,7 +1,7 @@
import os
from module.conf import LEGACY_DATA_PATH
from module.database import BangumiDatabase
from module.database import Database
from module.models import Bangumi
from module.utils import json_config
@@ -15,8 +15,8 @@ def data_migration():
new_data = []
for info in infos:
new_data.append(Bangumi(**info, rss_link=[rss_link]))
with BangumiDatabase() as database:
database.update_table()
database.insert_list(new_data)
with Database() as db:
db.create_table()
db.bangumi.add_all(new_data)
LEGACY_DATA_PATH.unlink(missing_ok=True)