Merge pull request #418 from EstrellaXD/self-rss

feat: Add self rss engine module
This commit is contained in:
Estrella Pan
2023-08-06 20:52:13 +08:00
committed by GitHub
30 changed files with 454 additions and 278 deletions

View File

@@ -21,7 +21,6 @@ async def startup():
@router.on_event("shutdown")
async def shutdown():
program.stop()
sys.exit(0)
@router.get("/restart")

View File

@@ -3,7 +3,6 @@ import logging
from module.conf import VERSION, settings
from module.update import data_migration
from .rss_feed import add_rss_feed
from .sub_thread import RenameThread, RSSThread
logger = logging.getLogger(__name__)
@@ -52,7 +51,6 @@ class Program(RenameThread, RSSThread):
if self.enable_renamer:
self.rename_start()
if self.enable_rss:
add_rss_feed()
self.rss_start()
logger.info("Program running.")
return {"status": "Program started."}

View File

@@ -25,10 +25,3 @@ def add_rss_feed():
if add:
client.add_rss_feed(settings.rss_link)
logger.info(f"Add RSS Feed: {settings.rss_link}")
if __name__ == "__main__":
from module.conf import setup_logger
setup_logger()
add_rss_feed()

View File

@@ -2,11 +2,11 @@ import threading
import time
from module.conf import settings
from module.database import BangumiDatabase
from module.database import Database
from module.downloader import DownloadClient
from module.manager import Renamer, eps_complete
from module.notification import PostNotification
from module.rss import analyser
from module.rss import RSSAnalyser, RSSEngine
from .status import ProgramStatus
@@ -17,21 +17,19 @@ class RSSThread(ProgramStatus):
self._rss_thread = threading.Thread(
target=self.rss_loop,
)
self.analyser = RSSAnalyser()
def rss_loop(self):
with DownloadClient() as client:
client.init_downloader()
while not self.stop_event.is_set():
# Analyse RSS
with BangumiDatabase() as db:
new_data = analyser.rss_to_data(rss_link=settings.rss_link, database=db)
if new_data:
db.insert_list(new_data)
bangumi_list = db.not_added()
if bangumi_list:
with DownloadClient() as client:
client.set_rules(bangumi_list)
db.update_list(bangumi_list)
with DownloadClient() as client, RSSEngine() as engine:
# Analyse RSS
rss_list = engine.rss.search_combine()
for rss in rss_list:
self.analyser.rss_to_data(rss_link=rss.url, engine=engine)
# Run RSS Engine
engine.run(client)
if settings.bangumi_manage.eps_complete:
eps_complete()
self.stop_event.wait(settings.program.rss_time)

View File

@@ -1 +1,2 @@
from .bangumi import BangumiDatabase
from .combine import Database
from .engine import engine

View File

@@ -1,95 +1,97 @@
import logging
from sqlmodel import Session, select, delete, or_
from sqlmodel import Session, select, delete, or_, and_
from sqlalchemy.sql import func
from typing import Optional
from .engine import engine
from module.models import Bangumi
logger = logging.getLogger(__name__)
class BangumiDatabase(Session):
def __init__(self, _engine=engine):
super().__init__(_engine)
class BangumiDatabase:
def __init__(self, session: Session):
self.session = session
def insert_one(self, data: Bangumi):
self.add(data)
self.commit()
def add(self, data: Bangumi):
self.session.add(data)
self.session.commit()
logger.debug(f"[Database] Insert {data.official_title} into database.")
def insert_list(self, data: list[Bangumi]):
self.add_all(data)
def add_all(self, data: list[Bangumi]):
self.session.add_all(data)
logger.debug(f"[Database] Insert {len(data)} bangumi into database.")
def update_one(self, data: Bangumi) -> bool:
db_data = self.get(Bangumi, data.id)
def update(self, data: Bangumi) -> bool:
db_data = self.session.get(Bangumi, data.id)
if not db_data:
return False
bangumi_data = data.dict(exclude_unset=True)
for key, value in bangumi_data.items():
setattr(db_data, key, value)
self.add(db_data)
self.commit()
self.refresh(db_data)
self.session.add(db_data)
self.session.commit()
self.session.refresh(db_data)
logger.debug(f"[Database] Update {data.official_title}")
return True
def update_list(self, datas: list[Bangumi]):
for data in datas:
self.update_one(data)
def update_all(self, datas: list[Bangumi]):
self.session.add_all(datas)
self.session.commit()
logger.debug(f"[Database] Update {len(datas)} bangumi.")
def update_rss(self, title_raw, rss_set: str):
# Update rss and added
statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
bangumi = self.exec(statement).first()
bangumi = self.session.exec(statement).first()
bangumi.rss_link = rss_set
bangumi.added = False
self.add(bangumi)
self.commit()
self.refresh(bangumi)
self.session.add(bangumi)
self.session.commit()
self.session.refresh(bangumi)
logger.debug(f"[Database] Update {title_raw} rss_link to {rss_set}.")
def update_poster(self, title_raw, poster_link: str):
statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
bangumi = self.exec(statement).first()
bangumi = self.session.exec(statement).first()
bangumi.poster_link = poster_link
self.add(bangumi)
self.commit()
self.refresh(bangumi)
self.session.add(bangumi)
self.session.commit()
self.session.refresh(bangumi)
logger.debug(f"[Database] Update {title_raw} poster_link to {poster_link}.")
def delete_one(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id)
bangumi = self.exec(statement).first()
self.delete(bangumi)
self.commit()
bangumi = self.session.exec(statement).first()
self.session.delete(bangumi)
self.session.commit()
logger.debug(f"[Database] Delete bangumi id: {_id}.")
def delete_all(self):
statement = delete(Bangumi)
self.exec(statement)
self.commit()
self.session.exec(statement)
self.session.commit()
def search_all(self) -> list[Bangumi]:
statement = select(Bangumi)
return self.exec(statement).all()
return self.session.exec(statement).all()
def search_id(self, _id: int) -> Optional[Bangumi]:
statement = select(Bangumi).where(Bangumi.id == _id)
bangumi = self.exec(statement).first()
bangumi = self.session.exec(statement).first()
if bangumi is None:
logger.warning(f"[Database] Cannot find bangumi id: {_id}.")
return None
else:
logger.debug(f"[Database] Find bangumi id: {_id}.")
return self.exec(statement).first()
return self.session.exec(statement).first()
def match_poster(self, bangumi_name: str) -> str:
# Use like to match
statement = select(Bangumi).where(func.instr(bangumi_name, Bangumi.title_raw) > 0)
data = self.exec(statement).first()
statement = select(Bangumi).where(
func.instr(bangumi_name, Bangumi.official_title) > 0
)
data = self.session.exec(statement).first()
if data:
return data.poster_link
else:
@@ -116,10 +118,19 @@ class BangumiDatabase(Session):
i += 1
return torrent_list
def match_torrent(self, torrent_name: str) -> Optional[Bangumi]:
statement = select(Bangumi).where(
and_(
func.instr(torrent_name, Bangumi.title_raw) > 0,
Bangumi.deleted == False,
)
)
return self.session.exec(statement).first()
def not_complete(self) -> list[Bangumi]:
# Find eps_complete = False
condition = select(Bangumi).where(Bangumi.eps_collect == 0)
datas = self.exec(condition).all()
condition = select(Bangumi).where(Bangumi.eps_collect == False)
datas = self.session.exec(condition).all()
return datas
def not_added(self) -> list[Bangumi]:
@@ -128,19 +139,18 @@ class BangumiDatabase(Session):
Bangumi.added == 0, Bangumi.rule_name is None, Bangumi.save_path is None
)
)
datas = self.exec(conditions).all()
datas = self.session.exec(conditions).all()
return datas
def disable_rule(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id)
bangumi = self.exec(statement).first()
bangumi = self.session.exec(statement).first()
bangumi.deleted = True
self.add(bangumi)
self.commit()
self.refresh(bangumi)
self.session.add(bangumi)
self.session.commit()
self.session.refresh(bangumi)
logger.debug(f"[Database] Disable rule {bangumi.title_raw}.")
if __name__ == "__main__":
with BangumiDatabase() as db:
print(db.not_complete())
def search_rss(self, rss_link: str) -> list[Bangumi]:
statement = select(Bangumi).where(func.instr(rss_link, Bangumi.rss_link) > 0)
return self.session.exec(statement).all()

View File

@@ -0,0 +1,18 @@
from sqlmodel import Session, SQLModel
from .rss import RSSDatabase
from .torrent import TorrentDatabase
from .bangumi import BangumiDatabase
from .engine import engine as e
class Database(Session):
    """SQLModel session that bundles per-table helper facades.

    Subclasses ``sqlmodel.Session`` so it can be used as a context manager,
    and exposes ``rss``, ``torrent`` and ``bangumi`` helpers that all share
    this single session (and therefore one commit scope).
    """

    def __init__(self, engine=e):
        # Keep a handle on the engine so create_table() can run DDL later.
        self.engine = engine
        super().__init__(engine)
        # Each facade receives *this* session object, so their queries and
        # commits all go through the same connection.
        self.rss = RSSDatabase(self)
        self.torrent = TorrentDatabase(self)
        self.bangumi = BangumiDatabase(self)

    def create_table(self):
        # Create every table registered on SQLModel's metadata
        # (no-op for tables that already exist).
        SQLModel.metadata.create_all(self.engine)

View File

@@ -0,0 +1,50 @@
import logging
from sqlmodel import Session, select, delete
from .engine import engine
from module.models import RSSItem
logger = logging.getLogger(__name__)
class RSSDatabase:
    """Data-access helpers for ``RSSItem`` rows.

    Every operation runs on the session supplied at construction time and
    commits on its own.
    """

    def __init__(self, session: Session):
        self.session = session

    def add(self, data: RSSItem):
        """Insert *data* unless an item with the same URL is already stored."""
        duplicate = self.session.exec(
            select(RSSItem).where(RSSItem.url == data.url)
        ).first()
        if duplicate:
            logger.debug(f"RSS Item {data.url} already exists.")
            return
        logger.debug(f"RSS Item {data.url} not exists, adding...")
        self.session.add(data)
        self.session.commit()
        self.session.refresh(data)

    def update(self, data: RSSItem):
        """Persist pending changes on *data* and reload it from the database."""
        self.session.add(data)
        self.session.commit()
        self.session.refresh(data)

    def search_all(self) -> list[RSSItem]:
        """Return every stored RSS item."""
        return self.session.exec(select(RSSItem)).all()

    def search_active(self) -> list[RSSItem]:
        """Return the items whose ``enabled`` flag is set."""
        statement = select(RSSItem).where(RSSItem.enabled)
        return self.session.exec(statement).all()

    def search_combine(self) -> list[RSSItem]:
        """Return the items whose ``combine`` flag is set."""
        statement = select(RSSItem).where(RSSItem.combine)
        return self.session.exec(statement).all()

    def delete(self, _id: int):
        """Remove the item with primary key *_id*."""
        self.session.exec(delete(RSSItem).where(RSSItem.id == _id))
        self.session.commit()

    def delete_all(self):
        """Remove every RSS item."""
        self.session.exec(delete(RSSItem))
        self.session.commit()

View File

@@ -1,47 +1,54 @@
import logging
from .connector import DataConnector
from sqlmodel import Session, select
from module.models import Torrent
logger = logging.getLogger(__name__)
class TorrentDatabase(DataConnector):
def update_table(self):
table_name = "torrent"
db_data = self.__data_to_db()
self._update_table(table_name, db_data)
class TorrentDatabase:
def __init__(self, session: Session):
self.session = session
def __data_to_db(self, data: SaveTorrent):
db_data = data.dict()
for key, value in db_data.items():
if isinstance(value, bool):
db_data[key] = int(value)
elif isinstance(value, list):
db_data[key] = ",".join(value)
return db_data
def add(self, data: Torrent):
self.session.add(data)
self.session.commit()
self.session.refresh(data)
logger.debug(f"Insert {data.name} in database.")
def __db_to_data(self, db_data: dict):
for key, item in db_data.items():
if isinstance(item, int):
if key not in ["id", "offset", "season", "year"]:
db_data[key] = bool(item)
elif key in ["filter", "rss_link"]:
db_data[key] = item.split(",")
return SaveTorrent(**db_data)
def add_all(self, datas: list[Torrent]):
self.session.add_all(datas)
self.session.commit()
logger.debug(f"Insert {len(datas)} torrents in database.")
def if_downloaded(self, torrent_url: str, torrent_name: str) -> bool:
self._cursor.execute(
"SELECT * FROM torrent WHERE torrent_url = ? OR torrent_name = ?",
(torrent_url, torrent_name),
)
return bool(self._cursor.fetchone())
def update(self, data: Torrent):
self.session.add(data)
self.session.commit()
self.session.refresh(data)
logger.debug(f"Update {data.name} in database.")
def insert(self, data: SaveTorrent):
db_data = self.__data_to_db(data)
columns = ", ".join(db_data.keys())
values = ", ".join([f":{key}" for key in db_data.keys()])
self._cursor.execute(
f"INSERT INTO torrent ({columns}) VALUES ({values})", db_data
)
logger.debug(f"Add {data.torrent_name} into database.")
self._conn.commit()
def update_all(self, datas: list[Torrent]):
self.session.add_all(datas)
self.session.commit()
def update_one_user(self, data: Torrent):
self.session.add(data)
self.session.commit()
self.session.refresh(data)
logger.debug(f"Update {data.name} in database.")
def search(self, _id: int) -> Torrent:
return self.session.exec(select(Torrent).where(Torrent.id == _id)).first()
def search_all(self) -> list[Torrent]:
return self.session.exec(select(Torrent)).all()
def check_new(self, torrents_list: list[Torrent]) -> list[Torrent]:
new_torrents = []
old_torrents = self.search_all()
old_urls = [t.url for t in old_torrents]
for torrent in torrents_list:
if torrent.url not in old_urls:
new_torrents.append(torrent)
return new_torrents

View File

@@ -82,10 +82,9 @@ class QbDownloader:
status_filter=status_filter, category=category, tag=tag
)
def torrents_add(self, urls, save_path, category, torrent_files=None):
def torrents_add(self, torrent_files, save_path, category):
resp = self._client.torrents_add(
is_paused=False,
urls=urls,
torrent_files=torrent_files,
save_path=save_path,
category=category,

View File

@@ -1,7 +1,8 @@
import logging
from module.conf import settings
from module.models import Bangumi
from module.models import Bangumi, Torrent
from module.network import RequestContent, TorrentInfo
from .path import TorrentPath
@@ -113,17 +114,25 @@ class DownloadClient(TorrentPath):
self.client.torrents_delete(hashes)
logger.info("[Downloader] Remove torrents.")
def add_torrent(self, torrent: dict):
def add_torrent(
self, torrent: Torrent | TorrentInfo | list, bangumi: Bangumi
) -> bool:
if not bangumi.save_path:
bangumi.save_path = self._gen_save_path(bangumi)
with RequestContent() as req:
if isinstance(torrent, list):
torrent_file = [req.get_content(t.url) for t in torrent]
else:
torrent_file = req.get_content(torrent.url)
if self.client.torrents_add(
urls=torrent.get("urls"),
torrent_files=torrent.get("torrent_files"),
save_path=torrent.get("save_path"),
torrent_files=torrent_file,
save_path=bangumi.save_path,
category="Bangumi",
):
logger.debug(f"[Downloader] Add torrent: {torrent.get('save_path')}")
logger.debug(f"[Downloader] Add torrent: {bangumi.official_title}")
return True
else:
logger.error(f"[Downloader] Add torrent failed: {torrent.get('save_path')}")
logger.error(f"[Downloader] Add torrent failed: {bangumi.official_title}")
return False
def move_torrent(self, hashes, location):

View File

@@ -1,56 +1,43 @@
import logging
from module.database import BangumiDatabase
from module.downloader import DownloadClient
from module.models import Bangumi
from module.searcher import SearchTorrent
from module.rss import RSSEngine
logger = logging.getLogger(__name__)
class SeasonCollector(DownloadClient):
def add_season_torrents(self, data: Bangumi, torrents, torrent_files=None):
if torrent_files:
download_info = {
"torrent_files": torrent_files,
"save_path": self._gen_save_path(data),
}
return self.add_torrent(download_info)
else:
download_info = {
"urls": [torrent.torrent_link for torrent in torrents],
"save_path": self._gen_save_path(data),
}
return self.add_torrent(download_info)
def add_season_torrents(self, bangumi: Bangumi, torrents: list):
return self.add_torrent(bangumi=bangumi, torrent=torrents)
def collect_season(self, data: Bangumi, link: str = None, proxy: bool = False):
logger.info(f"Start collecting {data.official_title} Season {data.season}...")
def collect_season(self, bangumi: Bangumi, link: str = None):
logger.info(
f"Start collecting {bangumi.official_title} Season {bangumi.season}..."
)
with SearchTorrent() as st:
if not link:
torrents = st.search_season(data)
torrents = st.search_season(bangumi)
else:
torrents = st.get_torrents(link, _filter="|".join(data.filter))
torrents = st.get_torrents(link, _filter="|".join(bangumi.filter))
torrent_files = None
if proxy:
torrent_files = [
st.get_content(torrent.torrent_link) for torrent in torrents
]
return self.add_season_torrents(
data=data, torrents=torrents, torrent_files=torrent_files
)
return self.add_season_torrents(bangumi=bangumi, torrents=torrents)
def subscribe_season(self, data: Bangumi):
with BangumiDatabase() as db:
@staticmethod
def subscribe_season(data: Bangumi):
with RSSEngine() as engine:
data.added = True
data.eps_collect = True
self.set_rule(data)
db.insert(data)
self.add_rss_feed(data.rss_link[0], item_path=data.official_title)
engine.add_rss(
rss_link=data.rss_link, name=data.official_title, combine=False
)
engine.bangumi.add(data)
def eps_complete():
with BangumiDatabase() as bd:
datas = bd.not_complete()
with RSSEngine() as engine:
datas = engine.bangumi.not_complete()
if datas:
logger.info("Start collecting full season...")
for data in datas:
@@ -58,4 +45,4 @@ def eps_complete():
with SeasonCollector() as sc:
sc.collect_season(data)
data.eps_collect = True
bd.update_list(datas)
engine.bangumi.update_all(datas)

View File

@@ -2,14 +2,14 @@ import logging
from fastapi.responses import JSONResponse
from module.database import BangumiDatabase
from module.database import Database
from module.downloader import DownloadClient
from module.models import Bangumi
logger = logging.getLogger(__name__)
class TorrentManager(BangumiDatabase):
class TorrentManager(Database):
@staticmethod
def __match_torrents_list(data: Bangumi) -> list:
with DownloadClient() as client:
@@ -28,12 +28,13 @@ class TorrentManager(BangumiDatabase):
return f"Can't find {data.official_title} torrents."
def delete_rule(self, _id: int | str, file: bool = False):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi):
with DownloadClient() as client:
client.remove_rule(data.rule_name)
client.remove_rss_feed(data.official_title)
self.delete_one(int(_id))
# client.remove_rule(data.rule_name)
# client.remove_rss_feed(data.official_title)
self.rss.delete(data.official_title)
self.bangumi.delete_one(int(_id))
if file:
torrent_message = self.delete_torrents(data, client)
return JSONResponse(
@@ -53,12 +54,12 @@ class TorrentManager(BangumiDatabase):
)
def disable_rule(self, _id: str | int, file: bool = False):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi):
with DownloadClient() as client:
client.remove_rule(data.rule_name)
# client.remove_rule(data.rule_name)
data.deleted = True
self.update_one(data)
self.bangumi.update(data)
if file:
torrent_message = self.delete_torrents(data, client)
return JSONResponse(
@@ -80,12 +81,10 @@ class TorrentManager(BangumiDatabase):
)
def enable_rule(self, _id: str | int):
data = self.search_id(int(_id))
data = self.bangumi.search(int(_id))
if isinstance(data, Bangumi):
data.deleted = False
self.update_one(data)
with DownloadClient() as client:
client.set_rule(data)
self.bangumi.update(data)
logger.info(f"[Manager] Enable rule for {data.official_title}")
return JSONResponse(
status_code=200,
@@ -99,7 +98,7 @@ class TorrentManager(BangumiDatabase):
)
def update_rule(self, data: Bangumi):
old_data = self.search_id(data.id)
old_data = self.bangumi.search_id(data.id)
if not old_data:
logger.error(f"[Manager] Can't find data with {data.id}")
return JSONResponse(
@@ -112,10 +111,7 @@ class TorrentManager(BangumiDatabase):
path = client._gen_save_path(data)
if match_list:
client.move_torrent(match_list, path)
# Set new download rule
client.remove_rule(data.rule_name)
client.set_rule(data)
self.update_one(data)
self.bangumi.update(data)
return JSONResponse(
status_code=200,
content={
@@ -124,13 +120,13 @@ class TorrentManager(BangumiDatabase):
)
def search_all_bangumi(self):
datas = self.search_all()
datas = self.bangumi.search_all()
if not datas:
return []
return [data for data in datas if not data.deleted]
def search_one(self, _id: int | str):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if not data:
logger.error(f"[Manager] Can't find data with {_id}")
return {"status": "error", "msg": f"Can't find data with {_id}"}

View File

@@ -1,5 +1,5 @@
from .bangumi import Bangumi, Episode
from .bangumi import Bangumi, Episode, BangumiUpdate, Notification
from .config import Config
from .rss import RSSTorrents
from .torrent import EpisodeFile, SubtitleFile, TorrentBase
from .rss import RSSItem, RSSUpdate
from .torrent import EpisodeFile, SubtitleFile, Torrent, TorrentUpdate
from .user import UserLogin

View File

@@ -20,7 +20,7 @@ class Bangumi(SQLModel, table=True):
subtitle: Optional[str] = Field(alias="subtitle", title="字幕")
eps_collect: bool = Field(default=False, alias="eps_collect", title="是否已收集")
offset: int = Field(default=0, alias="offset", title="番剧偏移量")
filter: str = Field(default="720, \\d+-\\d+", alias="filter", title="番剧过滤器")
filter: str = Field(default="720,\\d+-\\d+", alias="filter", title="番剧过滤器")
rss_link: str = Field(default="", alias="rss_link", title="番剧RSS链接")
poster_link: Optional[str] = Field(alias="poster_link", title="番剧海报链接")
added: bool = Field(default=False, alias="added", title="是否已添加")
@@ -34,6 +34,7 @@ class BangumiUpdate(SQLModel):
default="official_title", alias="official_title", title="番剧中文名"
)
year: Optional[str] = Field(alias="year", title="番剧年份")
title_raw: str = Field(default="title_raw", alias="title_raw", title="番剧原名")
season: int = Field(default=1, alias="season", title="番剧季度")
season_raw: Optional[str] = Field(alias="season_raw", title="番剧季度原名")
group_name: Optional[str] = Field(alias="group_name", title="字幕组")
@@ -44,7 +45,10 @@ class BangumiUpdate(SQLModel):
offset: int = Field(default=0, alias="offset", title="番剧偏移量")
filter: str = Field(default="720, \\d+-\\d+", alias="filter", title="番剧过滤器")
rss_link: str = Field(default="", alias="rss_link", title="番剧RSS链接")
poster_link: Optional[str] = Field(alias="poster_link", title="番剧海报链接")
added: bool = Field(default=False, alias="added", title="是否已添加")
rule_name: Optional[str] = Field(alias="rule_name", title="番剧规则名")
save_path: Optional[str] = Field(alias="save_path", title="番剧保存路径")
deleted: bool = Field(False, alias="deleted", title="是否已删除")

View File

@@ -1,8 +1,6 @@
from os.path import expandvars
from pydantic import BaseModel, Field
# Sub config
class Program(BaseModel):
rss_time: int = Field(7200, description="Sleep time")

View File

@@ -1,9 +1,21 @@
from pydantic import BaseModel, Field
from sqlmodel import SQLModel, Field
from typing import Optional
class RSSItem(SQLModel, table=True):
id: int = Field(default=None, primary_key=True, alias="id")
item_path: str = Field("example path", alias="item_path")
url: str = Field("https://mikanani.me", alias="url")
combine: bool = Field(True, alias="combine")
enabled: bool = Field(True, alias="enabled")
class RSSUpdate(SQLModel):
item_path: Optional[str] = Field("example path", alias="item_path")
url: Optional[str] = Field("https://mikanani.me", alias="url")
combine: Optional[bool] = Field(True, alias="combine")
enabled: Optional[bool] = Field(True, alias="enabled")
class RSSTorrents(BaseModel):
name: str = Field(..., alias="item_path")
url: str = Field(..., alias="url")
analyze: bool = Field(..., alias="analyze")
enabled: bool = Field(..., alias="enabled")
torrents: list[str] = Field(..., alias="torrents")

View File

@@ -1,16 +1,20 @@
from pydantic import BaseModel, Field
from pydantic import BaseModel
from sqlmodel import SQLModel, Field
from typing import Optional
class TorrentBase(BaseModel):
name: str = Field(...)
torrent_link: str = Field(...)
homepage: str | None = Field(None)
class Torrent(SQLModel, table=True):
id: int = Field(default=None, primary_key=True, alias="id")
refer_id: Optional[int] = Field(None, alias="refer_id")
name: str = Field("", alias="name")
url: str = Field("https://example.com/torrent", alias="url")
homepage: Optional[str] = Field(None, alias="homepage")
save_path: Optional[str] = Field(None, alias="saved_path")
downloaded: bool = Field(False, alias="downloaded")
class FileSet(BaseModel):
media_path: str = Field(...)
sc_subtitle: str | None = Field(None)
tc_subtitle: str | None = Field(None)
class TorrentUpdate(SQLModel):
downloaded: bool = Field(False, alias="downloaded")
class EpisodeFile(BaseModel):

View File

@@ -13,7 +13,7 @@ from .site import mikan_parser
@dataclass
class TorrentInfo:
name: str
torrent_link: str
url: str
homepage: str
_poster_link: str | None = None
_official_title: str | None = None
@@ -37,7 +37,6 @@ class TorrentInfo:
class RequestContent(RequestURL):
# Mikanani RSS
def get_torrents(
self,
_url: str,
@@ -53,9 +52,7 @@ class RequestContent(RequestURL):
):
if re.search(_filter, _title) is None:
torrents.append(
TorrentInfo(
name=_title, torrent_link=torrent_url, homepage=homepage
)
TorrentInfo(name=_title, url=torrent_url, homepage=homepage)
)
return torrents
except ConnectionError:
@@ -95,3 +92,7 @@ class RequestContent(RequestURL):
def check_connection(self, _url):
return self.check_url(_url)
def get_rss_title(self, _url):
    # Fetch the feed at _url and return its channel title text.
    # NOTE(review): assumes get_xml() returns an ElementTree element
    # (other code in this change uses .find()/.attrib) — confirm.
    soup = self.get_xml(_url)
    # find() returns None when <channel><title> is absent, which would
    # raise AttributeError on .text here.
    return soup.find("./channel/title").text

View File

@@ -7,3 +7,7 @@ def mikan_parser(soup):
torrent_urls.append(item.find("enclosure").attrib["url"])
torrent_homepage.append(item.find("link").text)
return torrent_titles, torrent_urls, torrent_homepage
def mikan_title(soup):
    """Return the text of the first ``<title>`` child element of *soup*."""
    title_node = soup.find("title")
    return title_node.text

View File

@@ -1,7 +1,7 @@
import logging
from module.conf import settings
from module.database import BangumiDatabase
from module.database import Database
from module.models import Notification
from .plugin import (
@@ -36,8 +36,8 @@ class PostNotification:
@staticmethod
def _get_poster(notify: Notification):
with BangumiDatabase() as db:
poster_path = db.match_poster(notify.official_title)
with Database() as db:
poster_path = db.bangumi.match_poster(notify.official_title)
if poster_path:
poster_link = "https://mikanani.me" + poster_path
else:

View File

@@ -39,7 +39,7 @@ class TitleParser:
return official_title, tmdb_season, year
@staticmethod
def raw_parser(raw: str, rss_link: str) -> Bangumi | None:
def raw_parser(raw: str) -> Bangumi | None:
language = settings.rss_parser.language
try:
episode = raw_parser(raw)
@@ -72,7 +72,6 @@ class TitleParser:
eps_collect=False if episode.episode > 1 else True,
offset=0,
filter=settings.rss_parser.filter,
rss_link=[rss_link],
)
logger.debug(f"RAW:{raw} >> {title_raw}")
return data

View File

@@ -1,3 +1,2 @@
from .analyser import RSSAnalyser
analyser = RSSAnalyser()
from .engine import RSSEngine

View File

@@ -1,8 +1,9 @@
import logging
import re
from .engine import RSSEngine
from module.conf import settings
from module.database import BangumiDatabase
from module.models import Bangumi
from module.network import RequestContent, TorrentInfo
from module.parser import TitleParser
@@ -10,17 +11,12 @@ from module.parser import TitleParser
logger = logging.getLogger(__name__)
class RSSAnalyser:
def __init__(self):
self._title_analyser = TitleParser()
with BangumiDatabase() as db:
db.update_table()
class RSSAnalyser(TitleParser):
def official_title_parser(self, data: Bangumi, mikan_title: str):
if settings.rss_parser.parser_type == "mikan":
data.official_title = mikan_title if mikan_title else data.official_title
elif settings.rss_parser.parser_type == "tmdb":
tmdb_title, season, year = self._title_analyser.tmdb_parser(
tmdb_title, season, year = self.tmdb_parser(
data.official_title, data.season, settings.rss_parser.language
)
data.official_title = tmdb_title
@@ -31,7 +27,7 @@ class RSSAnalyser:
data.official_title = re.sub(r"[/:.\\]", " ", data.official_title)
@staticmethod
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list:
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[TorrentInfo]:
with RequestContent() as req:
if full_parse:
rss_torrents = req.get_torrents(rss_link)
@@ -44,7 +40,7 @@ class RSSAnalyser:
) -> list:
new_data = []
for torrent in torrents:
data = self._title_analyser.raw_parser(raw=torrent.name, rss_link=rss_link)
data = self.raw_parser(raw=torrent.name)
if data and data.title_raw not in [i.title_raw for i in new_data]:
try:
poster_link, mikan_title = (
@@ -54,6 +50,7 @@ class RSSAnalyser:
except AttributeError:
poster_link, mikan_title = None, None
data.poster_link = poster_link
data.rss_link = rss_link
self.official_title_parser(data, mikan_title)
if not full_parse:
return [data]
@@ -61,10 +58,8 @@ class RSSAnalyser:
logger.debug(f"[RSS] New title found: {data.official_title}")
return new_data
def torrent_to_data(
self, torrent: TorrentInfo, rss_link: str | None = None
) -> Bangumi:
data = self._title_analyser.raw_parser(raw=torrent.name, rss_link=rss_link)
def torrent_to_data(self, torrent: TorrentInfo) -> Bangumi:
data = self.raw_parser(raw=torrent.name)
if data:
try:
poster_link, mikan_title = (
@@ -78,16 +73,18 @@ class RSSAnalyser:
return data
def rss_to_data(
self, rss_link: str, database: BangumiDatabase, full_parse: bool = True
self, rss_link: str, engine: RSSEngine, full_parse: bool = True
) -> list[Bangumi]:
rss_torrents = self.get_rss_torrents(rss_link, full_parse)
torrents_to_add = database.match_list(rss_torrents, rss_link)
torrents_to_add = engine.bangumi.match_list(rss_torrents, rss_link)
if not torrents_to_add:
logger.debug("[RSS] No new title has been found.")
return []
# New List
new_data = self.torrents_to_data(torrents_to_add, rss_link, full_parse)
if new_data:
# Add to database
engine.bangumi.add_all(new_data)
return new_data
else:
return []
@@ -95,6 +92,6 @@ class RSSAnalyser:
def link_to_data(self, link: str) -> Bangumi:
torrents = self.get_rss_torrents(link, False)
for torrent in torrents:
data = self.torrent_to_data(torrent, link)
data = self.torrent_to_data(torrent)
if data:
return data

View File

@@ -0,0 +1,72 @@
import re
import logging
from typing import Optional
from module.models import Bangumi, RSSItem, Torrent
from module.network import RequestContent
from module.downloader import DownloadClient
from module.database import Database, engine
logger = logging.getLogger(__name__)
class RSSEngine(Database):
    """Drives the RSS workflow: pull feeds, match torrents, dispatch downloads.

    Inherits the combined :class:`Database` session, so ``self.rss``,
    ``self.torrent`` and ``self.bangumi`` facades are available.
    """

    def __init__(self, _engine=engine):
        super().__init__(_engine)

    @staticmethod
    def _get_torrents(rss_link: str) -> list[Torrent]:
        """Fetch *rss_link* and convert every feed entry into a ``Torrent`` row."""
        with RequestContent() as req:
            torrent_infos = req.get_torrents(rss_link)
        return [
            Torrent(
                name=info.name,
                url=info.url,
                homepage=info.homepage,
            )
            for info in torrent_infos
        ]

    def get_combine_rss(self) -> list[RSSItem]:
        """Return the RSS items flagged for combined analysis.

        Fix: ``RSSDatabase`` exposes ``search_combine()``; there is no
        ``get_combine()`` method, so the previous call raised AttributeError.
        """
        return self.rss.search_combine()

    def add_rss(self, rss_link: str, name: str | None = None, combine: bool = True):
        """Store a new RSS feed, resolving its title from the feed when *name* is None."""
        if not name:
            with RequestContent() as req:
                name = req.get_rss_title(rss_link)
        rss_data = RSSItem(item_path=name, url=rss_link, combine=combine)
        # RSSDatabase.add() de-duplicates by URL, so re-adding is safe.
        self.rss.add(rss_data)

    def pull_rss(self, rss_item: RSSItem) -> list[Torrent]:
        """Return only the torrents from *rss_item* not yet recorded in the database."""
        torrents = self._get_torrents(rss_item.url)
        return self.torrent.check_new(torrents)

    def match_torrent(self, torrent: Torrent) -> Optional[Bangumi]:
        """Match *torrent* against stored bangumi and tag it with the match's data.

        Returns the matched ``Bangumi`` when the torrent name passes the
        bangumi's exclusion filter, otherwise ``None``.
        """
        matched: Bangumi = self.bangumi.match_torrent(torrent.name)
        if matched:
            # The stored filter is comma-separated; a torrent whose name
            # matches any alternative is excluded from download.
            _filter = matched.filter.replace(",", "|")
            if not re.search(_filter, torrent.name, re.IGNORECASE):
                torrent.refer_id = matched.id
                torrent.save_path = matched.save_path
                return matched
        return None

    def run(self, client: DownloadClient):
        """Pull every enabled feed, download matched torrents, record all new ones."""
        # Get all enabled RSS items
        rss_items: list[RSSItem] = self.rss.search_active()
        for rss_item in rss_items:
            new_torrents = self.pull_rss(rss_item)
            for torrent in new_torrents:
                matched_data = self.match_torrent(torrent)
                if matched_data:
                    if client.add_torrent(torrent, matched_data):
                        torrent.downloaded = True
            # Record every new torrent per feed (matched or not) so it is
            # not re-processed on the next cycle; doing this inside the loop
            # ensures no feed's results are lost.
            self.torrent.add_all(new_torrents)

View File

@@ -1,26 +0,0 @@
import re
from module.database import RSSDatabase
from module.models import Bangumi, RSSTorrents
from module.network import RequestContent, TorrentInfo
class RSSPoller(RSSDatabase):
@staticmethod
def polling(rss_link, req: RequestContent) -> list[TorrentInfo]:
return req.get_torrents(rss_link)
@staticmethod
def filter_torrent(data: Bangumi, torrent: TorrentInfo) -> bool:
if data.title_raw in torrent.name:
_filter = "|".join(data.filter)
if not re.search(_filter, torrent.name):
return True
else:
return False
def foo(self):
rss_datas: list[RSSTorrents] = self.get_rss_data()
with RequestContent() as req:
for rss_data in rss_datas:
self.polling(rss_data.url, req)

View File

@@ -1,4 +1,4 @@
from module.models import Bangumi, TorrentBase
from module.models import Bangumi, Torrent
from module.network import RequestContent
from module.searcher.plugin import search_url
@@ -15,7 +15,7 @@ SEARCH_KEY = [
class SearchTorrent(RequestContent):
def search_torrents(
self, keywords: list[str], site: str = "mikan"
) -> list[TorrentBase]:
) -> list[Torrent]:
url = search_url(site, keywords)
# TorrentInfo to TorrentBase
torrents = self.get_torrents(url)
@@ -24,11 +24,11 @@ class SearchTorrent(RequestContent):
for torrent in torrents:
yield {
"name": torrent.name,
"torrent_link": torrent.torrent_link,
"torrent_link": torrent.url,
"homepage": torrent.homepage,
}
return [TorrentBase(**d) for d in to_dict()]
return [Torrent(**d) for d in to_dict()]
def search_season(self, data: Bangumi):
keywords = [getattr(data, key) for key in SEARCH_KEY if getattr(data, key)]

View File

@@ -1,7 +1,7 @@
import os
from module.conf import LEGACY_DATA_PATH
from module.database import BangumiDatabase
from module.database import Database
from module.models import Bangumi
from module.utils import json_config
@@ -15,8 +15,8 @@ def data_migration():
new_data = []
for info in infos:
new_data.append(Bangumi(**info, rss_link=[rss_link]))
with BangumiDatabase() as database:
database.update_table()
database.insert_list(new_data)
with Database() as db:
db.create_table()
db.bangumi.add_all(new_data)
LEGACY_DATA_PATH.unlink(missing_ok=True)

View File

@@ -1,26 +1,27 @@
from sqlmodel import create_engine, SQLModel
from sqlmodel.pool import StaticPool
from module.database import BangumiDatabase
from module.models import Bangumi
from module.database.combine import Database
from module.models import Bangumi, Torrent, RSSItem
# In-memory SQLite engine shared by the tests in this module. StaticPool
# keeps a single connection alive so every session sees the same in-memory
# database; check_same_thread=False lets that one connection cross threads.
engine = create_engine(
    "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
)
def test_bangumi_database():
# sqlite mock engine
engine = create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
)
SQLModel.metadata.create_all(engine)
test_data = Bangumi(
official_title="test",
official_title="无职转生,到了异世界就拿出真本事",
year="2021",
title_raw="test",
title_raw="Mushoku Tensei",
season=1,
season_raw="第一季",
group_name="test",
dpi="720p",
source="test",
subtitle="test",
season_raw="",
group_name="Lilith-Raws",
dpi="1080p",
source="Baha",
subtitle="CHT",
eps_collect=False,
offset=0,
filter="720p,\\d+-\\d+",
@@ -28,22 +29,50 @@ def test_bangumi_database():
poster_link="/test/test.jpg",
added=False,
rule_name=None,
save_path=None,
save_path="downloads/无职转生,到了异世界就拿出真本事/Season 1",
deleted=False,
)
with BangumiDatabase(engine) as database:
with Database(engine) as db:
db.create_table()
# insert
database.insert_one(test_data)
assert database.search_id(1) == test_data
db.bangumi.add(test_data)
assert db.bangumi.search_id(1) == test_data
# update
test_data.official_title = "test2"
database.update_one(test_data)
assert database.search_id(1) == test_data
test_data.official_title = "无职转生到了异世界就拿出真本事II"
db.bangumi.update(test_data)
assert db.bangumi.search_id(1) == test_data
# search poster
assert database.match_poster("test2 (2021)") == "/test/test.jpg"
assert db.bangumi.match_poster("无职转生到了异世界就拿出真本事II (2021)") == "/test/test.jpg"
# match torrent
result = db.bangumi.match_torrent("[Lilith-Raws] 无职转生,到了异世界就拿出真本事 / Mushoku Tensei - 11 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4]")
assert result.official_title == "无职转生到了异世界就拿出真本事II"
# delete
database.delete_one(1)
assert database.search_id(1) is None
db.bangumi.delete_one(1)
assert db.bangumi.search_id(1) is None
def test_torrent_database():
    """Torrent repository round-trip: insert, look up by id, then update."""
    record = Torrent(
        name="[Sub Group]test S02 01 [720p].mkv",
        url="https://test.com/test.mkv",
    )
    with Database(engine) as db:
        # Insert and read back by primary key.
        db.torrent.add(record)
        assert db.torrent.search(1) == record
        # Flip the downloaded flag and persist the change.
        record.downloaded = True
        db.torrent.update(record)
        assert db.torrent.search(1) == record
def test_rss_database():
    """RSS repository smoke test: adding a feed URL must not raise."""
    feed_url = "https://test.com/test.xml"
    with Database(engine) as db:
        db.rss.add(RSSItem(url=feed_url))

View File

@@ -0,0 +1,18 @@
from module.rss.engine import RSSEngine
from .test_database import engine as e
def test_rss_engine():
    # Integration test against the live Mikan RSS endpoint — requires network.
    with RSSEngine(e) as engine:
        rss_link = "https://mikanani.me/RSS/Bangumi?bangumiId=2353&subgroupid=552"
        # combine=False: register the feed as a standalone (non-aggregated) item.
        engine.add_rss(rss_link, combine=False)
        result = engine.rss.search_active()
        # NOTE(review): index 1 — presumably index 0 is a feed inserted by
        # another test sharing the `e` engine fixture; confirm the ordering.
        assert result[1].item_path == "Mikan Project - 无职转生~到了异世界就拿出真本事~"
        new_torrents = engine.pull_rss(result[1])
        torrent = new_torrents[0]
        assert torrent.name == "[Lilith-Raws] 无职转生,到了异世界就拿出真本事 / Mushoku Tensei - 11 [Baha][WEB-DL][1080p][AVC AAC][CHT][MP4]"