diff --git a/src/module/api/download.py b/src/module/api/download.py
index ed3bc9df..6ce80569 100644
--- a/src/module/api/download.py
+++ b/src/module/api/download.py
@@ -32,8 +32,10 @@ async def download_collection(
     )
     if data:
         with SeasonCollector() as collector:
-            collector.collect_season(data, data.rss_link[0])
-            return {"status": "Success"}
+            if collector.collect_season(data, data.rss_link[0], proxy=True):
+                return {"status": "Success"}
+            else:
+                return {"status": "Failed to add torrent"}
     else:
         return {"status": "Failed to parse link"}
 
diff --git a/src/module/downloader/client/qb_downloader.py b/src/module/downloader/client/qb_downloader.py
index ccc0c4dd..8b22b813 100644
--- a/src/module/downloader/client/qb_downloader.py
+++ b/src/module/downloader/client/qb_downloader.py
@@ -81,7 +81,7 @@ class QbDownloader:
         return self._client.torrents_info(status_filter=status_filter, category=category, tag=tag)
 
     def torrents_add(self, urls, save_path, category, torrent_files=None):
-        return self._client.torrents_add(
+        resp = self._client.torrents_add(
             is_paused=False,
             urls=urls,
             torrent_files=torrent_files,
@@ -89,6 +89,7 @@
             category=category,
             use_auto_torrent_management=False
         )
+        return resp == "Ok."
 
     def torrents_delete(self, hash):
         return self._client.torrents_delete(delete_files=True, torrent_hashes=hash)
diff --git a/src/module/downloader/download_client.py b/src/module/downloader/download_client.py
index d7122cec..c30c1cd3 100644
--- a/src/module/downloader/download_client.py
+++ b/src/module/downloader/download_client.py
@@ -112,9 +112,17 @@ class DownloadClient(TorrentPath):
         logger.info(f"[Downloader] Remove torrents.")
 
     def add_torrent(self, torrent: dict):
-        self.client.torrents_add(
-            urls=torrent["url"], save_path=torrent["save_path"], category="Bangumi"
-        )
+        if self.client.torrents_add(
+            urls=torrent.get("urls"),
+            torrent_files=torrent.get("torrent_files"),
+            save_path=torrent.get("save_path"),
+            category="Bangumi"
+        ):
+            logger.debug(f"[Downloader] Add torrent: {torrent.get('save_path')}")
+            return True
+        else:
+            logger.error(f"[Downloader] Add torrent failed: {torrent.get('save_path')}")
+            return False
 
     def move_torrent(self, hashes, location):
         self.client.move_torrent(hashes=hashes, new_location=location)
diff --git a/src/module/manager/collector.py b/src/module/manager/collector.py
index f7d5804c..ec8d260b 100644
--- a/src/module/manager/collector.py
+++ b/src/module/manager/collector.py
logger.info("Completed!") + if proxy: + torrent_files = [st.get_content(torrent.torrent_link) for torrent in torrents] + return self.add_season_torrents(data, torrents, torrent_files=torrent_files) def subscribe_season(self, data: BangumiData): with BangumiDatabase() as db: diff --git a/src/module/network/request_contents.py b/src/module/network/request_contents.py index b9b2b7af..63168d7b 100644 --- a/src/module/network/request_contents.py +++ b/src/module/network/request_contents.py @@ -46,7 +46,6 @@ class RequestContent(RequestURL): try: soup = self.get_xml(_url, retry) torrent_titles, torrent_urls, torrent_homepage = mikan_parser(soup) - torrents = [] for _title, torrent_url, homepage in zip( torrent_titles, torrent_urls, torrent_homepage