From 5928350ad1166f2648109cf641ee2a038041fa61 Mon Sep 17 00:00:00 2001
From: EstrellaXD
Date: Fri, 19 May 2023 11:39:13 +0800
Subject: [PATCH] =?UTF-8?q?feat:=20=E6=96=B0=E7=9A=84=E6=90=9C=E7=B4=A2?=
 =?UTF-8?q?=E5=99=A8=E6=A8=A1=E5=9D=97?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .github/workflows/pull-request.yml     | 32 ++++++++++++
 src/module/api/auth.py                 |  8 +--
 src/module/api/log.py                  | 10 ++++
 src/module/api/web.py                  |  2 +-
 src/module/manager/eps_complete.py     | 67 +++++++++-----------------
 src/module/manager/torrent.py          |  2 +-
 src/module/searcher/__init__.py        |  1 +
 src/module/searcher/plugin/__init__.py |  8 +++
 src/module/searcher/plugin/mikan.py    | 13 +++++
 src/module/searcher/searcher.py        |  8 +++
 10 files changed, 101 insertions(+), 50 deletions(-)
 create mode 100644 .github/workflows/pull-request.yml
 create mode 100644 src/module/searcher/__init__.py
 create mode 100644 src/module/searcher/plugin/__init__.py
 create mode 100644 src/module/searcher/plugin/mikan.py
 create mode 100644 src/module/searcher/searcher.py

diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml
new file mode 100644
index 00000000..224374e3
--- /dev/null
+++ b/.github/workflows/pull-request.yml
@@ -0,0 +1,32 @@
+name: Create Pull Request
+
+on:
+  push:
+    tags:
+      - '[0-9]+.[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+'
+
+jobs:
+  create-pull-request:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Generate pull request body
+        id: pr
+        run: |
+          echo "body=$(git log --format='* %s' $(git describe --tags --abbrev=0)..HEAD)" >> "$GITHUB_OUTPUT"
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@v3
+        with:
+          token: ${{ secrets.ACCESS_TOKEN }}
+          commit-message: 'chore: release ${{ github.ref_name }}'
+          title: 'chore: release ${{ github.ref_name }}'
+          body: |
+            ${{ steps.pr.outputs.body }}
+          branch: release/${{ github.ref_name }}
+          base: main
+          labels: release
+          draft: false
+          branch-suffix:
timestamp + delete-branch: false \ No newline at end of file diff --git a/src/module/api/auth.py b/src/module/api/auth.py index 88229779..607a0497 100644 --- a/src/module/api/auth.py +++ b/src/module/api/auth.py @@ -31,7 +31,7 @@ async def get_token_data(token: str = Depends(oauth2_scheme)): return payload -@router.post("/api/v1/auth/login", response_model=dict, tags=["login"]) +@router.post("/api/v1/auth/login", response_model=dict, tags=["auth"]) async def login(form_data: OAuth2PasswordRequestForm = Depends()): username = form_data.username password = form_data.password @@ -42,7 +42,7 @@ async def login(form_data: OAuth2PasswordRequestForm = Depends()): return {"access_token": token, "token_type": "bearer", "expire": 86400} -@router.get("/api/v1/auth/refresh_token", response_model=dict, tags=["login"]) +@router.get("/api/v1/auth/refresh_token", response_model=dict, tags=["auth"]) async def refresh( current_user: User = Depends(get_current_user) ): @@ -52,7 +52,7 @@ async def refresh( return {"access_token": token, "token_type": "bearer", "expire": 86400} -@router.get("/api/v1/auth/logout", response_model=dict, tags=["login"]) +@router.get("/api/v1/auth/logout", response_model=dict, tags=["auth"]) async def logout( current_user: User = Depends(get_current_user) ): @@ -61,7 +61,7 @@ async def logout( return {"message": "logout success"} -@router.post("/api/v1/auth/update", response_model=dict, tags=["users"]) +@router.post("/api/v1/auth/update", response_model=dict, tags=["auth"]) async def update_user(data: User, current_user: User = Depends(get_current_user)): if not current_user: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token") diff --git a/src/module/api/log.py b/src/module/api/log.py index bd406365..645f5695 100644 --- a/src/module/api/log.py +++ b/src/module/api/log.py @@ -15,6 +15,16 @@ async def get_log(): return Response("Log file not found", status_code=404) +@router.get("/api/v1/log/clear", tags=["log"]) +async def 
clear_log(): + if os.path.isfile(LOG_PATH): + with open(LOG_PATH, "w") as f: + f.write("") + return {"status": "ok"} + else: + return Response("Log file not found", status_code=404) + + diff --git a/src/module/api/web.py b/src/module/api/web.py index 76dcc91e..51942887 100644 --- a/src/module/api/web.py +++ b/src/module/api/web.py @@ -20,4 +20,4 @@ if VERSION != "DEV_VERSION": else: @router.get("/", status_code=302, tags=["html"]) def index(): - return RedirectResponse("/docs") \ No newline at end of file + return RedirectResponse("/docs") diff --git a/src/module/manager/eps_complete.py b/src/module/manager/eps_complete.py index 88471a78..d3e2fc5b 100644 --- a/src/module/manager/eps_complete.py +++ b/src/module/manager/eps_complete.py @@ -6,69 +6,50 @@ from module.network import RequestContent from module.downloader import DownloadClient from module.models import BangumiData from module.database import BangumiDatabase +from module.searcher import SearchTorrent from module.conf import settings logger = logging.getLogger(__name__) +SEARCH_KEY = [ + "group_name", + "title_raw", + "season_raw", + "subtitle", + "source", + "dpi", +] class FullSeasonGet(DownloadClient): - def __init__(self): - super().__init__() - self.SEARCH_KEY = [ - "group_name", - "title_raw", - "season_raw", - "subtitle", - "source", - "dpi", - ] - self.CUSTOM_URL = ( - "https://mikanani.me" - if settings.rss_parser.custom_url == "" - else settings.rss_parser.custom_url - ) - if "://" not in self.CUSTOM_URL: - if re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", self.CUSTOM_URL): - self.CUSTOM_URL = f"http://{self.CUSTOM_URL}" - self.CUSTOM_URL = f"https://{self.CUSTOM_URL}" - self.save_path = settings.downloader.path - - def init_eps_complete_search_str(self, data: BangumiData): - test = [] - for key in self.SEARCH_KEY: + def init_search_str(self, data: BangumiData): + str_list = [] + for key in SEARCH_KEY: data_dict = data.dict() if data_dict[key] is not None: - test.append(data_dict[key]) - 
search_str_pre = "+".join(test) - search_str = re.sub(r"[\W_ ]", "+", search_str_pre) - return search_str + str_list.append(data_dict[key]) + return str_list def get_season_torrents(self, data: BangumiData): - keyword = self.init_eps_complete_search_str(data) - with RequestContent() as req: - torrents = req.get_torrents( - f"{self.CUSTOM_URL}/RSS/Search?searchstr={keyword}" - ) + keywords = self.init_search_str(data) + with SearchTorrent() as st: + torrents = st.search_torrents(keywords) return [torrent for torrent in torrents if data.title_raw in torrent.name] - def collect_season_torrents(self, data: BangumiData, torrents): - downloads = [] + def collect_season(self, data: BangumiData, torrents): + official_title = f"{data.official_title}({data.year})" if data.year else data.official_title for torrent in torrents: download_info = { "url": torrent.torrent_link, "save_path": os.path.join( - self.save_path, data.official_title, f"Season {data.season}" + settings.downloader.path, official_title, f"Season {data.season}" ), } - downloads.append(download_info) - return downloads + self.add_torrent(download_info) def download_season(self, data: BangumiData): logger.info(f"Start collecting {data.official_title} Season {data.season}...") torrents = self.get_season_torrents(data) - downloads = self.collect_season_torrents(data, torrents) - for download in downloads: - self.add_torrent(download) + self.collect_season(data, torrents) logger.info("Completed!") data.eps_collect = True @@ -87,10 +68,8 @@ class FullSeasonGet(DownloadClient): ): with RequestContent() as req: torrents = req.get_torrents(link) - downloads = self.collect_season_torrents(data, torrents) logger.info(f"Starting download {data.official_title} Season {data.season}...") - for download in downloads: - self.add_torrent(download) + self.collect_season(data, torrents) logger.info("Completed!") def add_subscribe(self, data: BangumiData): diff --git a/src/module/manager/torrent.py b/src/module/manager/torrent.py 
index 354794f6..3d90d291 100644
--- a/src/module/manager/torrent.py
+++ b/src/module/manager/torrent.py
@@ -32,7 +32,7 @@ class TorrentManager(DownloadClient):
     def delete_rule(self, data: BangumiData):
         rule_name = f"{data.official_title}({data.year})" if data.year else data.title_raw
         if settings.bangumi_manage.group_tag:
-            rule_name = f"[{data.group_name}] {rule_name}" if self.group_tag else rule_name
+            rule_name = f"[{data.group_name}] {rule_name}"
         self.remove_rule(rule_name)
 
     def set_new_path(self, data: BangumiData):
diff --git a/src/module/searcher/__init__.py b/src/module/searcher/__init__.py
new file mode 100644
index 00000000..534573fe
--- /dev/null
+++ b/src/module/searcher/__init__.py
@@ -0,0 +1 @@
+from .searcher import SearchTorrent
diff --git a/src/module/searcher/plugin/__init__.py b/src/module/searcher/plugin/__init__.py
new file mode 100644
index 00000000..47852a16
--- /dev/null
+++ b/src/module/searcher/plugin/__init__.py
@@ -0,0 +1,8 @@
+from .mikan import mikan_url
+
+
+def search_url(site: str, keywords: list[str]):
+    if site == "mikan":
+        return mikan_url(keywords)
+    else:
+        raise NotImplementedError(f"site {site} is not supported")
diff --git a/src/module/searcher/plugin/mikan.py b/src/module/searcher/plugin/mikan.py
new file mode 100644
index 00000000..72e651f3
--- /dev/null
+++ b/src/module/searcher/plugin/mikan.py
@@ -0,0 +1,13 @@
+import re
+
+from module.conf import settings
+
+
+def mikan_url(keywords: list[str]):
+    keyword = "+".join(keywords)
+    search_str = re.sub(r"[\W_ ]", "+", keyword)
+    url = f"{settings.rss_parser.custom_url or 'https://mikanani.me'}/RSS/Search?searchstr={search_str}"
+    if "://" not in url:
+        url = f"https://{url}"
+    return url
+
diff --git a/src/module/searcher/searcher.py b/src/module/searcher/searcher.py
new file mode 100644
index 00000000..e4225c37
--- /dev/null
+++ b/src/module/searcher/searcher.py
@@ -0,0 +1,8 @@
+from .plugin import search_url
+from module.network import
RequestContent
+
+
+class SearchTorrent(RequestContent):
+    def search_torrents(self, keywords: list[str], site: str = "mikan") -> list:
+        url = search_url(site, keywords)
+        return self.get_torrents(url)