feat: 新的搜索器模块

This commit is contained in:
EstrellaXD
2023-05-19 11:39:13 +08:00
parent 9aa6b09005
commit 5928350ad1
10 changed files with 101 additions and 50 deletions

32
.github/workflows/pull-request.yml vendored Normal file
View File

@@ -0,0 +1,32 @@
# Open a release pull request whenever a version tag (x.y or x.y.z) is pushed.
name: Create Pull Request

on:
  push:
    tags:
      # NOTE: GitHub tag filters are glob patterns, not regexes — the previous
      # '\d+\.\d+\.\d+' patterns could never match a pushed tag.
      - '[0-9]+.[0-9]+.[0-9]+'
      - '[0-9]+.[0-9]+'

jobs:
  create-pull-request:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          # Full history: git describe/git log need earlier tags to compute the range.
          fetch-depth: 0
      - name: Generate pull request body
        id: pr
        # `::set-output` is deprecated (and single-line only); write a multiline
        # output through $GITHUB_OUTPUT with a heredoc delimiter instead.
        # HEAD is the freshly pushed tag, so describe from HEAD^ to find the
        # *previous* tag (fall back to the root commit for the first release).
        run: |
          prev="$(git describe --tags --abbrev=0 HEAD^ 2>/dev/null || git rev-list --max-parents=0 HEAD)"
          {
            echo 'body<<EOF'
            git log --format='* %s' "${prev}..HEAD"
            echo 'EOF'
          } >> "$GITHUB_OUTPUT"
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v3
        with:
          token: ${{ secrets.ACCESS_TOKEN }}
          # ref_name is the bare tag (e.g. 3.0.1), not refs/tags/3.0.1.
          commit-message: 'chore: release ${{ github.ref_name }}'
          title: 'chore: release ${{ github.ref_name }}'
          body: |
            ${{ steps.pr.outputs.body }}
          branch: release/${{ github.ref_name }}
          base: main
          labels: release
          draft: false
          branch-suffix: timestamp
          delete-branch: false

View File

@@ -31,7 +31,7 @@ async def get_token_data(token: str = Depends(oauth2_scheme)):
return payload
@router.post("/api/v1/auth/login", response_model=dict, tags=["login"])
@router.post("/api/v1/auth/login", response_model=dict, tags=["auth"])
async def login(form_data: OAuth2PasswordRequestForm = Depends()):
username = form_data.username
password = form_data.password
@@ -42,7 +42,7 @@ async def login(form_data: OAuth2PasswordRequestForm = Depends()):
return {"access_token": token, "token_type": "bearer", "expire": 86400}
@router.get("/api/v1/auth/refresh_token", response_model=dict, tags=["login"])
@router.get("/api/v1/auth/refresh_token", response_model=dict, tags=["auth"])
async def refresh(
current_user: User = Depends(get_current_user)
):
@@ -52,7 +52,7 @@ async def refresh(
return {"access_token": token, "token_type": "bearer", "expire": 86400}
@router.get("/api/v1/auth/logout", response_model=dict, tags=["login"])
@router.get("/api/v1/auth/logout", response_model=dict, tags=["auth"])
async def logout(
current_user: User = Depends(get_current_user)
):
@@ -61,7 +61,7 @@ async def logout(
return {"message": "logout success"}
@router.post("/api/v1/auth/update", response_model=dict, tags=["users"])
@router.post("/api/v1/auth/update", response_model=dict, tags=["auth"])
async def update_user(data: User, current_user: User = Depends(get_current_user)):
if not current_user:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token")

View File

@@ -15,6 +15,16 @@ async def get_log():
return Response("Log file not found", status_code=404)
@router.get("/api/v1/log/clear", tags=["log"])
async def clear_log():
    """Truncate the application log file.

    Returns ``{"status": "ok"}`` on success, or a 404 response when the
    log file does not exist.
    """
    # Guard clause: nothing to clear when the file is absent.
    if not os.path.isfile(LOG_PATH):
        return Response("Log file not found", status_code=404)
    with open(LOG_PATH, "w") as log_file:
        log_file.write("")
    return {"status": "ok"}

View File

@@ -20,4 +20,4 @@ if VERSION != "DEV_VERSION":
else:
@router.get("/", status_code=302, tags=["html"])
def index():
return RedirectResponse("/docs")
return RedirectResponse("/docs")

View File

@@ -6,69 +6,50 @@ from module.network import RequestContent
from module.downloader import DownloadClient
from module.models import BangumiData
from module.database import BangumiDatabase
from module.searcher import SearchTorrent
from module.conf import settings
logger = logging.getLogger(__name__)
SEARCH_KEY = [
"group_name",
"title_raw",
"season_raw",
"subtitle",
"source",
"dpi",
]
class FullSeasonGet(DownloadClient):
def __init__(self):
super().__init__()
self.SEARCH_KEY = [
"group_name",
"title_raw",
"season_raw",
"subtitle",
"source",
"dpi",
]
self.CUSTOM_URL = (
"https://mikanani.me"
if settings.rss_parser.custom_url == ""
else settings.rss_parser.custom_url
)
if "://" not in self.CUSTOM_URL:
if re.match(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", self.CUSTOM_URL):
self.CUSTOM_URL = f"http://{self.CUSTOM_URL}"
self.CUSTOM_URL = f"https://{self.CUSTOM_URL}"
self.save_path = settings.downloader.path
def init_eps_complete_search_str(self, data: BangumiData):
test = []
for key in self.SEARCH_KEY:
def init_search_str(self, data: BangumiData):
str_list = []
for key in SEARCH_KEY:
data_dict = data.dict()
if data_dict[key] is not None:
test.append(data_dict[key])
search_str_pre = "+".join(test)
search_str = re.sub(r"[\W_ ]", "+", search_str_pre)
return search_str
str_list.append(data_dict[key])
return str_list
def get_season_torrents(self, data: BangumiData):
keyword = self.init_eps_complete_search_str(data)
with RequestContent() as req:
torrents = req.get_torrents(
f"{self.CUSTOM_URL}/RSS/Search?searchstr={keyword}"
)
keywords = self.init_search_str(data)
with SearchTorrent() as st:
torrents = st.search_torrents(keywords)
return [torrent for torrent in torrents if data.title_raw in torrent.name]
def collect_season_torrents(self, data: BangumiData, torrents):
downloads = []
def collect_season(self, data: BangumiData, torrents):
official_title = f"{data.official_title}({data.year})" if data.year else data.official_title
for torrent in torrents:
download_info = {
"url": torrent.torrent_link,
"save_path": os.path.join(
self.save_path, data.official_title, f"Season {data.season}"
settings.downloader.path, official_title, f"Season {data.season}"
),
}
downloads.append(download_info)
return downloads
self.add_torrent(download_info)
def download_season(self, data: BangumiData):
logger.info(f"Start collecting {data.official_title} Season {data.season}...")
torrents = self.get_season_torrents(data)
downloads = self.collect_season_torrents(data, torrents)
for download in downloads:
self.add_torrent(download)
self.collect_season(data, torrents)
logger.info("Completed!")
data.eps_collect = True
@@ -87,10 +68,8 @@ class FullSeasonGet(DownloadClient):
):
with RequestContent() as req:
torrents = req.get_torrents(link)
downloads = self.collect_season_torrents(data, torrents)
logger.info(f"Starting download {data.official_title} Season {data.season}...")
for download in downloads:
self.add_torrent(download)
self.collect_season(data, torrents)
logger.info("Completed!")
def add_subscribe(self, data: BangumiData):

View File

@@ -32,7 +32,7 @@ class TorrentManager(DownloadClient):
def delete_rule(self, data: BangumiData):
rule_name = f"{data.official_title}({data.year})" if data.year else data.title_raw
if settings.bangumi_manage.group_tag:
rule_name = f"[{data.group_name}] {rule_name}" if self.group_tag else rule_name
rule_name = f"[{data.group_name}] {rule_name}" if settings.bangumi_manage.group_tag else rule_name
self.remove_rule(rule_name)
def set_new_path(self, data: BangumiData):

View File

@@ -0,0 +1 @@
from .searcher import SearchTorrent

View File

@@ -0,0 +1,8 @@
from .mikan import mikan_url
def search_url(site: str, keywords: list[str]):
    """Build the torrent-search URL for *site* from *keywords*.

    Raises:
        NotImplementedError: when *site* has no URL builder.
    """
    # Guard clause: only the Mikan builder exists so far.
    if site != "mikan":
        raise NotImplementedError(f"site {site} is not supported")
    return mikan_url(keywords)

View File

@@ -0,0 +1,13 @@
import re
from module.conf import settings
def mikan_url(keywords: list[str]) -> str:
    """Build a Mikan RSS search URL from a list of search keywords.

    Keywords are joined with ``+`` and any remaining non-word characters
    (punctuation, whitespace, underscores) are normalised to ``+`` so the
    query string stays URL-safe.
    """
    keyword = "+".join(keywords)
    search_str = re.sub(r"[\W_ ]", "+", keyword)
    # Bug fix: the previous code did "'%20'.join(search_str)", which — since
    # search_str is a *string* — inserted %20 between every single character
    # ("abc" -> "a%20b%20c"). Use the '+'-joined query string directly, as the
    # pre-refactor FullSeasonGet code did.
    url = f"{settings.rss_parser.custom_url}/RSS/Search?searchstr={search_str}"
    # Default to HTTPS when the configured base URL carries no scheme.
    if "://" not in url:
        url = f"https://{url}"
    return url

View File

@@ -0,0 +1,8 @@
from .plugin import search_url
from module.network import RequestContent
class SearchTorrent(RequestContent):
    """Torrent searcher: builds a site-specific search URL and fetches the
    matching torrents over the inherited HTTP session."""

    def search_torrents(self, keywords: list[str], site: str = "mikan") -> list:
        """Search *site* (default ``"mikan"``) for torrents matching *keywords*."""
        return self.get_torrents(search_url(site, keywords))