mirror of
https://github.com/jxxghp/MoviePilot.git
synced 2026-03-20 03:57:30 +08:00
feat 拆分插件仓库
This commit is contained in:
5
.github/workflows/build.yml
vendored
5
.github/workflows/build.yml
vendored
@@ -96,6 +96,11 @@ jobs:
|
||||
New-Item -Path "nginx/temp/__keep__.txt" -ItemType File -Force
|
||||
New-Item -Path "nginx/logs" -ItemType Directory -Force
|
||||
New-Item -Path "nginx/logs/__keep__.txt" -ItemType File -Force
|
||||
Invoke-WebRequest -Uri "https://github.com/jxxghp/MoviePilot-Plugins/archive/refs/heads/main.zip" -OutFile "MoviePilot-Plugins-main.zip"
|
||||
Expand-Archive -Path "MoviePilot-Plugins-main.zip" -DestinationPath "MoviePilot-Plugins-main"
|
||||
Move-Item -Path "MoviePilot-Plugins-main/MoviePilot-Plugins-main/plugins/*" -Destination "plugins/"
|
||||
Remove-Item -Path "MoviePilot-Plugins-main.zip"
|
||||
Remove-Item -Path "MoviePilot-Plugins-main" -Recurse -Force
|
||||
shell: pwsh
|
||||
|
||||
- name: Pyinstaller
|
||||
|
||||
@@ -76,7 +76,10 @@ RUN cp -f /app/nginx.conf /etc/nginx/nginx.template.conf \
|
||||
&& locale-gen zh_CN.UTF-8 \
|
||||
&& FRONTEND_VERSION=$(curl -sL "https://api.github.com/repos/jxxghp/MoviePilot-Frontend/releases/latest" | jq -r .tag_name) \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Frontend/releases/download/${FRONTEND_VERSION}/dist.zip" | busybox unzip -d / - \
|
||||
&& mv /dist /public
|
||||
&& mv /dist /public \
|
||||
&& curl -sL "https://github.com/jxxghp/MoviePilot-Plugins/archive/refs/heads/main.zip" | busybox unzip -d / - \
|
||||
&& mv /MoviePilot-Plugins-main/plugins/* /app/app/plugins/ \
|
||||
&& rm -rf /MoviePilot-Plugins-main
|
||||
EXPOSE 3000
|
||||
VOLUME [ "/config" ]
|
||||
ENTRYPOINT [ "/entrypoint" ]
|
||||
|
||||
@@ -1,342 +0,0 @@
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app import schemas
|
||||
from app.core.config import settings
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.log import logger
|
||||
from app.schemas import NotificationType
|
||||
|
||||
|
||||
class AutoBackup(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "自动备份"
|
||||
# 插件描述
|
||||
plugin_desc = "自动备份数据和配置文件。"
|
||||
# 插件图标
|
||||
plugin_icon = "backup.png"
|
||||
# 主题色
|
||||
plugin_color = "#4FB647"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "autobackup_"
|
||||
# 加载顺序
|
||||
plugin_order = 17
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_cron = None
|
||||
_cnt = None
|
||||
_onlyonce = False
|
||||
_notify = False
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._cnt = config.get("cnt")
|
||||
self._notify = config.get("notify")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
|
||||
# 加载模块
|
||||
if self._enabled:
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__backup,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="自动备份")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"自动备份服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.__backup, trigger='date',
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="自动备份")
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
self.update_config({
|
||||
"onlyonce": False,
|
||||
"cron": self._cron,
|
||||
"enabled": self._enabled,
|
||||
"cnt": self._cnt,
|
||||
"notify": self._notify,
|
||||
})
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def __backup(self):
|
||||
"""
|
||||
自动备份、删除备份
|
||||
"""
|
||||
logger.info(f"当前时间 {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))} 开始备份")
|
||||
|
||||
# docker用默认路径
|
||||
bk_path = self.get_data_path()
|
||||
|
||||
# 备份
|
||||
zip_file = self.backup_file(bk_path=bk_path)
|
||||
|
||||
if zip_file:
|
||||
success = True
|
||||
msg = f"备份完成 备份文件 {zip_file}"
|
||||
logger.info(msg)
|
||||
else:
|
||||
success = False
|
||||
msg = "创建备份失败"
|
||||
logger.error(msg)
|
||||
|
||||
# 清理备份
|
||||
bk_cnt = 0
|
||||
del_cnt = 0
|
||||
if self._cnt:
|
||||
# 获取指定路径下所有以"bk"开头的文件,按照创建时间从旧到新排序
|
||||
files = sorted(glob.glob(f"{bk_path}/bk**"), key=os.path.getctime)
|
||||
bk_cnt = len(files)
|
||||
# 计算需要删除的文件数
|
||||
del_cnt = bk_cnt - int(self._cnt)
|
||||
if del_cnt > 0:
|
||||
logger.info(
|
||||
f"获取到 {bk_path} 路径下备份文件数量 {bk_cnt} 保留数量 {int(self._cnt)} 需要删除备份文件数量 {del_cnt}")
|
||||
|
||||
# 遍历并删除最旧的几个备份
|
||||
for i in range(del_cnt):
|
||||
os.remove(files[i])
|
||||
logger.debug(f"删除备份文件 {files[i]} 成功")
|
||||
else:
|
||||
logger.info(
|
||||
f"获取到 {bk_path} 路径下备份文件数量 {bk_cnt} 保留数量 {int(self._cnt)} 无需删除")
|
||||
|
||||
# 发送通知
|
||||
if self._notify:
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title="【自动备份任务完成】",
|
||||
text=f"创建备份{'成功' if zip_file else '失败'}\n"
|
||||
f"清理备份数量 {del_cnt}\n"
|
||||
f"剩余备份数量 {bk_cnt - del_cnt}")
|
||||
|
||||
return success, msg
|
||||
|
||||
@staticmethod
|
||||
def backup_file(bk_path: Path = None):
|
||||
"""
|
||||
@param bk_path 自定义备份路径
|
||||
"""
|
||||
try:
|
||||
# 创建备份文件夹
|
||||
config_path = Path(settings.CONFIG_PATH)
|
||||
backup_file = f"bk_{time.strftime('%Y%m%d%H%M%S')}"
|
||||
backup_path = bk_path / backup_file
|
||||
backup_path.mkdir(parents=True)
|
||||
# 把现有的相关文件进行copy备份
|
||||
if settings.LIBRARY_CATEGORY:
|
||||
shutil.copy(f'{config_path}/category.yaml', backup_path)
|
||||
shutil.copy(f'{config_path}/user.db', backup_path)
|
||||
|
||||
zip_file = str(backup_path) + '.zip'
|
||||
if os.path.exists(zip_file):
|
||||
zip_file = str(backup_path) + '.zip'
|
||||
shutil.make_archive(str(backup_path), 'zip', str(backup_path))
|
||||
shutil.rmtree(str(backup_path))
|
||||
return zip_file
|
||||
except IOError:
|
||||
return None
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
return [{
|
||||
"path": "/backup",
|
||||
"endpoint": self.__backup,
|
||||
"methods": ["GET"],
|
||||
"summary": "MoviePilot备份",
|
||||
"description": "MoviePilot备份",
|
||||
}]
|
||||
|
||||
def backup(self) -> schemas.Response:
|
||||
"""
|
||||
API调用备份
|
||||
"""
|
||||
success, msg = self.__backup()
|
||||
return schemas.Response(
|
||||
success=success,
|
||||
message=msg
|
||||
)
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '开启通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '备份周期'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cnt',
|
||||
'label': '最大保留备份数'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '备份文件路径默认为本地映射的config/plugins/AutoBackup。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"request_method": "POST",
|
||||
"webhook_url": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
@@ -1,591 +0,0 @@
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.transfer import TransferChain
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager
|
||||
from app.db.downloadhistory_oper import DownloadHistoryOper
|
||||
from app.db.transferhistory_oper import TransferHistoryOper
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.log import logger
|
||||
from app.schemas import NotificationType, DownloadHistory
|
||||
from app.schemas.types import EventType
|
||||
|
||||
|
||||
class AutoClean(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "定时清理媒体库"
|
||||
# 插件描述
|
||||
plugin_desc = "定时清理用户下载的种子、源文件、媒体库文件。"
|
||||
# 插件图标
|
||||
plugin_icon = "clean.png"
|
||||
# 主题色
|
||||
plugin_color = "#3377ed"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "autoclean_"
|
||||
# 加载顺序
|
||||
plugin_order = 23
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_cron = None
|
||||
_type = None
|
||||
_onlyonce = False
|
||||
_notify = False
|
||||
_cleantype = None
|
||||
_cleandate = None
|
||||
_cleanuser = None
|
||||
_downloadhis = None
|
||||
_transferhis = None
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._notify = config.get("notify")
|
||||
self._cleantype = config.get("cleantype")
|
||||
self._cleandate = config.get("cleandate")
|
||||
self._cleanuser = config.get("cleanuser")
|
||||
|
||||
# 加载模块
|
||||
if self._enabled:
|
||||
self._downloadhis = DownloadHistoryOper()
|
||||
self._transferhis = TransferHistoryOper()
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__clean,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="定时清理媒体库")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"定时清理媒体库服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.__clean, trigger='date',
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="定时清理媒体库")
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
self.update_config({
|
||||
"onlyonce": False,
|
||||
"cron": self._cron,
|
||||
"cleantype": self._cleantype,
|
||||
"cleandate": self._cleandate,
|
||||
"enabled": self._enabled,
|
||||
"cleanuser": self._cleanuser,
|
||||
"notify": self._notify,
|
||||
})
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def __get_clean_date(self, deltatime: str = None):
|
||||
# 清理日期
|
||||
current_time = datetime.now()
|
||||
if deltatime:
|
||||
days_ago = current_time - timedelta(days=int(deltatime))
|
||||
else:
|
||||
days_ago = current_time - timedelta(days=int(self._cleandate))
|
||||
return days_ago.strftime("%Y-%m-%d")
|
||||
|
||||
def __clean(self):
|
||||
"""
|
||||
定时清理媒体库
|
||||
"""
|
||||
if not self._cleandate:
|
||||
logger.error("未配置媒体库全局清理时间,停止运行")
|
||||
return
|
||||
|
||||
# 查询用户清理日期之前的下载历史,不填默认清理全部用户的下载
|
||||
if not self._cleanuser:
|
||||
clean_date = self.__get_clean_date()
|
||||
downloadhis_list = self._downloadhis.list_by_user_date(date=clean_date)
|
||||
logger.info(f'获取到日期 {clean_date} 之前的下载历史 {len(downloadhis_list)} 条')
|
||||
self.__clean_history(date=clean_date, clean_type=self._cleantype, downloadhis_list=downloadhis_list)
|
||||
|
||||
# 根据填写的信息判断怎么清理
|
||||
else:
|
||||
# username:days#cleantype
|
||||
clean_type = self._cleantype
|
||||
clean_date = self._cleandate
|
||||
|
||||
# 1.3.7版本及之前处理多位用户
|
||||
if str(self._cleanuser).count(','):
|
||||
for username in str(self._cleanuser).split(","):
|
||||
downloadhis_list = self._downloadhis.list_by_user_date(date=clean_date,
|
||||
username=username)
|
||||
logger.info(
|
||||
f'获取到用户 {username} 日期 {clean_date} 之前的下载历史 {len(downloadhis_list)} 条')
|
||||
self.__clean_history(date=clean_date, clean_type=self._cleantype, downloadhis_list=downloadhis_list)
|
||||
return
|
||||
|
||||
for userinfo in str(self._cleanuser).split("\n"):
|
||||
if userinfo.count('#'):
|
||||
clean_type = userinfo.split('#')[1]
|
||||
username_and_days = userinfo.split('#')[0]
|
||||
else:
|
||||
username_and_days = userinfo
|
||||
if username_and_days.count(':'):
|
||||
clean_date = username_and_days.split(':')[1]
|
||||
username = username_and_days.split(':')[0]
|
||||
else:
|
||||
username = userinfo
|
||||
|
||||
# 转strftime
|
||||
clean_date = self.__get_clean_date(clean_date)
|
||||
logger.info(f'{username} 使用 {clean_type} 清理方式,清理 {clean_date} 之前的下载历史')
|
||||
downloadhis_list = self._downloadhis.list_by_user_date(date=clean_date,
|
||||
username=username)
|
||||
logger.info(
|
||||
f'获取到用户 {username} 日期 {clean_date} 之前的下载历史 {len(downloadhis_list)} 条')
|
||||
self.__clean_history(date=clean_date, clean_type=clean_type,
|
||||
downloadhis_list=downloadhis_list)
|
||||
|
||||
def __clean_history(self, date: str, clean_type: str, downloadhis_list: List[DownloadHistory]):
|
||||
"""
|
||||
清理下载历史、转移记录
|
||||
"""
|
||||
if not downloadhis_list:
|
||||
logger.warn(f"未获取到日期 {date} 之前的下载记录,停止运行")
|
||||
return
|
||||
|
||||
# 读取历史记录
|
||||
pulgin_history = self.get_data('history') or []
|
||||
|
||||
# 创建一个字典来保存分组结果
|
||||
downloadhis_grouped_dict: Dict[tuple, List[DownloadHistory]] = defaultdict(list)
|
||||
# 遍历DownloadHistory对象列表
|
||||
for downloadhis in downloadhis_list:
|
||||
# 获取type和tmdbid的值
|
||||
dtype = downloadhis.type
|
||||
tmdbid = downloadhis.tmdbid
|
||||
|
||||
# 将DownloadHistory对象添加到对应分组的列表中
|
||||
downloadhis_grouped_dict[(dtype, tmdbid)].append(downloadhis)
|
||||
|
||||
# 输出分组结果
|
||||
for key, downloadhis_list in downloadhis_grouped_dict.items():
|
||||
logger.info(f"开始清理 {key}")
|
||||
del_transferhis_cnt = 0
|
||||
del_media_name = downloadhis_list[0].title
|
||||
del_media_user = downloadhis_list[0].username
|
||||
del_media_type = downloadhis_list[0].type
|
||||
del_media_year = downloadhis_list[0].year
|
||||
del_media_season = downloadhis_list[0].seasons
|
||||
del_media_episode = downloadhis_list[0].episodes
|
||||
del_image = downloadhis_list[0].image
|
||||
for downloadhis in downloadhis_list:
|
||||
if not downloadhis.download_hash:
|
||||
logger.debug(f'下载历史 {downloadhis.id} {downloadhis.title} 未获取到download_hash,跳过处理')
|
||||
continue
|
||||
# 根据hash获取转移记录
|
||||
transferhis_list = self._transferhis.list_by_hash(download_hash=downloadhis.download_hash)
|
||||
if not transferhis_list:
|
||||
logger.warn(f"下载历史 {downloadhis.download_hash} 未查询到转移记录,跳过处理")
|
||||
continue
|
||||
|
||||
for history in transferhis_list:
|
||||
# 册除媒体库文件
|
||||
if clean_type in ["dest", "all"]:
|
||||
TransferChain().delete_files(Path(history.dest))
|
||||
# 删除记录
|
||||
self._transferhis.delete(history.id)
|
||||
# 删除源文件
|
||||
if clean_type in ["src", "all"]:
|
||||
TransferChain().delete_files(Path(history.src))
|
||||
# 发送事件
|
||||
eventmanager.send_event(
|
||||
EventType.DownloadFileDeleted,
|
||||
{
|
||||
"src": history.src
|
||||
}
|
||||
)
|
||||
|
||||
# 累加删除数量
|
||||
del_transferhis_cnt += len(transferhis_list)
|
||||
|
||||
if del_transferhis_cnt:
|
||||
# 发送消息
|
||||
if self._notify:
|
||||
self.post_message(
|
||||
mtype=NotificationType.MediaServer,
|
||||
title="【定时清理媒体库任务完成】",
|
||||
text=f"清理媒体名称 {del_media_name}\n"
|
||||
f"下载媒体用户 {del_media_user}\n"
|
||||
f"删除历史记录 {del_transferhis_cnt}")
|
||||
|
||||
pulgin_history.append({
|
||||
"type": del_media_type,
|
||||
"title": del_media_name,
|
||||
"year": del_media_year,
|
||||
"season": del_media_season,
|
||||
"episode": del_media_episode,
|
||||
"image": del_image,
|
||||
"del_time": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
|
||||
})
|
||||
|
||||
# 保存历史
|
||||
self.save_data("history", pulgin_history)
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '开启通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '0 0 ? ? ?'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'cleantype',
|
||||
'label': '全局清理方式',
|
||||
'items': [
|
||||
{'title': '媒体库文件', 'value': 'dest'},
|
||||
{'title': '源文件', 'value': 'src'},
|
||||
{'title': '所有文件', 'value': 'all'},
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cleandate',
|
||||
'label': '全局清理日期',
|
||||
'placeholder': '清理多少天之前的下载记录(天)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'cleanuser',
|
||||
'label': '清理配置',
|
||||
'rows': 6,
|
||||
'placeholder': '每一行一个配置,支持以下几种配置方式,清理方式支持 src、desc、all 分别对应源文件,媒体库文件,所有文件\n'
|
||||
'用户名缺省默认清理所有用户(慎重留空),清理天数缺省默认使用全局清理天数,清理方式缺省默认使用全局清理方式\n'
|
||||
'用户名/插件名(豆瓣想看、豆瓣榜单、RSS订阅)\n'
|
||||
'用户名#清理方式\n'
|
||||
'用户名:清理天数\n'
|
||||
'用户名:清理天数#清理方式',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"onlyonce": False,
|
||||
"notify": False,
|
||||
"cleantype": "dest",
|
||||
"cron": "",
|
||||
"cleanuser": "",
|
||||
"cleandate": 30
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
"""
|
||||
拼装插件详情页面,需要返回页面配置,同时附带数据
|
||||
"""
|
||||
# 查询同步详情
|
||||
historys = self.get_data('history')
|
||||
if not historys:
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'text': '暂无数据',
|
||||
'props': {
|
||||
'class': 'text-center',
|
||||
}
|
||||
}
|
||||
]
|
||||
# 数据按时间降序排序
|
||||
historys = sorted(historys, key=lambda x: x.get('del_time'), reverse=True)
|
||||
# 拼装页面
|
||||
contents = []
|
||||
for history in historys:
|
||||
htype = history.get("type")
|
||||
title = history.get("title")
|
||||
year = history.get("year")
|
||||
season = history.get("season")
|
||||
episode = history.get("episode")
|
||||
image = history.get("image")
|
||||
del_time = history.get("del_time")
|
||||
|
||||
if season:
|
||||
sub_contents = [
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{htype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'标题:{title}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'年份:{year}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'季:{season}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'集:{episode}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{del_time}'
|
||||
}
|
||||
]
|
||||
else:
|
||||
sub_contents = [
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{htype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'标题:{title}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'年份:{year}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{del_time}'
|
||||
}
|
||||
]
|
||||
|
||||
contents.append(
|
||||
{
|
||||
'component': 'VCard',
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'd-flex justify-space-start flex-nowrap flex-row',
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VImg',
|
||||
'props': {
|
||||
'src': image,
|
||||
'height': 120,
|
||||
'width': 80,
|
||||
'aspect-ratio': '2/3',
|
||||
'class': 'object-cover shadow ring-gray-500',
|
||||
'cover': True
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'div',
|
||||
'content': sub_contents
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'grid gap-3 grid-info-card',
|
||||
},
|
||||
'content': contents
|
||||
}
|
||||
]
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,147 +0,0 @@
|
||||
import random
|
||||
import re
|
||||
from typing import Tuple
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class Pt52(_ISiteSigninHandler):
|
||||
"""
|
||||
52pt
|
||||
如果填写openai key则调用chatgpt获取答案
|
||||
否则随机
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "52pt.site"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['今天已经签过到了']
|
||||
|
||||
# 签到成功,待补充
|
||||
_success_regex = ['\\d+点魔力值']
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: dict) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
render = site_info.get("render")
|
||||
proxy = site_info.get("proxy")
|
||||
|
||||
# 判断今日是否已签到
|
||||
html_text = self.get_page_source(url='https://52pt.site/bakatest.php',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 没有签到则解析html
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
if not html:
|
||||
return False, '签到失败'
|
||||
|
||||
# 获取页面问题、答案
|
||||
questionid = html.xpath("//input[@name='questionid']/@value")[0]
|
||||
option_ids = html.xpath("//input[@name='choice[]']/@value")
|
||||
question_str = html.xpath("//td[@class='text' and contains(text(),'请问:')]/text()")[0]
|
||||
|
||||
# 正则获取问题
|
||||
match = re.search(r'请问:(.+)', question_str)
|
||||
if match:
|
||||
question_str = match.group(1)
|
||||
logger.debug(f"获取到签到问题 {question_str}")
|
||||
else:
|
||||
logger.error(f"未获取到签到问题")
|
||||
return False, f"【{site}】签到失败,未获取到签到问题"
|
||||
|
||||
# 正确答案,默认随机,如果gpt返回则用gpt返回的答案提交
|
||||
choice = [option_ids[random.randint(0, len(option_ids) - 1)]]
|
||||
|
||||
# 签到
|
||||
return self.__signin(questionid=questionid,
|
||||
choice=choice,
|
||||
site_cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
site=site)
|
||||
|
||||
def __signin(self, questionid: str,
|
||||
choice: list,
|
||||
site: str,
|
||||
site_cookie: str,
|
||||
ua: str,
|
||||
proxy: bool) -> Tuple[bool, str]:
|
||||
"""
|
||||
签到请求
|
||||
questionid: 450
|
||||
choice[]: 8
|
||||
choice[]: 4
|
||||
usercomment: 此刻心情:无
|
||||
submit: 提交
|
||||
多选会有多个choice[]....
|
||||
"""
|
||||
data = {
|
||||
'questionid': questionid,
|
||||
'choice[]': choice[0] if len(choice) == 1 else choice,
|
||||
'usercomment': '太难了!',
|
||||
'wantskip': '不会'
|
||||
}
|
||||
logger.debug(f"签到请求参数 {data}")
|
||||
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url='https://52pt.site/bakatest.php', data=data)
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
# 判断是否签到成功
|
||||
sign_status = self.sign_in_result(html_res=sign_res.text,
|
||||
regexs=self._success_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
sign_status = self.sign_in_result(html_res=sign_res.text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
logger.error(f"{site} 签到失败,请到页面查看")
|
||||
return False, '签到失败,请到页面查看'
|
||||
@@ -1,88 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from typing import Tuple
|
||||
|
||||
import chardet
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.helper.browser import PlaywrightHelper
|
||||
from app.log import logger
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class _ISiteSigninHandler(metaclass=ABCMeta):
|
||||
"""
|
||||
实现站点签到的基类,所有站点签到类都需要继承此类,并实现match和signin方法
|
||||
实现类放置到sitesignin目录下将会自动加载
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = ""
|
||||
|
||||
@abstractmethod
|
||||
def match(self, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, self.site_url) else False
|
||||
|
||||
@abstractmethod
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: True|False,签到结果信息
|
||||
"""
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def get_page_source(url: str, cookie: str, ua: str, proxy: bool, render: bool) -> str:
|
||||
"""
|
||||
获取页面源码
|
||||
:param url: Url地址
|
||||
:param cookie: Cookie
|
||||
:param ua: UA
|
||||
:param proxy: 是否使用代理
|
||||
:param render: 是否渲染
|
||||
:return: 页面源码,错误信息
|
||||
"""
|
||||
if render:
|
||||
return PlaywrightHelper().get_page_source(url=url,
|
||||
cookies=cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY_SERVER if proxy else None)
|
||||
else:
|
||||
res = RequestUtils(cookies=cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).get_res(url=url)
|
||||
if res is not None:
|
||||
# 使用chardet检测字符编码
|
||||
raw_data = res.content
|
||||
if raw_data:
|
||||
try:
|
||||
result = chardet.detect(raw_data)
|
||||
encoding = result['encoding']
|
||||
# 解码为字符串
|
||||
return raw_data.decode(encoding)
|
||||
except Exception as e:
|
||||
logger.error(f"chardet解码失败:{str(e)}")
|
||||
return res.text
|
||||
else:
|
||||
return res.text
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def sign_in_result(html_res: str, regexs: list) -> bool:
|
||||
"""
|
||||
判断是否签到成功
|
||||
"""
|
||||
html_text = re.sub(r"#\d+", "", re.sub(r"\d+px", "", html_res))
|
||||
for regex in regexs:
|
||||
if re.search(str(regex), html_text):
|
||||
return True
|
||||
return False
|
||||
@@ -1,75 +0,0 @@
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class BTSchool(_ISiteSigninHandler):
|
||||
"""
|
||||
学校签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "pt.btschool.club"
|
||||
|
||||
# 已签到
|
||||
_sign_text = '每日签到'
|
||||
|
||||
@classmethod
|
||||
def match(cls, url) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
render = site_info.get("render")
|
||||
proxy = site_info.get("proxy")
|
||||
|
||||
logger.info(f"{site} 开始签到")
|
||||
# 判断今日是否已签到
|
||||
html_text = self.get_page_source(url='https://pt.btschool.club',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
# 已签到
|
||||
if self._sign_text not in html_text:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
html_text = self.get_page_source(url='https://pt.btschool.club/index.php?action=addbonus',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
# 签到成功
|
||||
if self._sign_text not in html_text:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
@@ -1,148 +0,0 @@
|
||||
import random
|
||||
import re
|
||||
from typing import Tuple
|
||||
|
||||
from lxml import etree
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class CHDBits(_ISiteSigninHandler):
|
||||
"""
|
||||
彩虹岛签到
|
||||
如果填写openai key则调用chatgpt获取答案
|
||||
否则随机
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "ptchdbits.co"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['今天已经签过到了']
|
||||
|
||||
# 签到成功,待补充
|
||||
_success_regex = ['\\d+点魔力值']
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 判断今日是否已签到
|
||||
html_text = self.get_page_source(url='https://ptchdbits.co/bakatest.php',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 没有签到则解析html
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
if not html:
|
||||
return False, '签到失败'
|
||||
|
||||
# 获取页面问题、答案
|
||||
questionid = html.xpath("//input[@name='questionid']/@value")[0]
|
||||
option_ids = html.xpath("//input[@name='choice[]']/@value")
|
||||
question_str = html.xpath("//td[@class='text' and contains(text(),'请问:')]/text()")[0]
|
||||
|
||||
# 正则获取问题
|
||||
match = re.search(r'请问:(.+)', question_str)
|
||||
if match:
|
||||
question_str = match.group(1)
|
||||
logger.debug(f"获取到签到问题 {question_str}")
|
||||
else:
|
||||
logger.error(f"未获取到签到问题")
|
||||
return False, f"【{site}】签到失败,未获取到签到问题"
|
||||
|
||||
# 正确答案,默认随机,如果gpt返回则用gpt返回的答案提交
|
||||
choice = [option_ids[random.randint(0, len(option_ids) - 1)]]
|
||||
|
||||
# 签到
|
||||
return self.__signin(questionid=questionid,
|
||||
choice=choice,
|
||||
site_cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
site=site)
|
||||
|
||||
def __signin(self, questionid: str,
|
||||
choice: list,
|
||||
site: str,
|
||||
site_cookie: str,
|
||||
ua: str,
|
||||
proxy: bool) -> Tuple[bool, str]:
|
||||
"""
|
||||
签到请求
|
||||
questionid: 450
|
||||
choice[]: 8
|
||||
choice[]: 4
|
||||
usercomment: 此刻心情:无
|
||||
submit: 提交
|
||||
多选会有多个choice[]....
|
||||
"""
|
||||
data = {
|
||||
'questionid': questionid,
|
||||
'choice[]': choice[0] if len(choice) == 1 else choice,
|
||||
'usercomment': '太难了!',
|
||||
'wantskip': '不会'
|
||||
}
|
||||
logger.debug(f"签到请求参数 {data}")
|
||||
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url='https://ptchdbits.co/bakatest.php', data=data)
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
# 判断是否签到成功
|
||||
sign_status = self.sign_in_result(html_res=sign_res.text,
|
||||
regexs=self._success_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
sign_status = self.sign_in_result(html_res=sign_res.text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
logger.error(f"{site} 签到失败,请到页面查看")
|
||||
return False, '签到失败,请到页面查看'
|
||||
@@ -1,62 +0,0 @@
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HaiDan(_ISiteSigninHandler):
|
||||
"""
|
||||
海胆签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "haidan.video"
|
||||
|
||||
# 签到成功
|
||||
_succeed_regex = ['(?<=value=")已经打卡(?=")']
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 签到
|
||||
html_text = self.get_page_source(url='https://www.haidan.video/signin.php',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._succeed_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
|
||||
logger.error(f"{site} 签到失败,签到接口返回 {html_text}")
|
||||
return False, '签到失败'
|
||||
@@ -1,83 +0,0 @@
|
||||
import json
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class Hares(_ISiteSigninHandler):
|
||||
"""
|
||||
白兔签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "club.hares.top"
|
||||
|
||||
# 已签到
|
||||
_sign_text = '已签到'
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url='https://club.hares.top',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
|
||||
if not html_text:
|
||||
logger.error(f"{site} 模拟访问失败,请检查站点连通性")
|
||||
return False, '模拟访问失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 模拟访问失败,Cookie已失效")
|
||||
return False, '模拟访问失败,Cookie已失效'
|
||||
|
||||
# if self._sign_text in html_res.text:
|
||||
# logger.info(f"今日已签到")
|
||||
# return True, '今日已签到'
|
||||
|
||||
headers = {
|
||||
'Accept': 'application/json',
|
||||
"User-Agent": ua
|
||||
}
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
headers=headers,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).get_res(url="https://club.hares.top/attendance.php?action=sign")
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
# {"code":1,"msg":"您今天已经签到过了"}
|
||||
# {"code":0,"msg":"签到成功"}
|
||||
sign_dict = json.loads(sign_res.text)
|
||||
if sign_dict['code'] == 0:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
@@ -1,81 +0,0 @@
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HD4fans(_ISiteSigninHandler):
|
||||
"""
|
||||
兽签到
|
||||
"""
|
||||
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "pt.hd4fans.org"
|
||||
|
||||
# 签到成功
|
||||
_repeat_text = '<span id="checkedin">[签到成功]</span>'
|
||||
_success_text = "签到成功"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url='https://pt.hd4fans.org/index.php',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
# 判断是否已签到
|
||||
if self._repeat_text in html_text:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 签到
|
||||
data = {
|
||||
'action': 'checkin'
|
||||
}
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url="https://pt.hd4fans.org/checkin.php", data=data)
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
# sign_res.text=本次签到魔力
|
||||
if sign_res.text and sign_res.text.isdigit():
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
|
||||
logger.error(f"{site} 签到失败,签到接口返回 {sign_res.text}")
|
||||
return False, '签到失败'
|
||||
@@ -1,69 +0,0 @@
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HDArea(_ISiteSigninHandler):
|
||||
"""
|
||||
好大签到
|
||||
"""
|
||||
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "hdarea.club"
|
||||
|
||||
# 签到成功
|
||||
_success_text = "此次签到您获得"
|
||||
_repeat_text = "请不要重复签到哦"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxies = settings.PROXY if site_info.get("proxy") else None
|
||||
|
||||
# 获取页面html
|
||||
data = {
|
||||
'action': 'sign_in'
|
||||
}
|
||||
html_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=proxies
|
||||
).post_res(url="https://www.hdarea.club/sign_in.php", data=data)
|
||||
if not html_res or html_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_res.text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
# 判断是否已签到
|
||||
# '已连续签到278天,此次签到您获得了100魔力值奖励!'
|
||||
if self._success_text in html_res.text:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
if self._repeat_text in html_res.text:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
logger.error(f"{site} 签到失败,签到接口返回 {html_res.text}")
|
||||
return False, '签到失败'
|
||||
@@ -1,117 +0,0 @@
|
||||
import json
|
||||
from typing import Tuple
|
||||
|
||||
from lxml import etree
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HDChina(_ISiteSigninHandler):
|
||||
"""
|
||||
瓷器签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "hdchina.org"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['<a class="label label-default" href="#">已签到</a>']
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxies = settings.PROXY if site_info.get("proxy") else None
|
||||
|
||||
# 尝试解决瓷器cookie每天签到后过期,只保留hdchina=部分
|
||||
cookie = ""
|
||||
# 按照分号进行字符串拆分
|
||||
sub_strs = site_cookie.split(";")
|
||||
# 遍历每个子字符串
|
||||
for sub_str in sub_strs:
|
||||
if "hdchina=" in sub_str:
|
||||
# 如果子字符串包含"hdchina=",则保留该子字符串
|
||||
cookie += sub_str + ";"
|
||||
|
||||
if "hdchina=" not in cookie:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
site_cookie = cookie
|
||||
# 获取页面html
|
||||
html_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=proxies
|
||||
).get_res(url="https://hdchina.org/index.php")
|
||||
if not html_res or html_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_res.text or "阻断页面" in html_res.text:
|
||||
logger.error(f"{site} 签到失败,Cookie失效")
|
||||
return False, '签到失败,Cookie失效'
|
||||
|
||||
# 获取新返回的cookie进行签到
|
||||
site_cookie = ';'.join(['{}={}'.format(k, v) for k, v in html_res.cookies.get_dict().items()])
|
||||
|
||||
# 判断是否已签到
|
||||
html_res.encoding = "utf-8"
|
||||
sign_status = self.sign_in_result(html_res=html_res.text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 没有签到则解析html
|
||||
html = etree.HTML(html_res.text)
|
||||
|
||||
if not html:
|
||||
return False, '签到失败'
|
||||
|
||||
# x_csrf
|
||||
x_csrf = html.xpath("//meta[@name='x-csrf']/@content")[0]
|
||||
if not x_csrf:
|
||||
logger.error("{site} 签到失败,获取x-csrf失败")
|
||||
return False, '签到失败'
|
||||
logger.debug(f"获取到x-csrf {x_csrf}")
|
||||
|
||||
# 签到
|
||||
data = {
|
||||
'csrf': x_csrf
|
||||
}
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=proxies
|
||||
).post_res(url="https://hdchina.org/plugin_sign-in.php?cmd=signin", data=data)
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
sign_dict = json.loads(sign_res.text)
|
||||
logger.debug(f"签到返回结果 {sign_dict}")
|
||||
if sign_dict['state']:
|
||||
# {'state': 'success', 'signindays': 10, 'integral': 20}
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
# {'state': False, 'msg': '不正确的CSRF / Incorrect CSRF token'}
|
||||
logger.error(f"{site} 签到失败,不正确的CSRF / Incorrect CSRF token")
|
||||
return False, '签到失败'
|
||||
@@ -1,66 +0,0 @@
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HDCity(_ISiteSigninHandler):
|
||||
"""
|
||||
城市签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "hdcity.city"
|
||||
|
||||
# 签到成功
|
||||
_success_text = '本次签到获得魅力'
|
||||
# 重复签到
|
||||
_repeat_text = '已签到'
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url='https://hdcity.city/sign',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
# 判断是否已签到
|
||||
# '已连续签到278天,此次签到您获得了100魔力值奖励!'
|
||||
if self._success_text in html_text:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
if self._repeat_text in html_text:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
logger.error(f"{site} 签到失败,签到接口返回 {html_text}")
|
||||
return False, '签到失败'
|
||||
@@ -1,133 +0,0 @@
|
||||
import json
|
||||
import time
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.helper.ocr import OcrHelper
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HDSky(_ISiteSigninHandler):
|
||||
"""
|
||||
天空ocr签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "hdsky.me"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['已签到']
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 判断今日是否已签到
|
||||
html_text = self.get_page_source(url='https://hdsky.me',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 获取验证码请求,考虑到网络问题获取失败,多获取几次试试
|
||||
res_times = 0
|
||||
img_hash = None
|
||||
while not img_hash and res_times <= 3:
|
||||
image_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url='https://hdsky.me/image_code_ajax.php',
|
||||
data={'action': 'new'})
|
||||
if image_res and image_res.status_code == 200:
|
||||
image_json = json.loads(image_res.text)
|
||||
if image_json["success"]:
|
||||
img_hash = image_json["code"]
|
||||
break
|
||||
res_times += 1
|
||||
logger.debug(f"获取{site}验证码失败,正在进行重试,目前重试次数 {res_times}")
|
||||
time.sleep(1)
|
||||
|
||||
# 获取到二维码hash
|
||||
if img_hash:
|
||||
# 完整验证码url
|
||||
img_get_url = 'https://hdsky.me/image.php?action=regimage&imagehash=%s' % img_hash
|
||||
logger.debug(f"获取到{site}验证码链接 {img_get_url}")
|
||||
# ocr识别多次,获取6位验证码
|
||||
times = 0
|
||||
ocr_result = None
|
||||
# 识别几次
|
||||
while times <= 3:
|
||||
# ocr二维码识别
|
||||
ocr_result = OcrHelper().get_captcha_text(image_url=img_get_url,
|
||||
cookie=site_cookie,
|
||||
ua=ua)
|
||||
logger.debug(f"ocr识别{site}验证码 {ocr_result}")
|
||||
if ocr_result:
|
||||
if len(ocr_result) == 6:
|
||||
logger.info(f"ocr识别{site}验证码成功 {ocr_result}")
|
||||
break
|
||||
times += 1
|
||||
logger.debug(f"ocr识别{site}验证码失败,正在进行重试,目前重试次数 {times}")
|
||||
time.sleep(1)
|
||||
|
||||
if ocr_result:
|
||||
# 组装请求参数
|
||||
data = {
|
||||
'action': 'showup',
|
||||
'imagehash': img_hash,
|
||||
'imagestring': ocr_result
|
||||
}
|
||||
# 访问签到链接
|
||||
res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url='https://hdsky.me/showup.php', data=data)
|
||||
if res and res.status_code == 200:
|
||||
if json.loads(res.text)["success"]:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
elif str(json.loads(res.text)["message"]) == "date_unmatch":
|
||||
# 重复签到
|
||||
logger.warn(f"{site} 重复成功")
|
||||
return True, '今日已签到'
|
||||
elif str(json.loads(res.text)["message"]) == "invalid_imagehash":
|
||||
# 验证码错误
|
||||
logger.warn(f"{site} 签到失败:验证码错误")
|
||||
return False, '签到失败:验证码错误'
|
||||
|
||||
logger.error(f'{site} 签到失败:未获取到验证码')
|
||||
return False, '签到失败:未获取到验证码'
|
||||
@@ -1,82 +0,0 @@
|
||||
import re
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class HDUpt(_ISiteSigninHandler):
|
||||
"""
|
||||
hdu签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "pt.hdupt.com"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['<span id="yiqiandao">']
|
||||
|
||||
# 签到成功
|
||||
_success_text = '本次签到获得魅力'
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url='https://pt.hdupt.com',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 签到
|
||||
html_text = self.get_page_source(url='https://pt.hdupt.com/added.php?action=qiandao',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
logger.debug(f"{site} 签到接口返回 {html_text}")
|
||||
# 判断是否已签到 sign_res.text = ".23"
|
||||
if len(list(map(int, re.findall(r"\d+", html_text)))) > 0:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
|
||||
logger.error(f"{site} 签到失败,签到接口返回 {html_text}")
|
||||
return False, '签到失败'
|
||||
@@ -1,132 +0,0 @@
|
||||
import json
|
||||
import time
|
||||
from typing import Tuple
|
||||
|
||||
from lxml import etree
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.helper.ocr import OcrHelper
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class Opencd(_ISiteSigninHandler):
|
||||
"""
|
||||
皇后ocr签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "open.cd"
|
||||
|
||||
# 已签到
|
||||
_repeat_text = "/plugin_sign-in.php?cmd=show-log"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 判断今日是否已签到
|
||||
html_text = self.get_page_source(url='https://www.open.cd',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
if self._repeat_text in html_text:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 获取签到参数
|
||||
html_text = self.get_page_source(url='https://www.open.cd/plugin_sign-in.php',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
# 没有签到则解析html
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return False, '签到失败'
|
||||
|
||||
# 签到参数
|
||||
img_url = html.xpath('//form[@id="frmSignin"]//img/@src')[0]
|
||||
img_hash = html.xpath('//form[@id="frmSignin"]//input[@name="imagehash"]/@value')[0]
|
||||
if not img_url or not img_hash:
|
||||
logger.error(f"{site} 签到失败,获取签到参数失败")
|
||||
return False, '签到失败,获取签到参数失败'
|
||||
|
||||
# 完整验证码url
|
||||
img_get_url = 'https://www.open.cd/%s' % img_url
|
||||
logger.debug(f"{site} 获取到{site}验证码链接 {img_get_url}")
|
||||
|
||||
# ocr识别多次,获取6位验证码
|
||||
times = 0
|
||||
ocr_result = None
|
||||
# 识别几次
|
||||
while times <= 3:
|
||||
# ocr二维码识别
|
||||
ocr_result = OcrHelper().get_captcha_text(image_url=img_get_url,
|
||||
cookie=site_cookie,
|
||||
ua=ua)
|
||||
logger.debug(f"ocr识别{site}验证码 {ocr_result}")
|
||||
if ocr_result:
|
||||
if len(ocr_result) == 6:
|
||||
logger.info(f"ocr识别{site}验证码成功 {ocr_result}")
|
||||
break
|
||||
times += 1
|
||||
logger.debug(f"ocr识别{site}验证码失败,正在进行重试,目前重试次数 {times}")
|
||||
time.sleep(1)
|
||||
|
||||
if ocr_result:
|
||||
# 组装请求参数
|
||||
data = {
|
||||
'imagehash': img_hash,
|
||||
'imagestring': ocr_result
|
||||
}
|
||||
# 访问签到链接
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url='https://www.open.cd/plugin_sign-in.php?cmd=signin', data=data)
|
||||
if sign_res and sign_res.status_code == 200:
|
||||
logger.debug(f"sign_res返回 {sign_res.text}")
|
||||
# sign_res.text = '{"state":"success","signindays":"0","integral":"10"}'
|
||||
sign_dict = json.loads(sign_res.text)
|
||||
if sign_dict['state']:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
logger.error(f"{site} 签到失败,签到接口返回 {sign_dict}")
|
||||
return False, '签到失败'
|
||||
|
||||
logger.error(f'{site} 签到失败:未获取到验证码')
|
||||
return False, '签到失败:未获取到验证码'
|
||||
@@ -1,65 +0,0 @@
|
||||
import json
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class PTerClub(_ISiteSigninHandler):
|
||||
"""
|
||||
猫签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "pterclub.com"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 签到
|
||||
html_text = self.get_page_source(url='https://pterclub.com/attendance-ajax.php',
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
try:
|
||||
sign_dict = json.loads(html_text)
|
||||
except Exception as e:
|
||||
logger.error(f"{site} 签到失败,签到接口返回数据异常,错误信息:{str(e)}")
|
||||
return False, '签到失败,签到接口返回数据异常'
|
||||
if sign_dict['status'] == '1':
|
||||
# {"status":"1","data":" (签到已成功300)","message":"<p>这是您的第<b>237</b>次签到,
|
||||
# 已连续签到<b>237</b>天。</p><p>本次签到获得<b>300</b>克猫粮。</p>"}
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
# {"status":"0","data":"抱歉","message":"您今天已经签到过了,请勿重复刷新。"}
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
@@ -1,274 +0,0 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from io import BytesIO
|
||||
from typing import Tuple
|
||||
|
||||
from PIL import Image
|
||||
from lxml import etree
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class Tjupt(_ISiteSigninHandler):
|
||||
"""
|
||||
北洋签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "tjupt.org"
|
||||
|
||||
# 签到地址
|
||||
_sign_in_url = 'https://www.tjupt.org/attendance.php'
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['<a href="attendance.php">今日已签到</a>']
|
||||
|
||||
# 签到成功
|
||||
_succeed_regex = ['这是您的首次签到,本次签到获得\\d+个魔力值。',
|
||||
'签到成功,这是您的第\\d+次签到,已连续签到\\d+天,本次签到获得\\d+个魔力值。',
|
||||
'重新签到成功,本次签到获得\\d+个魔力值']
|
||||
|
||||
# 存储正确的答案,后续可直接查
|
||||
_answer_path = settings.TEMP_PATH / "signin/"
|
||||
_answer_file = _answer_path / "tjupt.json"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 创建正确答案存储目录
|
||||
if not os.path.exists(os.path.dirname(self._answer_file)):
|
||||
os.makedirs(os.path.dirname(self._answer_file))
|
||||
|
||||
# 获取北洋签到页面html
|
||||
html_text = self.get_page_source(url=self._sign_in_url,
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
|
||||
# 获取签到后返回html,判断是否签到成功
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 没有签到则解析html
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return False, '签到失败'
|
||||
img_url = html.xpath('//table[@class="captcha"]//img/@src')[0]
|
||||
|
||||
if not img_url:
|
||||
logger.error(f"{site} 签到失败,未获取到签到图片")
|
||||
return False, '签到失败,未获取到签到图片'
|
||||
|
||||
# 签到图片
|
||||
img_url = "https://www.tjupt.org" + img_url
|
||||
logger.info(f"获取到签到图片 {img_url}")
|
||||
# 获取签到图片hash
|
||||
captcha_img_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).get_res(url=img_url)
|
||||
if not captcha_img_res or captcha_img_res.status_code != 200:
|
||||
logger.error(f"{site} 签到图片 {img_url} 请求失败")
|
||||
return False, '签到失败,未获取到签到图片'
|
||||
captcha_img = Image.open(BytesIO(captcha_img_res.content))
|
||||
captcha_img_hash = self._tohash(captcha_img)
|
||||
logger.debug(f"签到图片hash {captcha_img_hash}")
|
||||
|
||||
# 签到答案选项
|
||||
values = html.xpath("//input[@name='answer']/@value")
|
||||
options = html.xpath("//input[@name='answer']/following-sibling::text()")
|
||||
|
||||
if not values or not options:
|
||||
logger.error(f"{site} 签到失败,未获取到答案选项")
|
||||
return False, '签到失败,未获取到答案选项'
|
||||
|
||||
# value+选项
|
||||
answers = list(zip(values, options))
|
||||
logger.debug(f"获取到所有签到选项 {answers}")
|
||||
|
||||
# 查询已有答案
|
||||
exits_answers = {}
|
||||
try:
|
||||
with open(self._answer_file, 'r') as f:
|
||||
json_str = f.read()
|
||||
exits_answers = json.loads(json_str)
|
||||
# 查询本地本次验证码hash答案
|
||||
captcha_answer = exits_answers[captcha_img_hash]
|
||||
|
||||
# 本地存在本次hash对应的正确答案再遍历查询
|
||||
if captcha_answer:
|
||||
for value, answer in answers:
|
||||
if str(captcha_answer) == str(answer):
|
||||
# 确实是答案
|
||||
return self.__signin(answer=value,
|
||||
site_cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
site=site)
|
||||
except (FileNotFoundError, IOError, OSError) as e:
|
||||
logger.debug(f"查询本地已知答案失败:{str(e)},继续请求豆瓣查询")
|
||||
|
||||
# 本地不存在正确答案则请求豆瓣查询匹配
|
||||
for value, answer in answers:
|
||||
if answer:
|
||||
# 豆瓣检索
|
||||
db_res = RequestUtils().get_res(url=f'https://movie.douban.com/j/subject_suggest?q={answer}')
|
||||
if not db_res or db_res.status_code != 200:
|
||||
logger.debug(f"签到选项 {answer} 未查询到豆瓣数据")
|
||||
continue
|
||||
|
||||
# 豆瓣返回结果
|
||||
db_answers = json.loads(db_res.text)
|
||||
if not isinstance(db_answers, list):
|
||||
db_answers = [db_answers]
|
||||
|
||||
if len(db_answers) == 0:
|
||||
logger.debug(f"签到选项 {answer} 查询到豆瓣数据为空")
|
||||
|
||||
for db_answer in db_answers:
|
||||
answer_img_url = db_answer['img']
|
||||
|
||||
# 获取答案hash
|
||||
answer_img_res = RequestUtils(referer="https://movie.douban.com").get_res(url=answer_img_url)
|
||||
if not answer_img_res or answer_img_res.status_code != 200:
|
||||
logger.debug(f"签到答案 {answer} {answer_img_url} 请求失败")
|
||||
continue
|
||||
|
||||
answer_img = Image.open(BytesIO(answer_img_res.content))
|
||||
answer_img_hash = self._tohash(answer_img)
|
||||
logger.debug(f"签到答案图片hash {answer} {answer_img_hash}")
|
||||
|
||||
# 获取选项图片与签到图片相似度,大于0.9默认是正确答案
|
||||
score = self._comparehash(captcha_img_hash, answer_img_hash)
|
||||
logger.info(f"签到图片与选项 {answer} 豆瓣图片相似度 {score}")
|
||||
if score > 0.9:
|
||||
# 确实是答案
|
||||
return self.__signin(answer=value,
|
||||
site_cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
site=site,
|
||||
exits_answers=exits_answers,
|
||||
captcha_img_hash=captcha_img_hash)
|
||||
|
||||
# 间隔5s,防止请求太频繁被豆瓣屏蔽ip
|
||||
time.sleep(5)
|
||||
logger.error(f"豆瓣图片匹配,未获取到匹配答案")
|
||||
|
||||
# 没有匹配签到成功,则签到失败
|
||||
return False, '签到失败,未获取到匹配答案'
|
||||
|
||||
def __signin(self, answer, site_cookie, ua, proxy, site, exits_answers=None, captcha_img_hash=None):
|
||||
"""
|
||||
签到请求
|
||||
"""
|
||||
data = {
|
||||
'answer': answer,
|
||||
'submit': '提交'
|
||||
}
|
||||
logger.debug(f"提交data {data}")
|
||||
sign_in_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url=self._sign_in_url, data=data)
|
||||
if not sign_in_res or sign_in_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
# 获取签到后返回html,判断是否签到成功
|
||||
sign_status = self.sign_in_result(html_res=sign_in_res.text,
|
||||
regexs=self._succeed_regex)
|
||||
if sign_status:
|
||||
logger.info(f"签到成功")
|
||||
if exits_answers and captcha_img_hash:
|
||||
# 签到成功写入本地文件
|
||||
self.__write_local_answer(exits_answers=exits_answers or {},
|
||||
captcha_img_hash=captcha_img_hash,
|
||||
answer=answer)
|
||||
return True, '签到成功'
|
||||
else:
|
||||
logger.error(f"{site} 签到失败,请到页面查看")
|
||||
return False, '签到失败,请到页面查看'
|
||||
|
||||
def __write_local_answer(self, exits_answers, captcha_img_hash, answer):
|
||||
"""
|
||||
签到成功写入本地文件
|
||||
"""
|
||||
try:
|
||||
exits_answers[captcha_img_hash] = answer
|
||||
# 序列化数据
|
||||
formatted_data = json.dumps(exits_answers, indent=4)
|
||||
with open(self._answer_file, 'w') as f:
|
||||
f.write(formatted_data)
|
||||
except (FileNotFoundError, IOError, OSError) as e:
|
||||
logger.debug(f"签到成功写入本地文件失败:{str(e)}")
|
||||
|
||||
@staticmethod
|
||||
def _tohash(img, shape=(10, 10)):
|
||||
"""
|
||||
获取图片hash
|
||||
"""
|
||||
img = img.resize(shape)
|
||||
gray = img.convert('L')
|
||||
s = 0
|
||||
hash_str = ''
|
||||
for i in range(shape[1]):
|
||||
for j in range(shape[0]):
|
||||
s = s + gray.getpixel((j, i))
|
||||
avg = s / (shape[0] * shape[1])
|
||||
for i in range(shape[1]):
|
||||
for j in range(shape[0]):
|
||||
if gray.getpixel((j, i)) > avg:
|
||||
hash_str = hash_str + '1'
|
||||
else:
|
||||
hash_str = hash_str + '0'
|
||||
return hash_str
|
||||
|
||||
@staticmethod
|
||||
def _comparehash(hash1, hash2, shape=(10, 10)):
|
||||
"""
|
||||
比较图片hash
|
||||
返回相似度
|
||||
"""
|
||||
n = 0
|
||||
if len(hash1) != len(hash2):
|
||||
return -1
|
||||
for i in range(len(hash1)):
|
||||
if hash1[i] == hash2[i]:
|
||||
n = n + 1
|
||||
return n / (shape[0] * shape[1])
|
||||
@@ -1,97 +0,0 @@
|
||||
import re
|
||||
from typing import Tuple
|
||||
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class TTG(_ISiteSigninHandler):
|
||||
"""
|
||||
TTG签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "totheglory.im"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['<b style="color:green;">已签到</b>']
|
||||
_sign_text = '亲,您今天已签到过,不要太贪哦'
|
||||
|
||||
# 签到成功
|
||||
_success_text = '您已连续签到'
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url="https://totheglory.im",
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
# 判断是否已签到
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 获取签到参数
|
||||
signed_timestamp = re.search('(?<=signed_timestamp: ")\\d{10}', html_text).group()
|
||||
signed_token = re.search('(?<=signed_token: ").*(?=")', html_text).group()
|
||||
logger.debug(f"signed_timestamp={signed_timestamp} signed_token={signed_token}")
|
||||
|
||||
data = {
|
||||
'signed_timestamp': signed_timestamp,
|
||||
'signed_token': signed_token
|
||||
}
|
||||
# 签到
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url="https://totheglory.im/signed.php",
|
||||
data=data)
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
sign_res.encoding = "utf-8"
|
||||
if self._success_text in sign_res.text:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
if self._sign_text in sign_res.text:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
logger.error(f"{site} 签到失败,未知原因")
|
||||
return False, '签到失败,未知原因'
|
||||
@@ -1,123 +0,0 @@
|
||||
import datetime
|
||||
import random
|
||||
import re
|
||||
from typing import Tuple
|
||||
|
||||
from lxml import etree
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class U2(_ISiteSigninHandler):
|
||||
"""
|
||||
U2签到 随机
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "u2.dmhy.org"
|
||||
|
||||
# 已签到
|
||||
_sign_regex = ['<a href="showup.php">已签到</a>',
|
||||
'<a href="showup.php">Show Up</a>',
|
||||
'<a href="showup.php">Показать</a>',
|
||||
'<a href="showup.php">已簽到</a>',
|
||||
'<a href="showup.php">已簽到</a>']
|
||||
|
||||
# 签到成功
|
||||
_success_text = "window.location.href = 'showup.php';</script>"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
now = datetime.datetime.now()
|
||||
# 判断当前时间是否小于9点
|
||||
if now.hour < 9:
|
||||
logger.error(f"{site} 签到失败,9点前不签到")
|
||||
return False, '签到失败,9点前不签到'
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url="https://u2.dmhy.org/showup.php",
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 签到失败,请检查站点连通性")
|
||||
return False, '签到失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 签到失败,Cookie已失效")
|
||||
return False, '签到失败,Cookie已失效'
|
||||
|
||||
# 判断是否已签到
|
||||
sign_status = self.sign_in_result(html_res=html_text,
|
||||
regexs=self._sign_regex)
|
||||
if sign_status:
|
||||
logger.info(f"{site} 今日已签到")
|
||||
return True, '今日已签到'
|
||||
|
||||
# 没有签到则解析html
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
if not html:
|
||||
return False, '签到失败'
|
||||
|
||||
# 获取签到参数
|
||||
req = html.xpath("//form//td/input[@name='req']/@value")[0]
|
||||
hash_str = html.xpath("//form//td/input[@name='hash']/@value")[0]
|
||||
form = html.xpath("//form//td/input[@name='form']/@value")[0]
|
||||
submit_name = html.xpath("//form//td/input[@type='submit']/@name")
|
||||
submit_value = html.xpath("//form//td/input[@type='submit']/@value")
|
||||
if not re or not hash_str or not form or not submit_name or not submit_value:
|
||||
logger.error("{site} 签到失败,未获取到相关签到参数")
|
||||
return False, '签到失败'
|
||||
|
||||
# 随机一个答案
|
||||
answer_num = random.randint(0, 3)
|
||||
data = {
|
||||
'req': req,
|
||||
'hash': hash_str,
|
||||
'form': form,
|
||||
'message': '一切随缘~',
|
||||
submit_name[answer_num]: submit_value[answer_num]
|
||||
}
|
||||
# 签到
|
||||
sign_res = RequestUtils(cookies=site_cookie,
|
||||
ua=ua,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url="https://u2.dmhy.org/showup.php?action=show",
|
||||
data=data)
|
||||
if not sign_res or sign_res.status_code != 200:
|
||||
logger.error(f"{site} 签到失败,签到接口请求失败")
|
||||
return False, '签到失败,签到接口请求失败'
|
||||
|
||||
# 判断是否签到成功
|
||||
# sign_res.text = "<script type="text/javascript">window.location.href = 'showup.php';</script>"
|
||||
if self._success_text in sign_res.text:
|
||||
logger.info(f"{site} 签到成功")
|
||||
return True, '签到成功'
|
||||
else:
|
||||
logger.error(f"{site} 签到失败,未知原因")
|
||||
return False, '签到失败,未知原因'
|
||||
@@ -1,88 +0,0 @@
|
||||
import json
|
||||
from typing import Tuple
|
||||
|
||||
from lxml import etree
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.plugins.autosignin.sites import _ISiteSigninHandler
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class ZhuQue(_ISiteSigninHandler):
|
||||
"""
|
||||
ZHUQUE签到
|
||||
"""
|
||||
# 匹配的站点Url,每一个实现类都需要设置为自己的站点Url
|
||||
site_url = "zhuque.in"
|
||||
|
||||
@classmethod
|
||||
def match(cls, url: str) -> bool:
|
||||
"""
|
||||
根据站点Url判断是否匹配当前站点签到类,大部分情况使用默认实现即可
|
||||
:param url: 站点Url
|
||||
:return: 是否匹配,如匹配则会调用该类的signin方法
|
||||
"""
|
||||
return True if StringUtils.url_equal(url, cls.site_url) else False
|
||||
|
||||
def signin(self, site_info: CommentedMap) -> Tuple[bool, str]:
|
||||
"""
|
||||
执行签到操作
|
||||
:param site_info: 站点信息,含有站点Url、站点Cookie、UA等信息
|
||||
:return: 签到结果信息
|
||||
"""
|
||||
site = site_info.get("name")
|
||||
site_cookie = site_info.get("cookie")
|
||||
ua = site_info.get("ua")
|
||||
proxy = site_info.get("proxy")
|
||||
render = site_info.get("render")
|
||||
|
||||
# 获取页面html
|
||||
html_text = self.get_page_source(url="https://zhuque.in",
|
||||
cookie=site_cookie,
|
||||
ua=ua,
|
||||
proxy=proxy,
|
||||
render=render)
|
||||
if not html_text:
|
||||
logger.error(f"{site} 模拟登录失败,请检查站点连通性")
|
||||
return False, '模拟登录失败,请检查站点连通性'
|
||||
|
||||
if "login.php" in html_text:
|
||||
logger.error(f"{site} 模拟登录失败,Cookie已失效")
|
||||
return False, '模拟登录失败,Cookie已失效'
|
||||
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
if not html:
|
||||
return False, '模拟登录失败'
|
||||
|
||||
# 释放技能
|
||||
msg = '失败'
|
||||
x_csrf_token = html.xpath("//meta[@name='x-csrf-token']/@content")[0]
|
||||
if x_csrf_token:
|
||||
data = {
|
||||
"all": 1,
|
||||
"resetModal": "true"
|
||||
}
|
||||
headers = {
|
||||
"x-csrf-token": str(x_csrf_token),
|
||||
"Content-Type": "application/json; charset=utf-8",
|
||||
"User-Agent": ua
|
||||
}
|
||||
skill_res = RequestUtils(cookies=site_cookie,
|
||||
headers=headers,
|
||||
proxies=settings.PROXY if proxy else None
|
||||
).post_res(url="https://zhuque.in/api/gaming/fireGenshinCharacterMagic", json=data)
|
||||
if not skill_res or skill_res.status_code != 200:
|
||||
logger.error(f"模拟登录失败,释放技能失败")
|
||||
|
||||
# '{"status":200,"data":{"code":"FIRE_GENSHIN_CHARACTER_MAGIC_SUCCESS","bonus":0}}'
|
||||
skill_dict = json.loads(skill_res.text)
|
||||
if skill_dict['status'] == 200:
|
||||
bonus = int(skill_dict['data']['bonus'])
|
||||
msg = f'成功,获得{bonus}魔力'
|
||||
|
||||
logger.info(f'【{site}】模拟登录成功,技能释放{msg}')
|
||||
return True, f'模拟登录成功,技能释放{msg}'
|
||||
@@ -1,694 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
from functools import reduce
|
||||
from pathlib import Path
|
||||
from threading import RLock
|
||||
from typing import Optional, Any, List, Dict, Tuple
|
||||
from xml.dom.minidom import parseString
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from requests import Response
|
||||
|
||||
from app.chain.subscribe import SubscribeChain
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo
|
||||
from app.core.event import eventmanager
|
||||
from app.log import logger
|
||||
from app.modules.emby import Emby
|
||||
from app.modules.jellyfin import Jellyfin
|
||||
from app.modules.plex import Plex
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import WebhookEventInfo
|
||||
from app.schemas.types import MediaType, EventType
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
lock = RLock()
|
||||
|
||||
|
||||
class BestFilmVersion(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "收藏洗版"
|
||||
# 插件描述
|
||||
plugin_desc = "Jellyfin/Emby/Plex点击收藏电影后,自动订阅洗版。"
|
||||
# 插件图标
|
||||
plugin_icon = "like.jpg"
|
||||
# 主题色
|
||||
plugin_color = "#E4003F"
|
||||
# 插件版本
|
||||
plugin_version = "2.0"
|
||||
# 插件作者
|
||||
plugin_author = "wlj"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/developer-wlj"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "bestfilmversion_"
|
||||
# 加载顺序
|
||||
plugin_order = 13
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有变量
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_cache_path: Optional[Path] = None
|
||||
subscribechain = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
_cron: str = ""
|
||||
_notify: bool = False
|
||||
_webhook_enabled: bool = False
|
||||
_only_once: bool = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self._cache_path = settings.TEMP_PATH / "__best_film_version_cache__"
|
||||
self.subscribechain = SubscribeChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._notify = config.get("notify")
|
||||
self._webhook_enabled = config.get("webhook_enabled")
|
||||
self._only_once = config.get("only_once")
|
||||
|
||||
if self._enabled:
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
if not self._webhook_enabled:
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.sync,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="收藏洗版")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"执行周期配置错误:{str(err)}")
|
||||
else:
|
||||
self._scheduler.add_job(self.sync, "interval", minutes=30, name="收藏洗版")
|
||||
|
||||
if self._only_once:
|
||||
self._only_once = False
|
||||
self.update_config({
|
||||
"enabled": self._enabled,
|
||||
"cron": self._cron,
|
||||
"notify": self._notify,
|
||||
"webhook_enabled": self._webhook_enabled,
|
||||
"only_once": self._only_once
|
||||
})
|
||||
self._scheduler.add_job(self.sync, 'date',
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="立即运行收藏洗版")
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取插件API
|
||||
[{
|
||||
"path": "/xx",
|
||||
"endpoint": self.xxx,
|
||||
"methods": ["GET", "POST"],
|
||||
"summary": "API说明"
|
||||
}]
|
||||
"""
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 3
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 3
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '发送通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 3
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'only_once',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 3
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'webhook_enabled',
|
||||
'label': 'Webhook',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '支持主动定时获取媒体库数据和Webhook实时触发两种方式,两者只能选其一,'
|
||||
'Webhook需要在媒体服务器设置发送Webhook报文。'
|
||||
'Plex使用主动获取时,建议执行周期设置大于1小时,'
|
||||
'收藏Api调用Plex官网接口,有频率限制。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"notify": False,
|
||||
"cron": "*/30 * * * *",
|
||||
"webhook_enabled": False,
|
||||
"only_once": False
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
"""
|
||||
拼装插件详情页面,需要返回页面配置,同时附带数据
|
||||
"""
|
||||
# 查询同步详情
|
||||
historys = self.get_data('history')
|
||||
if not historys:
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'text': '暂无数据',
|
||||
'props': {
|
||||
'class': 'text-center',
|
||||
}
|
||||
}
|
||||
]
|
||||
# 数据按时间降序排序
|
||||
historys = sorted(historys, key=lambda x: x.get('time'), reverse=True)
|
||||
# 拼装页面
|
||||
contents = []
|
||||
for history in historys:
|
||||
title = history.get("title")
|
||||
poster = history.get("poster")
|
||||
mtype = history.get("type")
|
||||
time_str = history.get("time")
|
||||
tmdbid = history.get("tmdbid")
|
||||
contents.append(
|
||||
{
|
||||
'component': 'VCard',
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'd-flex justify-space-start flex-nowrap flex-row',
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VImg',
|
||||
'props': {
|
||||
'src': poster,
|
||||
'height': 120,
|
||||
'width': 80,
|
||||
'aspect-ratio': '2/3',
|
||||
'class': 'object-cover shadow ring-gray-500',
|
||||
'cover': True
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCardSubtitle',
|
||||
'props': {
|
||||
'class': 'pa-2 font-bold break-words whitespace-break-spaces'
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'a',
|
||||
'props': {
|
||||
'href': f"https://www.themoviedb.org/movie/{tmdbid}",
|
||||
'target': '_blank'
|
||||
},
|
||||
'text': title
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{mtype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{time_str}'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'grid gap-3 grid-info-card',
|
||||
},
|
||||
'content': contents
|
||||
}
|
||||
]
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
|
||||
def sync(self):
|
||||
"""
|
||||
通过流媒体管理工具收藏,自动洗版
|
||||
"""
|
||||
# 获取锁
|
||||
_is_lock: bool = lock.acquire(timeout=60)
|
||||
if not _is_lock:
|
||||
return
|
||||
try:
|
||||
# 读取缓存
|
||||
caches = self._cache_path.read_text().split("\n") if self._cache_path.exists() else []
|
||||
# 读取历史记录
|
||||
history = self.get_data('history') or []
|
||||
|
||||
# 媒体服务器类型,多个以,分隔
|
||||
if not settings.MEDIASERVER:
|
||||
return
|
||||
media_servers = settings.MEDIASERVER.split(',')
|
||||
|
||||
# 读取收藏
|
||||
all_items = {}
|
||||
for media_server in media_servers:
|
||||
if media_server == 'jellyfin':
|
||||
all_items['jellyfin'] = self.jellyfin_get_items()
|
||||
elif media_server == 'emby':
|
||||
all_items['emby'] = self.emby_get_items()
|
||||
else:
|
||||
all_items['plex'] = self.plex_get_watchlist()
|
||||
|
||||
def function(y, x):
|
||||
return y if (x['Name'] in [i['Name'] for i in y]) else (lambda z, u: (z.append(u), z))(y, x)[1]
|
||||
|
||||
# 处理所有结果
|
||||
for server, all_item in all_items.items():
|
||||
# all_item 根据电影名去重
|
||||
result = reduce(function, all_item, [])
|
||||
for data in result:
|
||||
# 检查缓存
|
||||
if data.get('Name') in caches:
|
||||
continue
|
||||
|
||||
# 获取详情
|
||||
if server == 'jellyfin':
|
||||
item_info_resp = Jellyfin().get_iteminfo(itemid=data.get('Id'))
|
||||
elif server == 'emby':
|
||||
item_info_resp = Emby().get_iteminfo(itemid=data.get('Id'))
|
||||
else:
|
||||
item_info_resp = self.plex_get_iteminfo(itemid=data.get('Id'))
|
||||
logger.debug(f'BestFilmVersion插件 item打印 {item_info_resp}')
|
||||
if not item_info_resp:
|
||||
continue
|
||||
|
||||
# 只接受Movie类型
|
||||
if data.get('Type') != 'Movie':
|
||||
continue
|
||||
|
||||
# 获取tmdb_id
|
||||
tmdb_id = item_info_resp.tmdbid
|
||||
if not tmdb_id:
|
||||
continue
|
||||
# 识别媒体信息
|
||||
mediainfo: MediaInfo = self.chain.recognize_media(tmdbid=tmdb_id, mtype=MediaType.MOVIE)
|
||||
if not mediainfo:
|
||||
logger.warn(f'未识别到媒体信息,标题:{data.get("Name")},tmdbid:{tmdb_id}')
|
||||
continue
|
||||
# 添加订阅
|
||||
self.subscribechain.add(mtype=MediaType.MOVIE,
|
||||
title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
best_version=True,
|
||||
username="收藏洗版",
|
||||
exist_ok=True)
|
||||
# 加入缓存
|
||||
caches.append(data.get('Name'))
|
||||
# 存储历史记录
|
||||
if mediainfo.tmdb_id not in [h.get("tmdbid") for h in history]:
|
||||
history.append({
|
||||
"title": mediainfo.title,
|
||||
"type": mediainfo.type.value,
|
||||
"year": mediainfo.year,
|
||||
"poster": mediainfo.get_poster_image(),
|
||||
"overview": mediainfo.overview,
|
||||
"tmdbid": mediainfo.tmdb_id,
|
||||
"time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
})
|
||||
# 保存历史记录
|
||||
self.save_data('history', history)
|
||||
# 保存缓存
|
||||
self._cache_path.write_text("\n".join(caches))
|
||||
finally:
|
||||
lock.release()
|
||||
|
||||
def jellyfin_get_items(self) -> List[dict]:
|
||||
# 获取所有user
|
||||
users_url = "[HOST]Users?&apikey=[APIKEY]"
|
||||
users = self.get_users(Jellyfin().get_data(users_url))
|
||||
if not users:
|
||||
logger.info(f"bestfilmversion/users_url: {users_url}")
|
||||
return []
|
||||
all_items = []
|
||||
for user in users:
|
||||
# 根据加入日期 降序排序
|
||||
url = "[HOST]Users/" + user + "/Items?SortBy=DateCreated%2CSortName" \
|
||||
"&SortOrder=Descending" \
|
||||
"&Filters=IsFavorite" \
|
||||
"&Recursive=true" \
|
||||
"&Fields=PrimaryImageAspectRatio%2CBasicSyncInfo" \
|
||||
"&CollapseBoxSetItems=false" \
|
||||
"&ExcludeLocationTypes=Virtual" \
|
||||
"&EnableTotalRecordCount=false" \
|
||||
"&Limit=20" \
|
||||
"&apikey=[APIKEY]"
|
||||
resp = self.get_items(Jellyfin().get_data(url))
|
||||
if not resp:
|
||||
continue
|
||||
all_items.extend(resp)
|
||||
return all_items
|
||||
|
||||
def emby_get_items(self) -> List[dict]:
|
||||
# 获取所有user
|
||||
get_users_url = "[HOST]Users?&api_key=[APIKEY]"
|
||||
users = self.get_users(Emby().get_data(get_users_url))
|
||||
if not users:
|
||||
return []
|
||||
all_items = []
|
||||
for user in users:
|
||||
# 根据加入日期 降序排序
|
||||
url = "[HOST]emby/Users/" + user + "/Items?SortBy=DateCreated%2CSortName" \
|
||||
"&SortOrder=Descending" \
|
||||
"&Filters=IsFavorite" \
|
||||
"&Recursive=true" \
|
||||
"&Fields=PrimaryImageAspectRatio%2CBasicSyncInfo" \
|
||||
"&CollapseBoxSetItems=false" \
|
||||
"&ExcludeLocationTypes=Virtual" \
|
||||
"&EnableTotalRecordCount=false" \
|
||||
"&Limit=20&api_key=[APIKEY]"
|
||||
resp = self.get_items(Emby().get_data(url))
|
||||
if not resp:
|
||||
continue
|
||||
all_items.extend(resp)
|
||||
return all_items
|
||||
|
||||
@staticmethod
|
||||
def get_items(resp: Response):
|
||||
try:
|
||||
if resp:
|
||||
return resp.json().get("Items") or []
|
||||
else:
|
||||
return []
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def get_users(resp: Response):
|
||||
try:
|
||||
if resp:
|
||||
return [data['Id'] for data in resp.json()]
|
||||
else:
|
||||
logger.error(f"BestFilmVersion/Users 未获取到返回数据")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"连接BestFilmVersion/Users 出错:" + str(e))
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def plex_get_watchlist() -> List[dict]:
|
||||
# 根据加入日期 降序排序
|
||||
url = f"https://metadata.provider.plex.tv/library/sections/watchlist/all?type=1&sort=addedAt%3Adesc" \
|
||||
f"&X-Plex-Container-Start=0&X-Plex-Container-Size=50" \
|
||||
f"&X-Plex-Token={settings.PLEX_TOKEN}"
|
||||
res = []
|
||||
try:
|
||||
resp = RequestUtils().get_res(url=url)
|
||||
if resp:
|
||||
dom = parseString(resp.text)
|
||||
# 获取文档元素对象
|
||||
elem = dom.documentElement
|
||||
# 获取 指定元素
|
||||
eles = elem.getElementsByTagName('Video')
|
||||
if not eles:
|
||||
return []
|
||||
for ele in eles:
|
||||
data = {}
|
||||
# 获取标签中内容
|
||||
ele_id = ele.attributes['ratingKey'].nodeValue
|
||||
ele_title = ele.attributes['title'].nodeValue
|
||||
ele_type = ele.attributes['type'].nodeValue
|
||||
_type = "Movie" if ele_type == "movie" else ""
|
||||
data['Id'] = ele_id
|
||||
data['Name'] = ele_title
|
||||
data['Type'] = _type
|
||||
res.append(data)
|
||||
return res
|
||||
else:
|
||||
logger.error(f"Plex/Watchlist 未获取到返回数据")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"连接Plex/Watchlist 出错:" + str(e))
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def plex_get_iteminfo(itemid):
|
||||
url = f"https://metadata.provider.plex.tv/library/metadata/{itemid}" \
|
||||
f"?X-Plex-Token={settings.PLEX_TOKEN}"
|
||||
ids = []
|
||||
try:
|
||||
resp = RequestUtils(accept_type="application/json, text/plain, */*").get_res(url=url)
|
||||
if resp:
|
||||
metadata = resp.json().get('MediaContainer').get('Metadata')
|
||||
for item in metadata:
|
||||
_guid = item.get('Guid')
|
||||
if not _guid:
|
||||
continue
|
||||
|
||||
id_list = [h.get('id') for h in _guid if h.get('id').__contains__("tmdb")]
|
||||
if not id_list:
|
||||
continue
|
||||
|
||||
ids.append({'Name': 'TheMovieDb', 'Url': id_list[0]})
|
||||
|
||||
if not ids:
|
||||
return []
|
||||
return {'ExternalUrls': ids}
|
||||
else:
|
||||
logger.error(f"Plex/Items 未获取到返回数据")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"连接Plex/Items 出错:" + str(e))
|
||||
return []
|
||||
|
||||
@eventmanager.register(EventType.WebhookMessage)
|
||||
def webhook_message_action(self, event):
|
||||
|
||||
if not self._enabled:
|
||||
return
|
||||
if not self._webhook_enabled:
|
||||
return
|
||||
|
||||
data: WebhookEventInfo = event.event_data
|
||||
# 排除不是收藏调用
|
||||
if data.channel not in ['jellyfin', 'emby', 'plex']:
|
||||
return
|
||||
if data.channel in ['emby', 'plex'] and data.event != 'item.rate':
|
||||
return
|
||||
if data.channel == 'jellyfin' and data.save_reason != 'UpdateUserRating':
|
||||
return
|
||||
logger.info(f'BestFilmVersion/webhook_message_action WebhookEventInfo打印:{data}')
|
||||
|
||||
# 获取锁
|
||||
_is_lock: bool = lock.acquire(timeout=60)
|
||||
if not _is_lock:
|
||||
return
|
||||
try:
|
||||
if not data.tmdb_id:
|
||||
info = None
|
||||
if (data.channel == 'jellyfin'
|
||||
and data.save_reason == 'UpdateUserRating'
|
||||
and data.item_favorite):
|
||||
info = Jellyfin().get_iteminfo(itemid=data.item_id)
|
||||
elif data.channel == 'emby' and data.event == 'item.rate':
|
||||
info = Emby().get_iteminfo(itemid=data.item_id)
|
||||
elif data.channel == 'plex' and data.event == 'item.rate':
|
||||
info = Plex().get_iteminfo(itemid=data.item_id)
|
||||
logger.debug(f'BestFilmVersion/webhook_message_action item打印:{info}')
|
||||
if not info:
|
||||
return
|
||||
if info.item_type not in ['Movie', 'MOV', 'movie']:
|
||||
return
|
||||
# 获取tmdb_id
|
||||
tmdb_id = info.tmdbid
|
||||
else:
|
||||
tmdb_id = data.tmdb_id
|
||||
if (data.channel == 'jellyfin'
|
||||
and (data.save_reason != 'UpdateUserRating' or not data.item_favorite)):
|
||||
return
|
||||
if data.item_type not in ['Movie', 'MOV', 'movie']:
|
||||
return
|
||||
# 识别媒体信息
|
||||
mediainfo = self.chain.recognize_media(tmdbid=tmdb_id, mtype=MediaType.MOVIE)
|
||||
if not mediainfo:
|
||||
logger.warn(f'未识别到媒体信息,标题:{data.item_name},tmdbID:{tmdb_id}')
|
||||
return
|
||||
# 读取缓存
|
||||
caches = self._cache_path.read_text().split("\n") if self._cache_path.exists() else []
|
||||
# 检查缓存
|
||||
if data.item_name in caches:
|
||||
return
|
||||
# 读取历史记录
|
||||
history = self.get_data('history') or []
|
||||
# 添加订阅
|
||||
self.subscribechain.add(mtype=MediaType.MOVIE,
|
||||
title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
best_version=True,
|
||||
username="收藏洗版",
|
||||
exist_ok=True)
|
||||
# 加入缓存
|
||||
caches.append(data.item_name)
|
||||
# 存储历史记录
|
||||
if mediainfo.tmdb_id not in [h.get("tmdbid") for h in history]:
|
||||
history.append({
|
||||
"title": mediainfo.title,
|
||||
"type": mediainfo.type.value,
|
||||
"year": mediainfo.year,
|
||||
"poster": mediainfo.get_poster_image(),
|
||||
"overview": mediainfo.overview,
|
||||
"tmdbid": mediainfo.tmdb_id,
|
||||
"time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
})
|
||||
# 保存历史记录
|
||||
self.save_data('history', history)
|
||||
# 保存缓存
|
||||
self._cache_path.write_text("\n".join(caches))
|
||||
finally:
|
||||
lock.release()
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,231 +0,0 @@
|
||||
from typing import Any, List, Dict, Tuple
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.plugins.chatgpt.openai import OpenAi
|
||||
from app.schemas.types import EventType
|
||||
|
||||
|
||||
class ChatGPT(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "ChatGPT"
|
||||
# 插件描述
|
||||
plugin_desc = "消息交互支持与ChatGPT对话。"
|
||||
# 插件图标
|
||||
plugin_icon = "chatgpt.png"
|
||||
# 主题色
|
||||
plugin_color = "#74AA9C"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "chatgpt_"
|
||||
# 加载顺序
|
||||
plugin_order = 15
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
openai = None
|
||||
_enabled = False
|
||||
_proxy = False
|
||||
_recognize = False
|
||||
_openai_url = None
|
||||
_openai_key = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._proxy = config.get("proxy")
|
||||
self._recognize = config.get("recognize")
|
||||
self._openai_url = config.get("openai_url")
|
||||
self._openai_key = config.get("openai_key")
|
||||
self.openai = OpenAi(api_key=self._openai_key, api_url=self._openai_url,
|
||||
proxy=settings.PROXY if self._proxy else None)
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'proxy',
|
||||
'label': '使用代理',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'recognize',
|
||||
'label': '辅助识别',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'openai_url',
|
||||
'label': 'OpenAI API Url',
|
||||
'placeholder': 'https://api.openai.com',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'openai_key',
|
||||
'label': 'sk-xxx'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"proxy": False,
|
||||
"recognize": False,
|
||||
"openai_url": "https://api.openai.com",
|
||||
"openai_key": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
@eventmanager.register(EventType.UserMessage)
|
||||
def talk(self, event: Event):
|
||||
"""
|
||||
监听用户消息,获取ChatGPT回复
|
||||
"""
|
||||
if not self._enabled:
|
||||
return
|
||||
if not self.openai:
|
||||
return
|
||||
text = event.event_data.get("text")
|
||||
userid = event.event_data.get("userid")
|
||||
channel = event.event_data.get("channel")
|
||||
if not text:
|
||||
return
|
||||
response = self.openai.get_response(text=text, userid=userid)
|
||||
if response:
|
||||
self.post_message(channel=channel, title=response, userid=userid)
|
||||
|
||||
@eventmanager.register(EventType.NameRecognize)
|
||||
def recognize(self, event: Event):
|
||||
"""
|
||||
监听识别事件,使用ChatGPT辅助识别名称
|
||||
"""
|
||||
if not event.event_data:
|
||||
return
|
||||
title = event.event_data.get("title")
|
||||
if not title:
|
||||
return
|
||||
# 收到事件后需要立码返回,避免主程序等待
|
||||
if not self._enabled \
|
||||
or not self.openai \
|
||||
or not self._recognize:
|
||||
eventmanager.send_event(
|
||||
EventType.NameRecognizeResult,
|
||||
{
|
||||
'title': title
|
||||
}
|
||||
)
|
||||
return
|
||||
# 调用ChatGPT
|
||||
response = self.openai.get_media_name(filename=title)
|
||||
logger.info(f"ChatGPT辅助识别结果:{response}")
|
||||
if response:
|
||||
eventmanager.send_event(
|
||||
EventType.NameRecognizeResult,
|
||||
{
|
||||
'title': title,
|
||||
'name': response.get("title"),
|
||||
'year': response.get("year"),
|
||||
'season': response.get("season"),
|
||||
'episode': response.get("episode")
|
||||
}
|
||||
)
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
@@ -1,204 +0,0 @@
|
||||
import json
|
||||
import time
|
||||
from typing import List, Union
|
||||
|
||||
import openai
|
||||
from cacheout import Cache
|
||||
|
||||
OpenAISessionCache = Cache(maxsize=100, ttl=3600, timer=time.time, default=None)
|
||||
|
||||
|
||||
class OpenAi:
|
||||
_api_key: str = None
|
||||
_api_url: str = None
|
||||
|
||||
def __init__(self, api_key: str = None, api_url: str = None, proxy: dict = None):
|
||||
self._api_key = api_key
|
||||
self._api_url = api_url
|
||||
openai.api_base = self._api_url + "/v1"
|
||||
openai.api_key = self._api_key
|
||||
if proxy and proxy.get("https"):
|
||||
openai.proxy = proxy.get("https")
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return True if self._api_key else False
|
||||
|
||||
@staticmethod
|
||||
def __save_session(session_id: str, message: str):
|
||||
"""
|
||||
保存会话
|
||||
:param session_id: 会话ID
|
||||
:param message: 消息
|
||||
:return:
|
||||
"""
|
||||
seasion = OpenAISessionCache.get(session_id)
|
||||
if seasion:
|
||||
seasion.append({
|
||||
"role": "assistant",
|
||||
"content": message
|
||||
})
|
||||
OpenAISessionCache.set(session_id, seasion)
|
||||
|
||||
@staticmethod
|
||||
def __get_session(session_id: str, message: str) -> List[dict]:
|
||||
"""
|
||||
获取会话
|
||||
:param session_id: 会话ID
|
||||
:return: 会话上下文
|
||||
"""
|
||||
seasion = OpenAISessionCache.get(session_id)
|
||||
if seasion:
|
||||
seasion.append({
|
||||
"role": "user",
|
||||
"content": message
|
||||
})
|
||||
else:
|
||||
seasion = [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "请在接下来的对话中请使用中文回复,并且内容尽可能详细。"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": message
|
||||
}]
|
||||
OpenAISessionCache.set(session_id, seasion)
|
||||
return seasion
|
||||
|
||||
@staticmethod
|
||||
def __get_model(message: Union[str, List[dict]],
|
||||
prompt: str = None,
|
||||
user: str = "MoviePilot",
|
||||
**kwargs):
|
||||
"""
|
||||
获取模型
|
||||
"""
|
||||
if not isinstance(message, list):
|
||||
if prompt:
|
||||
message = [
|
||||
{
|
||||
"role": "system",
|
||||
"content": prompt
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": message
|
||||
}
|
||||
]
|
||||
else:
|
||||
message = [
|
||||
{
|
||||
"role": "user",
|
||||
"content": message
|
||||
}
|
||||
]
|
||||
return openai.ChatCompletion.create(
|
||||
model="gpt-3.5-turbo",
|
||||
user=user,
|
||||
messages=message,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def __clear_session(session_id: str):
|
||||
"""
|
||||
清除会话
|
||||
:param session_id: 会话ID
|
||||
:return:
|
||||
"""
|
||||
if OpenAISessionCache.get(session_id):
|
||||
OpenAISessionCache.delete(session_id)
|
||||
|
||||
def get_media_name(self, filename: str):
|
||||
"""
|
||||
从文件名中提取媒体名称等要素
|
||||
:param filename: 文件名
|
||||
:return: Json
|
||||
"""
|
||||
if not self.get_state():
|
||||
return None
|
||||
result = ""
|
||||
try:
|
||||
_filename_prompt = "I will give you a movie/tvshow file name.You need to return a Json." \
|
||||
"\nPay attention to the correct identification of the film name." \
|
||||
"\n{\"title\":string,\"version\":string,\"part\":string,\"year\":string,\"resolution\":string,\"season\":number|null,\"episode\":number|null}"
|
||||
completion = self.__get_model(prompt=_filename_prompt, message=filename)
|
||||
result = completion.choices[0].message.content
|
||||
return json.loads(result)
|
||||
except Exception as e:
|
||||
print(f"{str(e)}:{result}")
|
||||
return {}
|
||||
|
||||
def get_response(self, text: str, userid: str):
|
||||
"""
|
||||
聊天对话,获取答案
|
||||
:param text: 输入文本
|
||||
:param userid: 用户ID
|
||||
:return:
|
||||
"""
|
||||
if not self.get_state():
|
||||
return ""
|
||||
try:
|
||||
if not userid:
|
||||
return "用户信息错误"
|
||||
else:
|
||||
userid = str(userid)
|
||||
if text == "#清除":
|
||||
self.__clear_session(userid)
|
||||
return "会话已清除"
|
||||
# 获取历史上下文
|
||||
messages = self.__get_session(userid, text)
|
||||
completion = self.__get_model(message=messages, user=userid)
|
||||
result = completion.choices[0].message.content
|
||||
if result:
|
||||
self.__save_session(userid, text)
|
||||
return result
|
||||
except openai.error.RateLimitError as e:
|
||||
return f"请求被ChatGPT拒绝了,{str(e)}"
|
||||
except openai.error.APIConnectionError as e:
|
||||
return f"ChatGPT网络连接失败:{str(e)}"
|
||||
except openai.error.Timeout as e:
|
||||
return f"没有接收到ChatGPT的返回消息:{str(e)}"
|
||||
except Exception as e:
|
||||
return f"请求ChatGPT出现错误:{str(e)}"
|
||||
|
||||
def translate_to_zh(self, text: str):
|
||||
"""
|
||||
翻译为中文
|
||||
:param text: 输入文本
|
||||
"""
|
||||
if not self.get_state():
|
||||
return False, None
|
||||
system_prompt = "You are a translation engine that can only translate text and cannot interpret it."
|
||||
user_prompt = f"translate to zh-CN:\n\n{text}"
|
||||
result = ""
|
||||
try:
|
||||
completion = self.__get_model(prompt=system_prompt,
|
||||
message=user_prompt,
|
||||
temperature=0,
|
||||
top_p=1,
|
||||
frequency_penalty=0,
|
||||
presence_penalty=0)
|
||||
result = completion.choices[0].message.content.strip()
|
||||
return True, result
|
||||
except Exception as e:
|
||||
print(f"{str(e)}:{result}")
|
||||
return False, str(e)
|
||||
|
||||
def get_question_answer(self, question: str):
|
||||
"""
|
||||
从给定问题和选项中获取正确答案
|
||||
:param question: 问题及选项
|
||||
:return: Json
|
||||
"""
|
||||
if not self.get_state():
|
||||
return None
|
||||
result = ""
|
||||
try:
|
||||
_question_prompt = "下面我们来玩一个游戏,你是老师,我是学生,你需要回答我的问题,我会给你一个题目和几个选项,你的回复必须是给定选项中正确答案对应的序号,请直接回复数字"
|
||||
completion = self.__get_model(prompt=_question_prompt, message=question)
|
||||
result = completion.choices[0].message.content
|
||||
return result
|
||||
except Exception as e:
|
||||
print(f"{str(e)}:{result}")
|
||||
return {}
|
||||
@@ -1,256 +0,0 @@
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import List, Tuple, Dict, Any
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import TransferInfo
|
||||
from app.schemas.types import EventType, MediaType
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
|
||||
class ChineseSubFinder(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "ChineseSubFinder"
|
||||
# 插件描述
|
||||
plugin_desc = "整理入库时通知ChineseSubFinder下载字幕。"
|
||||
# 插件图标
|
||||
plugin_icon = "chinesesubfinder.png"
|
||||
# 主题色
|
||||
plugin_color = "#83BE39"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "chinesesubfinder_"
|
||||
# 加载顺序
|
||||
plugin_order = 5
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_save_tmp_path = None
|
||||
_enabled = False
|
||||
_host = None
|
||||
_api_key = None
|
||||
_remote_path = None
|
||||
_local_path = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self._save_tmp_path = settings.TEMP_PATH
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._api_key = config.get("api_key")
|
||||
self._host = config.get('host')
|
||||
if self._host:
|
||||
if not self._host.startswith('http'):
|
||||
self._host = "http://" + self._host
|
||||
if not self._host.endswith('/'):
|
||||
self._host = self._host + "/"
|
||||
self._local_path = config.get("local_path")
|
||||
self._remote_path = config.get("remote_path")
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'host',
|
||||
'label': '服务器'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'api_key',
|
||||
'label': 'API密钥'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'local_path',
|
||||
'label': '本地路径'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'remote_path',
|
||||
'label': '远端路径'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"host": "",
|
||||
"api_key": "",
|
||||
"local_path": "",
|
||||
"remote_path": ""
|
||||
}
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
pass
|
||||
|
||||
@eventmanager.register(EventType.TransferComplete)
|
||||
def download(self, event: Event):
|
||||
"""
|
||||
调用ChineseSubFinder下载字幕
|
||||
"""
|
||||
if not self._enabled or not self._host or not self._api_key:
|
||||
return
|
||||
item = event.event_data
|
||||
if not item:
|
||||
return
|
||||
# 请求地址
|
||||
req_url = "%sapi/v1/add-job" % self._host
|
||||
|
||||
# 媒体信息
|
||||
item_media: MediaInfo = item.get("mediainfo")
|
||||
# 转移信息
|
||||
item_transfer: TransferInfo = item.get("transferinfo")
|
||||
# 类型
|
||||
item_type = item_media.type
|
||||
# 目的路径
|
||||
item_dest: Path = item_transfer.target_path
|
||||
# 是否蓝光原盘
|
||||
item_bluray = item_transfer.is_bluray
|
||||
# 文件清单
|
||||
item_file_list = item_transfer.file_list_new
|
||||
|
||||
if item_bluray:
|
||||
# 蓝光原盘虚拟个文件
|
||||
item_file_list = ["%s.mp4" % item_dest / item_dest.name]
|
||||
|
||||
for file_path in item_file_list:
|
||||
# 路径替换
|
||||
if self._local_path and self._remote_path and file_path.startswith(self._local_path):
|
||||
file_path = file_path.replace(self._local_path, self._remote_path).replace('\\', '/')
|
||||
|
||||
# 调用CSF下载字幕
|
||||
self.__request_csf(req_url=req_url,
|
||||
file_path=file_path,
|
||||
item_type=0 if item_type == MediaType.MOVIE.value else 1,
|
||||
item_bluray=item_bluray)
|
||||
|
||||
@lru_cache(maxsize=128)
|
||||
def __request_csf(self, req_url, file_path, item_type, item_bluray):
|
||||
# 一个名称只建一个任务
|
||||
logger.info("通知ChineseSubFinder下载字幕: %s" % file_path)
|
||||
params = {
|
||||
"video_type": item_type,
|
||||
"physical_video_file_full_path": file_path,
|
||||
"task_priority_level": 3,
|
||||
"media_server_inside_video_id": "",
|
||||
"is_bluray": item_bluray
|
||||
}
|
||||
try:
|
||||
res = RequestUtils(headers={
|
||||
"Authorization": "Bearer %s" % self._api_key
|
||||
}).post(req_url, json=params)
|
||||
if not res or res.status_code != 200:
|
||||
logger.error("调用ChineseSubFinder API失败!")
|
||||
else:
|
||||
# 如果文件目录没有识别的nfo元数据, 此接口会返回控制符,推测是ChineseSubFinder的原因
|
||||
# emby refresh元数据时异步的
|
||||
if res.text:
|
||||
job_id = res.json().get("job_id")
|
||||
message = res.json().get("message")
|
||||
if not job_id:
|
||||
logger.warn("ChineseSubFinder下载字幕出错:%s" % message)
|
||||
else:
|
||||
logger.info("ChineseSubFinder任务添加成功:%s" % job_id)
|
||||
elif res.status_code != 200:
|
||||
logger.warn(f"ChineseSubFinder调用出错:{res.status_code} - {res.reason}")
|
||||
except Exception as e:
|
||||
logger.error("连接ChineseSubFinder出错:" + str(e))
|
||||
@@ -1,436 +0,0 @@
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple
|
||||
|
||||
from app.schemas.types import EventType, MediaImageType, NotificationType, MediaType
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
|
||||
class CloudDiskDel(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "云盘文件删除"
|
||||
# 插件描述
|
||||
plugin_desc = "媒体库删除strm文件后同步删除云盘资源。"
|
||||
# 插件图标
|
||||
plugin_icon = "clouddisk.png"
|
||||
# 主题色
|
||||
plugin_color = "#ff9933"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "clouddiskdel_"
|
||||
# 加载顺序
|
||||
plugin_order = 26
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_paths = {}
|
||||
_notify = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._notify = config.get("notify")
|
||||
for path in str(config.get("path")).split("\n"):
|
||||
paths = path.split(":")
|
||||
self._paths[paths[0]] = paths[1]
|
||||
|
||||
@eventmanager.register(EventType.NetworkDiskDel)
|
||||
def clouddisk_del(self, event: Event):
|
||||
if not self._enabled:
|
||||
return
|
||||
|
||||
event_data = event.event_data
|
||||
logger.info(f"获取到云盘删除请求 {event_data}")
|
||||
|
||||
media_path = event_data.get("media_path")
|
||||
if not media_path:
|
||||
logger.error("未获取到删除路径")
|
||||
return
|
||||
|
||||
media_name = event_data.get("media_name")
|
||||
tmdb_id = event_data.get("tmdb_id")
|
||||
media_type = event_data.get("media_type")
|
||||
season_num = event_data.get("season_num")
|
||||
episode_num = event_data.get("episode_num")
|
||||
|
||||
# 判断删除媒体路径是否与配置的媒体库路径相符,相符则继续删除,不符则跳过
|
||||
for library_path in list(self._paths.keys()):
|
||||
if str(media_path).startswith(library_path):
|
||||
# 替换网盘路径
|
||||
media_path = str(media_path).replace(library_path, self._paths.get(library_path))
|
||||
logger.info(f"获取到moviepilot本地云盘挂载路径 {media_path}")
|
||||
path = Path(media_path)
|
||||
if path.is_file() or media_path.endswith(".strm"):
|
||||
# 删除文件、nfo、jpg等同名文件
|
||||
pattern = path.stem.replace('[', '?').replace(']', '?')
|
||||
logger.info(f"开始筛选同名文件 {pattern}")
|
||||
files = path.parent.glob(f"{pattern}.*")
|
||||
for file in files:
|
||||
Path(file).unlink()
|
||||
logger.info(f"云盘文件 {file} 已删除")
|
||||
else:
|
||||
# 非根目录,才删除目录
|
||||
shutil.rmtree(path)
|
||||
# 删除目录
|
||||
logger.warn(f"云盘目录 {path} 已删除")
|
||||
|
||||
# 判断当前媒体父路径下是否有媒体文件,如有则无需遍历父级
|
||||
if not SystemUtils.exits_files(path.parent, settings.RMT_MEDIAEXT):
|
||||
# 判断父目录是否为空, 为空则删除
|
||||
for parent_path in path.parents:
|
||||
if str(parent_path.parent) != str(path.root):
|
||||
# 父目录非根目录,才删除父目录
|
||||
if not SystemUtils.exits_files(parent_path, settings.RMT_MEDIAEXT):
|
||||
# 当前路径下没有媒体文件则删除
|
||||
shutil.rmtree(parent_path)
|
||||
logger.warn(f"云盘目录 {parent_path} 已删除")
|
||||
|
||||
break
|
||||
|
||||
# 发送消息
|
||||
image = 'https://emby.media/notificationicon.png'
|
||||
media_type = MediaType.MOVIE if media_type in ["Movie", "MOV"] else MediaType.TV
|
||||
if self._notify:
|
||||
backrop_image = self.chain.obtain_specific_image(
|
||||
mediaid=tmdb_id,
|
||||
mtype=media_type,
|
||||
image_type=MediaImageType.Backdrop,
|
||||
season=season_num,
|
||||
episode=episode_num
|
||||
) or image
|
||||
|
||||
# 类型
|
||||
if media_type == MediaType.MOVIE:
|
||||
msg = f'电影 {media_name} {tmdb_id}'
|
||||
# 删除电视剧
|
||||
elif media_type == MediaType.TV and not season_num and not episode_num:
|
||||
msg = f'剧集 {media_name} {tmdb_id}'
|
||||
# 删除季 S02
|
||||
elif media_type == MediaType.TV and season_num and not episode_num:
|
||||
msg = f'剧集 {media_name} S{season_num} {tmdb_id}'
|
||||
# 删除剧集S02E02
|
||||
elif media_type == MediaType.TV and season_num and episode_num:
|
||||
msg = f'剧集 {media_name} S{season_num}E{episode_num} {tmdb_id}'
|
||||
else:
|
||||
msg = media_name
|
||||
|
||||
# 发送通知
|
||||
self.post_message(
|
||||
mtype=NotificationType.MediaServer,
|
||||
title="云盘同步删除任务完成",
|
||||
image=backrop_image,
|
||||
text=f"{msg}\n"
|
||||
f"时间 {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}"
|
||||
)
|
||||
|
||||
# 读取历史记录
|
||||
history = self.get_data('history') or []
|
||||
|
||||
# 获取poster
|
||||
poster_image = self.chain.obtain_specific_image(
|
||||
mediaid=tmdb_id,
|
||||
mtype=media_type,
|
||||
image_type=MediaImageType.Poster,
|
||||
) or image
|
||||
history.append({
|
||||
"type": media_type.value,
|
||||
"title": media_name,
|
||||
"path": media_path,
|
||||
"season": season_num,
|
||||
"episode": episode_num,
|
||||
"image": poster_image,
|
||||
"del_time": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
|
||||
})
|
||||
|
||||
# 保存历史
|
||||
self.save_data("history", history)
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '开启通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'path',
|
||||
'rows': '2',
|
||||
'label': '媒体库路径映射',
|
||||
'placeholder': '媒体服务器路径:moviepilot内云盘挂载路径(一行一个)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '需要开启媒体库删除插件且正确配置排除路径。'
|
||||
'主要针对于strm文件删除后同步删除云盘资源。'
|
||||
'如遇删除失败,请检查文件权限问题。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '关于路径映射:'
|
||||
'emby:/data/series/A.mp4,'
|
||||
'moviepilot内云盘挂载路径:/mnt/link/series/A.mp4。'
|
||||
'路径映射填/data:/mnt/link'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"path": "",
|
||||
"notify": False
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
"""
|
||||
拼装插件详情页面,需要返回页面配置,同时附带数据
|
||||
"""
|
||||
# 查询同步详情
|
||||
historys = self.get_data('history')
|
||||
if not historys:
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'text': '暂无数据',
|
||||
'props': {
|
||||
'class': 'text-center',
|
||||
}
|
||||
}
|
||||
]
|
||||
# 数据按时间降序排序
|
||||
historys = sorted(historys, key=lambda x: x.get('del_time'), reverse=True)
|
||||
# 拼装页面
|
||||
contents = []
|
||||
for history in historys:
|
||||
htype = history.get("type")
|
||||
title = history.get("title")
|
||||
season = history.get("season")
|
||||
episode = history.get("episode")
|
||||
image = history.get("image")
|
||||
del_time = history.get("del_time")
|
||||
|
||||
if season:
|
||||
sub_contents = [
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{htype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'标题:{title}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'季:{season}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'集:{episode}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{del_time}'
|
||||
}
|
||||
]
|
||||
else:
|
||||
sub_contents = [
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{htype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'标题:{title}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{del_time}'
|
||||
}
|
||||
]
|
||||
|
||||
contents.append(
|
||||
{
|
||||
'component': 'VCard',
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'd-flex justify-space-start flex-nowrap flex-row',
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VImg',
|
||||
'props': {
|
||||
'src': image,
|
||||
'height': 120,
|
||||
'width': 80,
|
||||
'aspect-ratio': '2/3',
|
||||
'class': 'object-cover shadow ring-gray-500',
|
||||
'cover': True
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'div',
|
||||
'content': sub_contents
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'grid gap-3 grid-info-card',
|
||||
},
|
||||
'content': contents
|
||||
}
|
||||
]
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
@@ -1,812 +0,0 @@
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import List, Tuple, Dict, Any
|
||||
|
||||
import pytz
|
||||
import requests
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from python_hosts import Hosts, HostsEntry
|
||||
from requests import Response
|
||||
|
||||
from app import schemas
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import EventType, NotificationType
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.ip import IpUtils
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
|
||||
class CloudflareSpeedTest(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "Cloudflare IP优选"
|
||||
# 插件描述
|
||||
plugin_desc = "🌩 测试 Cloudflare CDN 延迟和速度,自动优选IP。"
|
||||
# 插件图标
|
||||
plugin_icon = "cloudflare.jpg"
|
||||
# 主题色
|
||||
plugin_color = "#F6821F"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "cloudflarespeedtest_"
|
||||
# 加载顺序
|
||||
plugin_order = 12
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_customhosts = False
|
||||
_cf_ip = None
|
||||
_scheduler = None
|
||||
_cron = None
|
||||
_onlyonce = False
|
||||
_ipv4 = False
|
||||
_ipv6 = False
|
||||
_version = None
|
||||
_additional_args = None
|
||||
_re_install = False
|
||||
_notify = False
|
||||
_check = False
|
||||
_cf_path = None
|
||||
_cf_ipv4 = None
|
||||
_cf_ipv6 = None
|
||||
_result_file = None
|
||||
_release_prefix = 'https://github.com/XIU2/CloudflareSpeedTest/releases/download'
|
||||
_binary_name = 'CloudflareST'
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._cron = config.get("cron")
|
||||
self._cf_ip = config.get("cf_ip")
|
||||
self._version = config.get("version")
|
||||
self._ipv4 = config.get("ipv4")
|
||||
self._ipv6 = config.get("ipv6")
|
||||
self._re_install = config.get("re_install")
|
||||
self._additional_args = config.get("additional_args")
|
||||
self._notify = config.get("notify")
|
||||
self._check = config.get("check")
|
||||
|
||||
if self.get_state() or self._onlyonce:
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
|
||||
try:
|
||||
if self.get_state() and self._cron:
|
||||
logger.info(f"Cloudflare CDN优选服务启动,周期:{self._cron}")
|
||||
self._scheduler.add_job(func=self.__cloudflareSpeedTest,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="Cloudflare优选")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"Cloudflare CDN优选服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.__cloudflareSpeedTest, trigger='date',
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="Cloudflare优选")
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
self.__update_config()
|
||||
except Exception as err:
|
||||
logger.error(f"Cloudflare CDN优选服务出错:{str(err)}")
|
||||
self.systemmessage.put(f"Cloudflare CDN优选服务出错:{str(err)}")
|
||||
return
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
@eventmanager.register(EventType.CloudFlareSpeedTest)
|
||||
def __cloudflareSpeedTest(self, event: Event = None):
|
||||
"""
|
||||
CloudflareSpeedTest优选
|
||||
"""
|
||||
self._cf_path = self.get_data_path()
|
||||
self._cf_ipv4 = os.path.join(self._cf_path, "ip.txt")
|
||||
self._cf_ipv6 = os.path.join(self._cf_path, "ipv6.txt")
|
||||
self._result_file = os.path.join(self._cf_path, "result_hosts.txt")
|
||||
|
||||
# 获取自定义Hosts插件,若无设置则停止
|
||||
customHosts = self.get_config("CustomHosts")
|
||||
self._customhosts = customHosts and customHosts.get("enabled")
|
||||
if self._cf_ip and not customHosts or not customHosts.get("hosts"):
|
||||
logger.error(f"Cloudflare CDN优选依赖于自定义Hosts,请先维护hosts")
|
||||
return
|
||||
|
||||
if not self._cf_ip:
|
||||
logger.error("CloudflareSpeedTest加载成功,首次运行,需要配置优选ip")
|
||||
return
|
||||
|
||||
if event and event.event_data:
|
||||
logger.info("收到命令,开始Cloudflare IP优选 ...")
|
||||
self.post_message(channel=event.event_data.get("channel"),
|
||||
title="开始Cloudflare IP优选 ...",
|
||||
userid=event.event_data.get("user"))
|
||||
|
||||
# ipv4和ipv6必须其一
|
||||
if not self._ipv4 and not self._ipv6:
|
||||
self._ipv4 = True
|
||||
self.__update_config()
|
||||
logger.warn(f"Cloudflare CDN优选未指定ip类型,默认ipv4")
|
||||
|
||||
err_flag, release_version = self.__check_environment()
|
||||
if err_flag and release_version:
|
||||
# 更新版本
|
||||
self._version = release_version
|
||||
self.__update_config()
|
||||
|
||||
hosts = customHosts.get("hosts")
|
||||
if isinstance(hosts, str):
|
||||
hosts = str(hosts).split('\n')
|
||||
# 校正优选ip
|
||||
if self._check:
|
||||
self.__check_cf_ip(hosts=hosts)
|
||||
|
||||
# 开始优选
|
||||
if err_flag:
|
||||
logger.info("正在进行CLoudflare CDN优选,请耐心等待")
|
||||
# 执行优选命令,-dd不测速
|
||||
if SystemUtils.is_windows():
|
||||
cf_command = f'cd \"{self._cf_path}\" && CloudflareST {self._additional_args} -o \"{self._result_file}\"' + (
|
||||
f' -f \"{self._cf_ipv4}\"' if self._ipv4 else '') + (
|
||||
f' -f \"{self._cf_ipv6}\"' if self._ipv6 else '')
|
||||
else:
|
||||
cf_command = f'cd {self._cf_path} && chmod a+x {self._binary_name} && ./{self._binary_name} {self._additional_args} -o {self._result_file}' + (
|
||||
f' -f {self._cf_ipv4}' if self._ipv4 else '') + (f' -f {self._cf_ipv6}' if self._ipv6 else '')
|
||||
logger.info(f'正在执行优选命令 {cf_command}')
|
||||
if SystemUtils.is_windows():
|
||||
process = subprocess.Popen(cf_command, shell=True)
|
||||
# 执行命令后无法退出 采用异步和设置超时方案
|
||||
# 设置超时时间为120秒
|
||||
if cf_command.__contains__("-dd"):
|
||||
time.sleep(120)
|
||||
else:
|
||||
time.sleep(600)
|
||||
# 如果没有在120秒内完成任务,那么杀死该进程
|
||||
if process.poll() is None:
|
||||
os.system('taskkill /F /IM CloudflareST.exe')
|
||||
else:
|
||||
os.system(cf_command)
|
||||
|
||||
# 获取优选后最优ip
|
||||
if SystemUtils.is_windows():
|
||||
powershell_command = f"powershell.exe -Command \"Get-Content \'{self._result_file}\' | Select-Object -Skip 1 -First 1 | Write-Output\""
|
||||
logger.info(f'正在执行powershell命令 {powershell_command}')
|
||||
best_ip = SystemUtils.execute(powershell_command)
|
||||
best_ip = best_ip.split(',')[0]
|
||||
else:
|
||||
best_ip = SystemUtils.execute("sed -n '2,1p' " + self._result_file + " | awk -F, '{print $1}'")
|
||||
logger.info(f"\n获取到最优ip==>[{best_ip}]")
|
||||
|
||||
# 替换自定义Hosts插件数据库hosts
|
||||
if IpUtils.is_ipv4(best_ip) or IpUtils.is_ipv6(best_ip):
|
||||
if best_ip == self._cf_ip:
|
||||
logger.info(f"CloudflareSpeedTest CDN优选ip未变,不做处理")
|
||||
else:
|
||||
# 替换优选ip
|
||||
err_hosts = customHosts.get("err_hosts")
|
||||
|
||||
# 处理ip
|
||||
new_hosts = []
|
||||
for host in hosts:
|
||||
if host and host != '\n':
|
||||
host_arr = str(host).split()
|
||||
if host_arr[0] == self._cf_ip:
|
||||
new_hosts.append(host.replace(self._cf_ip, best_ip).replace("\n", "") + "\n")
|
||||
else:
|
||||
new_hosts.append(host.replace("\n", "") + "\n")
|
||||
|
||||
# 更新自定义Hosts
|
||||
self.update_config(
|
||||
{
|
||||
"hosts": ''.join(new_hosts),
|
||||
"err_hosts": err_hosts,
|
||||
"enabled": True
|
||||
}, "CustomHosts"
|
||||
)
|
||||
|
||||
# 更新优选ip
|
||||
old_ip = self._cf_ip
|
||||
self._cf_ip = best_ip
|
||||
self.__update_config()
|
||||
logger.info(f"Cloudflare CDN优选ip [{best_ip}] 已替换自定义Hosts插件")
|
||||
|
||||
# 解发自定义hosts插件重载
|
||||
logger.info("通知CustomHosts插件重载 ...")
|
||||
self.eventmanager.send_event(EventType.PluginReload,
|
||||
{
|
||||
"plugin_id": "CustomHosts"
|
||||
})
|
||||
if self._notify:
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title="【Cloudflare优选任务完成】",
|
||||
text=f"原ip:{old_ip}\n"
|
||||
f"新ip:{best_ip}"
|
||||
)
|
||||
else:
|
||||
logger.error("获取到最优ip格式错误,请重试")
|
||||
self._onlyonce = False
|
||||
self.__update_config()
|
||||
self.stop_service()
|
||||
|
||||
def __check_cf_ip(self, hosts):
|
||||
"""
|
||||
校正cf优选ip
|
||||
防止特殊情况下cf优选ip和自定义hosts插件中ip不一致
|
||||
"""
|
||||
# 统计每个IP地址出现的次数
|
||||
ip_count = {}
|
||||
for host in hosts:
|
||||
if host:
|
||||
ip = host.split()[0]
|
||||
if ip in ip_count:
|
||||
ip_count[ip] += 1
|
||||
else:
|
||||
ip_count[ip] = 1
|
||||
|
||||
# 找出出现次数最多的IP地址
|
||||
max_ips = [] # 保存最多出现的IP地址
|
||||
max_count = 0
|
||||
for ip, count in ip_count.items():
|
||||
if count > max_count:
|
||||
max_ips = [ip] # 更新最多的IP地址
|
||||
max_count = count
|
||||
elif count == max_count:
|
||||
max_ips.append(ip)
|
||||
|
||||
# 如果出现次数最多的ip不止一个,则不做兼容处理
|
||||
if len(max_ips) != 1:
|
||||
return
|
||||
|
||||
if max_ips[0] != self._cf_ip:
|
||||
self._cf_ip = max_ips[0]
|
||||
logger.info(f"获取到自定义hosts插件中ip {max_ips[0]} 出现次数最多,已自动校正优选ip")
|
||||
|
||||
def __check_environment(self):
|
||||
"""
|
||||
环境检查
|
||||
"""
|
||||
# 是否安装标识
|
||||
install_flag = False
|
||||
|
||||
# 是否重新安装
|
||||
if self._re_install:
|
||||
install_flag = True
|
||||
if SystemUtils.is_windows():
|
||||
os.system(f'rd /s /q \"{self._cf_path}\"')
|
||||
else:
|
||||
os.system(f'rm -rf {self._cf_path}')
|
||||
logger.info(f'删除CloudflareSpeedTest目录 {self._cf_path},开始重新安装')
|
||||
|
||||
# 判断目录是否存在
|
||||
cf_path = Path(self._cf_path)
|
||||
if not cf_path.exists():
|
||||
os.mkdir(self._cf_path)
|
||||
|
||||
# 获取CloudflareSpeedTest最新版本
|
||||
release_version = self.__get_release_version()
|
||||
if not release_version:
|
||||
# 如果升级失败但是有可执行文件CloudflareST,则可继续运行,反之停止
|
||||
if Path(f'{self._cf_path}/{self._binary_name}').exists():
|
||||
logger.warn(f"获取CloudflareSpeedTest版本失败,存在可执行版本,继续运行")
|
||||
return True, None
|
||||
elif self._version:
|
||||
logger.error(f"获取CloudflareSpeedTest版本失败,获取上次运行版本{self._version},开始安装")
|
||||
install_flag = True
|
||||
else:
|
||||
release_version = "v2.2.2"
|
||||
self._version = release_version
|
||||
logger.error(f"获取CloudflareSpeedTest版本失败,获取默认版本{release_version},开始安装")
|
||||
install_flag = True
|
||||
|
||||
# 有更新
|
||||
if not install_flag and release_version != self._version:
|
||||
logger.info(f"检测到CloudflareSpeedTest有版本[{release_version}]更新,开始安装")
|
||||
install_flag = True
|
||||
|
||||
# 重装后数据库有版本数据,但是本地没有则重装
|
||||
if not install_flag and release_version == self._version and not Path(
|
||||
f'{self._cf_path}/{self._binary_name}').exists() and not Path(
|
||||
f'{self._cf_path}/CloudflareST.exe').exists():
|
||||
logger.warn(f"未检测到CloudflareSpeedTest本地版本,重新安装")
|
||||
install_flag = True
|
||||
|
||||
if not install_flag:
|
||||
logger.info(f"CloudflareSpeedTest无新版本,存在可执行版本,继续运行")
|
||||
return True, None
|
||||
|
||||
# 检查环境、安装
|
||||
if SystemUtils.is_windows():
|
||||
# windows
|
||||
cf_file_name = 'CloudflareST_windows_amd64.zip'
|
||||
download_url = f'{self._release_prefix}/{release_version}/{cf_file_name}'
|
||||
return self.__os_install(download_url, cf_file_name, release_version,
|
||||
f"ditto -V -x -k --sequesterRsrc {self._cf_path}/{cf_file_name} {self._cf_path}")
|
||||
elif SystemUtils.is_macos():
|
||||
# mac
|
||||
uname = SystemUtils.execute('uname -m')
|
||||
arch = 'amd64' if uname == 'x86_64' else 'arm64'
|
||||
cf_file_name = f'CloudflareST_darwin_{arch}.zip'
|
||||
download_url = f'{self._release_prefix}/{release_version}/{cf_file_name}'
|
||||
return self.__os_install(download_url, cf_file_name, release_version,
|
||||
f"ditto -V -x -k --sequesterRsrc {self._cf_path}/{cf_file_name} {self._cf_path}")
|
||||
else:
|
||||
# docker
|
||||
uname = SystemUtils.execute('uname -m')
|
||||
arch = 'amd64' if uname == 'x86_64' else 'arm64'
|
||||
cf_file_name = f'CloudflareST_linux_{arch}.tar.gz'
|
||||
download_url = f'{self._release_prefix}/{release_version}/{cf_file_name}'
|
||||
return self.__os_install(download_url, cf_file_name, release_version,
|
||||
f"tar -zxf {self._cf_path}/{cf_file_name} -C {self._cf_path}")
|
||||
|
||||
def __os_install(self, download_url, cf_file_name, release_version, unzip_command):
|
||||
"""
|
||||
macos docker安装cloudflare
|
||||
"""
|
||||
# 手动下载安装包后,无需在此下载
|
||||
if not Path(f'{self._cf_path}/{cf_file_name}').exists():
|
||||
# 首次下载或下载新版压缩包
|
||||
proxies = settings.PROXY
|
||||
https_proxy = proxies.get("https") if proxies and proxies.get("https") else None
|
||||
if https_proxy:
|
||||
if SystemUtils.is_windows():
|
||||
self.__get_windows_cloudflarest(download_url, proxies)
|
||||
else:
|
||||
os.system(
|
||||
f'wget -P {self._cf_path} --no-check-certificate -e use_proxy=yes -e https_proxy={https_proxy} {download_url}')
|
||||
else:
|
||||
if SystemUtils.is_windows():
|
||||
self.__get_windows_cloudflarest(download_url, proxies)
|
||||
else:
|
||||
os.system(f'wget -P {self._cf_path} https://ghproxy.com/{download_url}')
|
||||
|
||||
# 判断是否下载好安装包
|
||||
if Path(f'{self._cf_path}/{cf_file_name}').exists():
|
||||
try:
|
||||
if SystemUtils.is_windows():
|
||||
with zipfile.ZipFile(f'{self._cf_path}/{cf_file_name}', 'r') as zip_ref:
|
||||
# 解压ZIP文件中的所有文件到指定目录
|
||||
zip_ref.extractall(self._cf_path)
|
||||
if Path(f'{self._cf_path}\\CloudflareST.exe').exists():
|
||||
logger.info(f"CloudflareSpeedTest安装成功,当前版本:{release_version}")
|
||||
return True, release_version
|
||||
else:
|
||||
logger.error(f"CloudflareSpeedTest安装失败,请检查")
|
||||
os.system(f'rd /s /q \"{self._cf_path}\"')
|
||||
return False, None
|
||||
# 解压
|
||||
os.system(f'{unzip_command}')
|
||||
# 删除压缩包
|
||||
os.system(f'rm -rf {self._cf_path}/{cf_file_name}')
|
||||
if Path(f'{self._cf_path}/{self._binary_name}').exists():
|
||||
logger.info(f"CloudflareSpeedTest安装成功,当前版本:{release_version}")
|
||||
return True, release_version
|
||||
else:
|
||||
logger.error(f"CloudflareSpeedTest安装失败,请检查")
|
||||
os.removedirs(self._cf_path)
|
||||
return False, None
|
||||
except Exception as err:
|
||||
# 如果升级失败但是有可执行文件CloudflareST,则可继续运行,反之停止
|
||||
if Path(f'{self._cf_path}/{self._binary_name}').exists() or \
|
||||
Path(f'{self._cf_path}\\CloudflareST.exe').exists():
|
||||
logger.error(f"CloudflareSpeedTest安装失败:{str(err)},继续使用现版本运行")
|
||||
return True, None
|
||||
else:
|
||||
logger.error(f"CloudflareSpeedTest安装失败:{str(err)},无可用版本,停止运行")
|
||||
if SystemUtils.is_windows():
|
||||
os.system(f'rd /s /q \"{self._cf_path}\"')
|
||||
else:
|
||||
os.removedirs(self._cf_path)
|
||||
return False, None
|
||||
else:
|
||||
# 如果升级失败但是有可执行文件CloudflareST,则可继续运行,反之停止
|
||||
if Path(f'{self._cf_path}/{self._binary_name}').exists() or \
|
||||
Path(f'{self._cf_path}\\CloudflareST.exe').exists():
|
||||
logger.warn(f"CloudflareSpeedTest安装失败,存在可执行版本,继续运行")
|
||||
return True, None
|
||||
else:
|
||||
logger.error(f"CloudflareSpeedTest安装失败,无可用版本,停止运行")
|
||||
if SystemUtils.is_windows():
|
||||
os.system(f'rd /s /q \"{self._cf_path}\"')
|
||||
else:
|
||||
os.removedirs(self._cf_path)
|
||||
return False, None
|
||||
|
||||
def __get_windows_cloudflarest(self, download_url, proxies):
|
||||
response = Response()
|
||||
try:
|
||||
response = requests.get(download_url, stream=True, proxies=proxies if proxies else None)
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"CloudflareSpeedTest下载失败:{str(e)}")
|
||||
if response.status_code == 200:
|
||||
with open(f'{self._cf_path}\\CloudflareST_windows_amd64.zip', 'wb') as file:
|
||||
for chunk in response.iter_content(chunk_size=8192):
|
||||
file.write(chunk)
|
||||
|
||||
@staticmethod
|
||||
def __get_release_version():
|
||||
"""
|
||||
获取CloudflareSpeedTest最新版本
|
||||
"""
|
||||
version_res = RequestUtils().get_res(
|
||||
"https://api.github.com/repos/XIU2/CloudflareSpeedTest/releases/latest")
|
||||
if not version_res:
|
||||
version_res = RequestUtils(proxies=settings.PROXY).get_res(
|
||||
"https://api.github.com/repos/XIU2/CloudflareSpeedTest/releases/latest")
|
||||
if version_res:
|
||||
ver_json = version_res.json()
|
||||
version = f"{ver_json['tag_name']}"
|
||||
return version
|
||||
else:
|
||||
return None
|
||||
|
||||
def __update_config(self):
|
||||
"""
|
||||
更新优选插件配置
|
||||
"""
|
||||
self.update_config({
|
||||
"onlyonce": False,
|
||||
"cron": self._cron,
|
||||
"cf_ip": self._cf_ip,
|
||||
"version": self._version,
|
||||
"ipv4": self._ipv4,
|
||||
"ipv6": self._ipv6,
|
||||
"re_install": self._re_install,
|
||||
"additional_args": self._additional_args,
|
||||
"notify": self._notify,
|
||||
"check": self._check
|
||||
})
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return True if self._cf_ip and self._cron else False
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
"""
|
||||
定义远程控制命令
|
||||
:return: 命令关键字、事件、描述、附带数据
|
||||
"""
|
||||
return [{
|
||||
"cmd": "/cloudflare_speedtest",
|
||||
"event": EventType.CloudFlareSpeedTest,
|
||||
"desc": "Cloudflare IP优选",
|
||||
"data": {}
|
||||
}]
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
return [{
|
||||
"path": "/cloudflare_speedtest",
|
||||
"endpoint": self.cloudflare_speedtest,
|
||||
"methods": ["GET"],
|
||||
"summary": "Cloudflare IP优选",
|
||||
"description": "Cloudflare IP优选",
|
||||
}]
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cf_ip',
|
||||
'label': '优选IP',
|
||||
'placeholder': '121.121.121.121'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '优选周期',
|
||||
'placeholder': '0 0 0 ? *'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'version',
|
||||
'readonly': True,
|
||||
'label': 'CloudflareSpeedTest版本',
|
||||
'placeholder': '暂未安装'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'ipv4',
|
||||
'label': 'IPv4',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'ipv6',
|
||||
'label': 'IPv6',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'check',
|
||||
'label': '自动校准',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 're_install',
|
||||
'label': '重装后运行',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '运行时通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'additional_args',
|
||||
'label': '高级参数',
|
||||
'placeholder': '-dd'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': 'F12看请求的Server属性,如果是cloudflare说明该站点支持Cloudflare IP优选。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"cf_ip": "",
|
||||
"cron": "",
|
||||
"version": "",
|
||||
"ipv4": True,
|
||||
"ipv6": False,
|
||||
"check": False,
|
||||
"onlyonce": False,
|
||||
"re_install": False,
|
||||
"notify": True,
|
||||
"additional_args": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def cloudflare_speedtest(self) -> schemas.Response:
|
||||
"""
|
||||
API调用CloudflareSpeedTest IP优选
|
||||
"""
|
||||
self.__cloudflareSpeedTest()
|
||||
return schemas.Response(success=True)
|
||||
|
||||
@staticmethod
|
||||
def __read_system_hosts():
|
||||
"""
|
||||
读取系统hosts对象
|
||||
"""
|
||||
# 获取本机hosts路径
|
||||
if SystemUtils.is_windows():
|
||||
hosts_path = r"c:\windows\system32\drivers\etc\hosts"
|
||||
else:
|
||||
hosts_path = '/etc/hosts'
|
||||
# 读取系统hosts
|
||||
return Hosts(path=hosts_path)
|
||||
|
||||
def __add_hosts_to_system(self, hosts):
|
||||
"""
|
||||
添加hosts到系统
|
||||
"""
|
||||
# 系统hosts对象
|
||||
system_hosts = self.__read_system_hosts()
|
||||
# 过滤掉插件添加的hosts
|
||||
orgin_entries = []
|
||||
for entry in system_hosts.entries:
|
||||
if entry.entry_type == "comment" and entry.comment == "# CustomHostsPlugin":
|
||||
break
|
||||
orgin_entries.append(entry)
|
||||
system_hosts.entries = orgin_entries
|
||||
# 新的有效hosts
|
||||
new_entrys = []
|
||||
# 新的错误的hosts
|
||||
err_hosts = []
|
||||
err_flag = False
|
||||
for host in hosts:
|
||||
if not host:
|
||||
continue
|
||||
host_arr = str(host).split()
|
||||
try:
|
||||
host_entry = HostsEntry(entry_type='ipv4' if IpUtils.is_ipv4(str(host_arr[0])) else 'ipv6',
|
||||
address=host_arr[0],
|
||||
names=host_arr[1:])
|
||||
new_entrys.append(host_entry)
|
||||
except Exception as err:
|
||||
err_hosts.append(host + "\n")
|
||||
logger.error(f"[HOST] 格式转换错误:{str(err)}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"[HOST] 格式转换错误:{str(err)}")
|
||||
|
||||
# 写入系统hosts
|
||||
if new_entrys:
|
||||
try:
|
||||
# 添加分隔标识
|
||||
system_hosts.add([HostsEntry(entry_type='comment', comment="# CustomHostsPlugin")])
|
||||
# 添加新的Hosts
|
||||
system_hosts.add(new_entrys)
|
||||
system_hosts.write()
|
||||
logger.info("更新系统hosts文件成功")
|
||||
except Exception as err:
|
||||
err_flag = True
|
||||
logger.error(f"更新系统hosts文件失败:{str(err) or '请检查权限'}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"更新系统hosts文件失败:{str(err) or '请检查权限'}")
|
||||
return err_flag, err_hosts
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
@@ -1,260 +0,0 @@
|
||||
from typing import List, Tuple, Dict, Any
|
||||
|
||||
from python_hosts import Hosts, HostsEntry
|
||||
|
||||
from app.core.event import eventmanager
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.ip import IpUtils
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
|
||||
class CustomHosts(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "自定义Hosts"
|
||||
# 插件描述
|
||||
plugin_desc = "修改系统hosts文件,加速网络访问。"
|
||||
# 插件图标
|
||||
plugin_icon = "hosts.png"
|
||||
# 主题色
|
||||
plugin_color = "#02C4E0"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "customhosts_"
|
||||
# 加载顺序
|
||||
plugin_order = 10
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_hosts = []
|
||||
_enabled = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._hosts = config.get("hosts")
|
||||
if isinstance(self._hosts, str):
|
||||
self._hosts = str(self._hosts).split('\n')
|
||||
if self._enabled and self._hosts:
|
||||
# 排除空的host
|
||||
new_hosts = []
|
||||
for host in self._hosts:
|
||||
if host and host != '\n':
|
||||
new_hosts.append(host.replace("\n", "") + "\n")
|
||||
self._hosts = new_hosts
|
||||
|
||||
# 添加到系统
|
||||
error_flag, error_hosts = self.__add_hosts_to_system(self._hosts)
|
||||
self._enabled = self._enabled and not error_flag
|
||||
|
||||
# 更新错误Hosts
|
||||
self.update_config({
|
||||
"hosts": ''.join(self._hosts),
|
||||
"err_hosts": error_hosts,
|
||||
"enabled": self._enabled
|
||||
})
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'hosts',
|
||||
'label': '自定义hosts',
|
||||
'rows': 10,
|
||||
'placeholder': '每行一个配置,格式为:ip host1 host2 ...'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'err_hosts',
|
||||
'readonly': True,
|
||||
'label': '错误hosts',
|
||||
'rows': 2,
|
||||
'placeholder': '错误的hosts配置会展示在此处,请修改上方hosts重新提交(错误的hosts不会写入系统hosts文件)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': 'host格式ip host,中间有空格!!!'
|
||||
'(注:容器运行则更新容器hosts!非宿主机!)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"hosts": "",
|
||||
"err_hosts": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def __read_system_hosts():
|
||||
"""
|
||||
读取系统hosts对象
|
||||
"""
|
||||
# 获取本机hosts路径
|
||||
if SystemUtils.is_windows():
|
||||
hosts_path = r"c:\windows\system32\drivers\etc\hosts"
|
||||
else:
|
||||
hosts_path = '/etc/hosts'
|
||||
# 读取系统hosts
|
||||
return Hosts(path=hosts_path)
|
||||
|
||||
def __add_hosts_to_system(self, hosts):
|
||||
"""
|
||||
添加hosts到系统
|
||||
"""
|
||||
# 系统hosts对象
|
||||
system_hosts = self.__read_system_hosts()
|
||||
# 过滤掉插件添加的hosts
|
||||
orgin_entries = []
|
||||
for entry in system_hosts.entries:
|
||||
if entry.entry_type == "comment" and entry.comment == "# CustomHostsPlugin":
|
||||
break
|
||||
orgin_entries.append(entry)
|
||||
system_hosts.entries = orgin_entries
|
||||
# 新的有效hosts
|
||||
new_entrys = []
|
||||
# 新的错误的hosts
|
||||
err_hosts = []
|
||||
err_flag = False
|
||||
for host in hosts:
|
||||
if not host:
|
||||
continue
|
||||
host_arr = str(host).split()
|
||||
try:
|
||||
host_entry = HostsEntry(entry_type='ipv4' if IpUtils.is_ipv4(str(host_arr[0])) else 'ipv6',
|
||||
address=host_arr[0],
|
||||
names=host_arr[1:])
|
||||
new_entrys.append(host_entry)
|
||||
except Exception as err:
|
||||
err_hosts.append(host + "\n")
|
||||
logger.error(f"[HOST] 格式转换错误:{str(err)}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"[HOST] 格式转换错误:{str(err)}")
|
||||
|
||||
# 写入系统hosts
|
||||
if new_entrys:
|
||||
try:
|
||||
# 添加分隔标识
|
||||
system_hosts.add([HostsEntry(entry_type='comment', comment="# CustomHostsPlugin")])
|
||||
# 添加新的Hosts
|
||||
system_hosts.add(new_entrys)
|
||||
system_hosts.write()
|
||||
logger.info("更新系统hosts文件成功")
|
||||
except Exception as err:
|
||||
err_flag = True
|
||||
logger.error(f"更新系统hosts文件失败:{str(err) or '请检查权限'}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"更新系统hosts文件失败:{str(err) or '请检查权限'}")
|
||||
return err_flag, err_hosts
|
||||
|
||||
def stop_service(self):
    """
    Plugin shutdown hook; this plugin holds no background resources,
    so there is nothing to release.
    """
    pass
|
||||
@eventmanager.register(EventType.PluginReload)
def reload(self, event):
    """
    React to a plugin-reload event by re-initialising from saved config.

    Events carry a target plugin id; only this plugin's own id triggers
    a reload, everything else is ignored.
    """
    target_id = event.event_data.get("plugin_id")
    # Ignore empty ids and events addressed to other plugins.
    if target_id and target_id == self.__class__.__name__:
        return self.init_plugin(self.get_config())
|
||||
@@ -1,250 +0,0 @@
|
||||
from typing import Any, List, Dict, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import EventManager
|
||||
from app.helper.cookiecloud import CookieCloudHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import EventType
|
||||
|
||||
|
||||
class CustomSites(_PluginBase):
    """
    Plugin that registers user-defined sites so they can participate in
    sign-in and statistics features. Sites are declared as text lines
    ("name|url|render Y/N"), assigned synthetic ids above ``site_id_base``,
    and their cookies are refreshed from CookieCloud.
    """

    # Plugin display name
    plugin_name = "自定义站点"
    # Plugin description
    plugin_desc = "增加自定义站点为签到和统计使用。"
    # Plugin icon
    plugin_icon = "world.png"
    # Theme color
    plugin_color = "#9AC16C"
    # Plugin version
    plugin_version = "0.1"
    # Plugin author
    plugin_author = "lightolly"
    # Author homepage
    author_url = "https://github.com/lightolly"
    # Prefix for this plugin's config item ids
    plugin_config_prefix = "customsites_"
    # Load order
    plugin_order = 0
    # Minimum user auth level allowed to use the plugin
    auth_level = 2

    # Base id for custom sites; allocated ids start above this value so
    # they never collide with built-in site ids
    site_id_base = 60000
    site_id_alloc = site_id_base

    # Private attributes
    cookie_cloud: CookieCloudHelper = None

    # Config attributes
    _enabled: bool = False
    """
    {
        "id": "站点ID",
        "name": "站点名称",
        "url": "站点地址",
        "cookie": "站点Cookie",
        "ua": "User-Agent",
        "proxy": "是否使用代理",
        "render": "是否仿真",
    }
    """
    _sites: list[Dict] = []
    """
    格式
    站点名称|url|是否仿真
    """
    _site_urls: str = ""

    def init_plugin(self, config: dict = None):
        """
        Initialise from saved config: reconcile the configured URL list
        with previously stored sites, allocate ids for new ones, sync
        cookies and persist the result.
        """
        self.cookie_cloud = CookieCloudHelper(
            server=settings.COOKIECLOUD_HOST,
            key=settings.COOKIECLOUD_KEY,
            password=settings.COOKIECLOUD_PASSWORD
        )

        del_sites = []
        sites = []
        new_site_urls = []
        # Configuration
        if config:
            self._enabled = config.get("enabled", False)
            self._sites = config.get("sites", [])
            self._site_urls = config.get("site_urls", "")

        if not self._enabled:
            return

        site_urls = self._site_urls.splitlines()
        # Keep only stored sites whose url still appears in site_urls.
        # NOTE(review): lines are assumed to be well-formed "name|url|render";
        # a malformed line would raise IndexError here — TODO confirm upstream validation.
        urls = [site_url.split('|')[1] for site_url in site_urls]
        for site in self._sites:
            if site.get("url") not in urls:
                del_sites.append(site)
            else:
                sites.append(site)

        # Collect lines describing sites we have not stored yet
        for item in site_urls:
            _, url, _ = item.split("|")
            if url in [site.get("url") for site in self._sites]:
                continue
            else:
                new_site_urls.append(item)

        # Next id to allocate = max of existing ids (and the base) + 1
        alloc_ids = [site.get("id") for site in self._sites if site.get("id")] + [self.site_id_base]
        self.site_id_alloc = max(alloc_ids) + 1

        # Materialise new sites with freshly allocated ids
        for item in new_site_urls:
            site_name, item, site_render = item.split("|")
            sites.append({
                "id": self.site_id_alloc,
                "name": site_name,
                "url": item,
                "render": True if site_render.upper() == 'Y' else False,
                "cookie": "",
            })
            self.site_id_alloc += 1
        self._sites = sites
        # Refresh cookies, then persist the reconciled site list
        self.sync_cookie()
        self.__update_config()

        # Broadcast deletion for sites removed from the configuration
        for site in del_sites:
            self.delete_site(site.get("id"))
            logger.info(f"删除站点 {site.get('name')}")

    def get_state(self) -> bool:
        """Return whether the plugin is enabled."""
        return self._enabled

    def __update_config(self):
        """Persist the current plugin configuration."""
        self.update_config(
            {
                "enabled": self._enabled,
                "sites": self._sites,
                "site_urls": self._site_urls
            }
        )

    def __get_site_by_domain(self, domain):
        """
        Return the stored site whose url's host ends with *domain*,
        or None when no site matches.
        """
        for site in self._sites:
            site_domain = urlparse(site.get("url")).netloc
            if site_domain.endswith(domain):
                return site
        return None

    def sync_cookie(self):
        """
        Refresh the cookies of all matching custom sites from CookieCloud.
        """
        logger.info("开始同步CookieCloud站点 ...")
        cookies, msg = self.cookie_cloud.download()
        if not cookies:
            logger.error(f"CookieCloud同步失败:{msg}")
            return
        # Update the cookie of every site whose domain is known to CookieCloud
        _update_count = 0
        for domain, cookie in cookies.items():
            # Look up the stored site for this domain
            site_info = self.__get_site_by_domain(domain)
            if site_info:
                # Update the site's cookie in place
                logger.info(f"更新站点 {domain} Cookie ...")
                site_info.update({"cookie": cookie})
                _update_count += 1

        # Done — report how many sites were touched
        ret_msg = f"更新了{_update_count}个站点,总{len(self._sites)}个站点"
        logger.info(f"自定义站点 Cookie同步成功:{ret_msg}")

    @staticmethod
    def get_command() -> List[Dict[str, Any]]:
        """No remote commands are exposed by this plugin."""
        pass

    def get_api(self) -> List[Dict[str, Any]]:
        """No HTTP API endpoints are exposed by this plugin."""
        pass

    def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
        """
        拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构

        Build the plugin's settings page. Returns a tuple of
        (Vuetify component tree, default form values).
        """
        return [
            {
                'component': 'VForm',
                'content': [
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 6
                                },
                                'content': [
                                    {
                                        'component': 'VSwitch',
                                        'props': {
                                            'model': 'enabled',
                                            'label': '启用插件',
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12
                                },
                                'content': [
                                    {
                                        'component': 'VTextarea',
                                        'props': {
                                            'model': 'site_urls',
                                            'label': '站点列表',
                                            'rows': 5,
                                            'placeholder': '每一行一个站点,配置方式:\n'
                                                           '站点名称|站点地址|是否仿真(Y/N)\n'
                                        }
                                    }
                                ]
                            }
                        ]
                    }
                ]
            }
        ], {
            "enabled": False,
            "site_urls": [],
            "sites": self._sites
        }

    def get_page(self) -> List[dict]:
        """This plugin has no standalone detail page."""
        pass

    def stop_service(self):
        """
        Plugin shutdown hook; nothing to release.
        """
        pass

    @staticmethod
    def delete_site(site_id):
        """
        Broadcast a site-deleted event so other components can drop
        references to the removed custom site.
        """
        # Plugin site deletion notification
        EventManager().send_event(EventType.SiteDeleted,
                                  {
                                      "site_id": site_id
                                  })
|
||||
@@ -1,847 +0,0 @@
|
||||
import datetime
|
||||
import re
|
||||
import shutil
|
||||
import threading
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from typing import List, Tuple, Dict, Any, Optional
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.observers.polling import PollingObserver
|
||||
|
||||
from app import schemas
|
||||
from app.chain.tmdb import TmdbChain
|
||||
from app.chain.transfer import TransferChain
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.core.metainfo import MetaInfoPath
|
||||
from app.db.downloadhistory_oper import DownloadHistoryOper
|
||||
from app.db.transferhistory_oper import TransferHistoryOper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import Notification, NotificationType, TransferInfo
|
||||
from app.schemas.types import EventType, MediaType, SystemConfigKey
|
||||
from app.utils.string import StringUtils
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
class FileMonitorHandler(FileSystemEventHandler):
    """
    Watchdog event handler that forwards create/move events on one
    watched directory to the owning DirMonitor plugin.
    """

    def __init__(self, monpath: str, sync: Any, **kwargs):
        super().__init__(**kwargs)
        # Root of the watched tree; passed back with every event so the
        # plugin knows which configured directory fired.
        self._watch_path = monpath
        # The DirMonitor instance that processes the events.
        self.sync = sync

    def on_created(self, event):
        # A new file/directory appeared under the watched path.
        self.sync.event_handler(event=event, text="创建",
                                mon_path=self._watch_path, event_path=event.src_path)

    def on_moved(self, event):
        # Something was moved; the destination path is the one to process.
        self.sync.event_handler(event=event, text="移动",
                                mon_path=self._watch_path, event_path=event.dest_path)
|
||||
|
||||
class DirMonitor(_PluginBase):
    """
    Watch configured directories and, whenever a media file appears,
    recognise it and transfer it into the media library in real time.
    Completion messages are batched per title/season and sent by a
    periodic job.
    """

    # Plugin display name
    plugin_name = "目录监控"
    # Plugin description
    plugin_desc = "监控目录文件发生变化时实时整理到媒体库。"
    # Plugin icon
    plugin_icon = "directory.png"
    # Theme color
    plugin_color = "#E0995E"
    # Plugin version
    plugin_version = "1.0"
    # Plugin author
    plugin_author = "jxxghp"
    # Author homepage
    author_url = "https://github.com/jxxghp"
    # Prefix for this plugin's config item ids
    plugin_config_prefix = "dirmonitor_"
    # Load order
    plugin_order = 4
    # Minimum user auth level allowed to use the plugin
    auth_level = 1

    # Private attributes
    _scheduler = None
    transferhis = None
    downloadhis = None
    # NOTE(review): "transferchian" is a typo for "transferchain"; kept
    # because callers/config may rely on the attribute name.
    transferchian = None
    tmdbchain = None
    _observer = []
    _enabled = False
    _notify = False
    _onlyonce = False
    # Monitoring mode: compatibility (polling) / fast (native observer)
    _mode = "fast"
    # Transfer method (move/copy/link/...)
    _transfer_type = settings.TRANSFER_TYPE
    _monitor_dirs = ""
    _exclude_keywords = ""
    _interval: int = 10
    # Maps source directory -> destination directory (None = default)
    _dirconf: Dict[str, Optional[Path]] = {}
    # Maps source directory -> transfer method
    _transferconf: Dict[str, Optional[str]] = {}
    # Pending per-title/season notification batches (see __handle_file)
    _medias = {}
    # Exit event used to interrupt scheduler shutdown
    _event = threading.Event()

    def init_plugin(self, config: dict = None):
        """
        Initialise state from config, parse the monitored-directory list
        and start one watchdog observer per directory plus the periodic
        message-flush job.
        """
        self.transferhis = TransferHistoryOper()
        self.downloadhis = DownloadHistoryOper()
        self.transferchian = TransferChain()
        self.tmdbchain = TmdbChain()
        # Reset directory mappings before re-reading config
        self._dirconf = {}
        self._transferconf = {}

        # Read configuration
        if config:
            self._enabled = config.get("enabled")
            self._notify = config.get("notify")
            self._onlyonce = config.get("onlyonce")
            self._mode = config.get("mode")
            self._transfer_type = config.get("transfer_type")
            self._monitor_dirs = config.get("monitor_dirs") or ""
            self._exclude_keywords = config.get("exclude_keywords") or ""
            self._interval = config.get("interval") or 10

        # Stop any previously running observers/jobs
        self.stop_service()

        if self._enabled or self._onlyonce:
            # Scheduler for periodic/one-shot jobs
            self._scheduler = BackgroundScheduler(timezone=settings.TZ)
            # Periodic job that flushes batched library-in messages
            self._scheduler.add_job(self.send_msg, trigger='interval', seconds=15)

            # Parse the directory configuration, one entry per line
            monitor_dirs = self._monitor_dirs.split("\n")
            if not monitor_dirs:
                return
            for mon_path in monitor_dirs:
                # Line format: source[:destination][#transfer_type]
                if not mon_path:
                    continue

                # Per-directory transfer-type override after '#'
                _transfer_type = self._transfer_type
                if mon_path.count("#") == 1:
                    _transfer_type = mon_path.split("#")[1]
                    mon_path = mon_path.split("#")[0]

                # Split source:destination; on Windows the drive-letter
                # colon must not be treated as the separator
                if SystemUtils.is_windows():
                    if mon_path.count(":") > 1:
                        paths = [mon_path.split(":")[0] + ":" + mon_path.split(":")[1],
                                 mon_path.split(":")[2] + ":" + mon_path.split(":")[3]]
                    else:
                        paths = [mon_path]
                else:
                    paths = mon_path.split(":")

                # Destination directory (None = let the transfer chain decide)
                target_path = None
                if len(paths) > 1:
                    mon_path = paths[0]
                    target_path = Path(paths[1])
                    self._dirconf[mon_path] = target_path
                else:
                    self._dirconf[mon_path] = None

                # Remember the transfer method for this source
                self._transferconf[mon_path] = _transfer_type

                # Start the directory watcher
                if self._enabled:
                    # Refuse destinations nested inside the watched source
                    # (would cause transfer loops)
                    try:
                        if target_path and target_path.is_relative_to(Path(mon_path)):
                            logger.warn(f"{target_path} 是下载目录 {mon_path} 的子目录,无法监控")
                            self.systemmessage.put(f"{target_path} 是下载目录 {mon_path} 的子目录,无法监控")
                            continue
                    except Exception as e:
                        logger.debug(str(e))
                        pass

                    try:
                        if self._mode == "compatibility":
                            # Compatibility mode: polling observer — slower and
                            # prevents NAS sleep, but works on remote mounts (SMB)
                            observer = PollingObserver(timeout=10)
                        else:
                            # Let watchdog pick the best native backend
                            observer = Observer(timeout=10)
                        self._observer.append(observer)
                        observer.schedule(FileMonitorHandler(mon_path, self), path=mon_path, recursive=True)
                        observer.daemon = True
                        observer.start()
                        logger.info(f"{mon_path} 的目录监控服务启动")
                    except Exception as e:
                        err_msg = str(e)
                        # inotify watch limits must be raised on the host
                        if "inotify" in err_msg and "reached" in err_msg:
                            logger.warn(
                                f"目录监控服务启动出现异常:{err_msg},请在宿主机上(不是docker容器内)执行以下命令并重启:"
                                + """
                                     echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf
                                     echo fs.inotify.max_user_instances=524288 | sudo tee -a /etc/sysctl.conf
                                     sudo sysctl -p
                                     """)
                        else:
                            logger.error(f"{mon_path} 启动目录监控失败:{err_msg}")
                            self.systemmessage.put(f"{mon_path} 启动目录监控失败:{err_msg}")

            # One-shot full sync when requested
            if self._onlyonce:
                logger.info("目录监控服务启动,立即运行一次")
                self._scheduler.add_job(func=self.sync_all, trigger='date',
                                        run_date=datetime.datetime.now(
                                            tz=pytz.timezone(settings.TZ)) + datetime.timedelta(seconds=3)
                                        )
                # Reset the one-shot switch
                self._onlyonce = False
                # Persist configuration
                self.__update_config()

            # Start the scheduler if any jobs were registered
            if self._scheduler.get_jobs():
                self._scheduler.print_jobs()
                self._scheduler.start()

    def __update_config(self):
        """
        Persist the current plugin configuration.
        """
        self.update_config({
            "enabled": self._enabled,
            "notify": self._notify,
            "onlyonce": self._onlyonce,
            "mode": self._mode,
            "transfer_type": self._transfer_type,
            "monitor_dirs": self._monitor_dirs,
            "exclude_keywords": self._exclude_keywords,
            "interval": self._interval
        })

    @eventmanager.register(EventType.DirectorySync)
    def remote_sync(self, event: Event):
        """
        Run a full sync in response to a remote command event, reporting
        start and completion back to the requesting user/channel.
        """
        if event:
            self.post_message(channel=event.event_data.get("channel"),
                              title="开始同步监控目录 ...",
                              userid=event.event_data.get("user"))
        self.sync_all()
        if event:
            self.post_message(channel=event.event_data.get("channel"),
                              title="监控目录同步完成!", userid=event.event_data.get("user"))

    def sync_all(self):
        """
        Full sync: process every media file currently present under every
        monitored directory.
        """
        logger.info("开始全量同步监控目录 ...")
        # Walk every monitored directory
        for mon_path in self._dirconf.keys():
            # Process every media file under it
            for file_path in SystemUtils.list_files(Path(mon_path), settings.RMT_MEDIAEXT):
                self.__handle_file(event_path=str(file_path), mon_path=mon_path)
        logger.info("全量同步监控目录完成!")

    def event_handler(self, event, mon_path: str, text: str, event_path: str):
        """
        处理文件变化
        :param event: 事件
        :param mon_path: 监控目录
        :param text: 事件描述
        :param event_path: 事件文件路径
        """
        if not event.is_directory:
            # A file changed; hand it to the processing pipeline
            logger.debug("文件%s:%s" % (text, event_path))
            self.__handle_file(event_path=event_path, mon_path=mon_path)

    def __handle_file(self, event_path: str, mon_path: str):
        """
        同步一个文件
        :param event_path: 事件文件路径
        :param mon_path: 监控目录

        Filters out already-processed/excluded paths, recognises the
        media, transfers it, records history and queues a notification.
        """
        file_path = Path(event_path)
        try:
            if not file_path.exists():
                return
            # Serialise all file handling; watchdog callbacks are concurrent
            with lock:
                transfer_history = self.transferhis.get_by_src(event_path)
                if transfer_history:
                    logger.debug("文件已处理过:%s" % event_path)
                    return

                # Skip recycle-bin and hidden files
                if event_path.find('/@Recycle/') != -1 \
                        or event_path.find('/#recycle/') != -1 \
                        or event_path.find('/.') != -1 \
                        or event_path.find('/@eaDir') != -1:
                    logger.debug(f"{event_path} 是回收站或隐藏的文件")
                    return

                # Skip paths matching user-configured exclude keywords (regex)
                if self._exclude_keywords:
                    for keyword in self._exclude_keywords.split("\n"):
                        if keyword and re.findall(keyword, event_path):
                            logger.info(f"{event_path} 命中过滤关键字 {keyword},不处理")
                            return

                # Skip paths matching the global transfer-exclude words
                transfer_exclude_words = self.systemconfig.get(SystemConfigKey.TransferExcludeWords)
                if transfer_exclude_words:
                    for keyword in transfer_exclude_words:
                        if not keyword:
                            continue
                        if keyword and re.search(r"%s" % keyword, event_path, re.IGNORECASE):
                            logger.info(f"{event_path} 命中整理屏蔽词 {keyword},不处理")
                            return

                # Skip non-media files
                if file_path.suffix not in settings.RMT_MEDIAEXT:
                    logger.debug(f"{event_path} 不是媒体文件")
                    return

                # Blu-ray folder: treat everything up to BDMV as the item
                if re.search(r"BDMV[/\\]STREAM", event_path, re.IGNORECASE):
                    # Truncate the path to the directory containing BDMV
                    event_path = event_path[:event_path.find("BDMV")]
                    file_path = Path(event_path)

                # Skip if (the possibly truncated path) was already transferred
                if self.transferhis.get_by_src(event_path):
                    logger.info(f"{event_path} 已整理过")
                    return

                # Extract metadata from the path
                file_meta = MetaInfoPath(file_path)
                if not file_meta.name:
                    logger.error(f"{file_path.name} 无法识别有效信息")
                    return

                # Destination directory for this source
                target: Path = self._dirconf.get(mon_path)
                # Transfer method for this source
                transfer_type = self._transferconf.get(mon_path)
                # Download history of the parent dir (may supply the tmdbid)
                download_history = self.downloadhis.get_by_path(Path(event_path).parent)

                # Recognise the media
                mediainfo: MediaInfo = self.chain.recognize_media(meta=file_meta,
                                                                  tmdbid=download_history.tmdbid if download_history else None)
                if not mediainfo:
                    logger.warn(f'未识别到媒体信息,标题:{file_meta.name}')
                    # Record a failed transfer so the user can /redo it
                    his = self.transferhis.add_fail(
                        src_path=file_path,
                        mode=transfer_type,
                        meta=file_meta
                    )
                    if self._notify:
                        self.chain.post_message(Notification(
                            mtype=NotificationType.Manual,
                            title=f"{file_path.name} 未识别到媒体信息,无法入库!\n"
                                  f"回复:```\n/redo {his.id} [tmdbid]|[类型]\n``` 手动识别转移。"
                        ))
                    return

                # When not following TMDB changes, reuse the previously
                # recorded title for the same tmdbid/type
                if not settings.SCRAP_FOLLOW_TMDB:
                    transfer_history = self.transferhis.get_by_type_tmdbid(tmdbid=mediainfo.tmdb_id,
                                                                           mtype=mediainfo.type.value)
                    if transfer_history:
                        mediainfo.title = transfer_history.title
                logger.info(f"{file_path.name} 识别为:{mediainfo.type.value} {mediainfo.title_year}")

                # Refresh media images
                self.chain.obtain_images(mediainfo=mediainfo)

                # Episode data (TV only)
                if mediainfo.type == MediaType.TV:
                    episodes_info = self.tmdbchain.tmdb_episodes(tmdbid=mediainfo.tmdb_id,
                                                                 season=file_meta.begin_season or 1)
                else:
                    episodes_info = None

                # Download hash looked up from the DB (avoids hitting the downloader)
                download_hash = self.get_download_hash(src=str(file_path))

                # Perform the transfer
                transferinfo: TransferInfo = self.chain.transfer(mediainfo=mediainfo,
                                                                 path=file_path,
                                                                 transfer_type=transfer_type,
                                                                 target=target,
                                                                 meta=file_meta,
                                                                 episodes_info=episodes_info)

                if not transferinfo:
                    logger.error("文件转移模块运行失败")
                    return
                if not transferinfo.success:
                    # Transfer failed
                    logger.warn(f"{file_path.name} 入库失败:{transferinfo.message}")
                    # Record a failed-transfer history entry
                    self.transferhis.add_fail(
                        src_path=file_path,
                        mode=transfer_type,
                        download_hash=download_hash,
                        meta=file_meta,
                        mediainfo=mediainfo,
                        transferinfo=transferinfo
                    )
                    if self._notify:
                        self.chain.post_message(Notification(
                            mtype=NotificationType.Manual,
                            title=f"{mediainfo.title_year}{file_meta.season_episode} 入库失败!",
                            text=f"原因:{transferinfo.message or '未知'}",
                            image=mediainfo.get_message_image()
                        ))
                    return

                # Record a successful-transfer history entry
                self.transferhis.add_success(
                    src_path=file_path,
                    mode=transfer_type,
                    download_hash=download_hash,
                    meta=file_meta,
                    mediainfo=mediainfo,
                    transferinfo=transferinfo
                )

                # Scrape metadata for the single transferred file
                if settings.SCRAP_METADATA:
                    self.chain.scrape_metadata(path=transferinfo.target_path,
                                               mediainfo=mediainfo,
                                               transfer_type=transfer_type)

                # Shape of the per-title/season notification batch:
                """
                {
                    "title_year season": {
                        "files": [
                            {
                                "path":,
                                "mediainfo":,
                                "file_meta":,
                                "transferinfo":
                            }
                        ],
                        "time": "2023-08-24 23:23:23.332"
                    }
                }
                """
                # Queue this file into the batch for unified messaging
                media_list = self._medias.get(mediainfo.title_year + " " + file_meta.season) or {}
                if media_list:
                    media_files = media_list.get("files") or []
                    if media_files:
                        file_exists = False
                        for file in media_files:
                            if str(event_path) == file.get("path"):
                                file_exists = True
                                break
                        if not file_exists:
                            media_files.append({
                                "path": event_path,
                                "mediainfo": mediainfo,
                                "file_meta": file_meta,
                                "transferinfo": transferinfo
                            })
                    else:
                        media_files = [
                            {
                                "path": event_path,
                                "mediainfo": mediainfo,
                                "file_meta": file_meta,
                                "transferinfo": transferinfo
                            }
                        ]
                    media_list = {
                        "files": media_files,
                        "time": datetime.datetime.now()
                    }
                else:
                    media_list = {
                        "files": [
                            {
                                "path": event_path,
                                "mediainfo": mediainfo,
                                "file_meta": file_meta,
                                "transferinfo": transferinfo
                            }
                        ],
                        "time": datetime.datetime.now()
                    }
                self._medias[mediainfo.title_year + " " + file_meta.season] = media_list

                # Broadcast completion event
                self.eventmanager.send_event(EventType.TransferComplete, {
                    'meta': file_meta,
                    'mediainfo': mediainfo,
                    'transferinfo': transferinfo
                })

                # Move mode: prune now-empty parent directories
                if transfer_type == "move":
                    for file_dir in file_path.parents:
                        if len(str(file_dir)) <= len(str(Path(mon_path))):
                            # Important: never delete above the monitored root
                            break
                        files = SystemUtils.list_files(file_dir, settings.RMT_MEDIAEXT)
                        if not files:
                            logger.warn(f"移动模式,删除空目录:{file_dir}")
                            shutil.rmtree(file_dir, ignore_errors=True)

        except Exception as e:
            logger.error("目录监控发生错误:%s - %s" % (str(e), traceback.format_exc()))

    def send_msg(self):
        """
        定时检查是否有媒体处理完,发送统一消息

        Periodic job: flush notification batches whose last update is
        older than the configured interval (movies flush immediately).
        """
        if not self._medias or not self._medias.keys():
            return

        # Walk all pending batches; list() because we delete while iterating
        for medis_title_year_season in list(self._medias.keys()):
            media_list = self._medias.get(medis_title_year_season)
            logger.info(f"开始处理媒体 {medis_title_year_season} 消息")

            if not media_list:
                continue

            # Last update time and the batched files
            last_update_time = media_list.get("time")
            media_files = media_list.get("files")
            if not last_update_time or not media_files:
                continue

            transferinfo = media_files[0].get("transferinfo")
            file_meta = media_files[0].get("file_meta")
            mediainfo = media_files[0].get("mediainfo")
            # Flush if the batch has been quiet longer than the interval,
            # or immediately for movies (a movie is one file)
            if (datetime.datetime.now() - last_update_time).total_seconds() > int(self._interval) \
                    or mediainfo.type == MediaType.MOVIE:
                # Send the notification
                if self._notify:

                    # Accumulate total size / file count across the batch
                    total_size = 0
                    file_count = 0

                    # Collected episode numbers for the season summary
                    episodes = []
                    for file in media_files:
                        transferinfo = file.get("transferinfo")
                        total_size += transferinfo.total_size
                        file_count += 1

                        file_meta = file.get("file_meta")
                        if file_meta and file_meta.begin_episode:
                            episodes.append(file_meta.begin_episode)

                    transferinfo.total_size = total_size
                    # Number of processed files
                    transferinfo.file_count = file_count

                    # Season/episode text, e.g. S01 E01-E04 or S01 E01、E02、E04
                    season_episode = None
                    # Multiple files means a TV season; show a season summary
                    if mediainfo.type == MediaType.TV:
                        # Season/episode text
                        season_episode = f"{file_meta.season} {StringUtils.format_ep(episodes)}"
                    # Send the message
                    self.transferchian.send_transfer_message(meta=file_meta,
                                                             mediainfo=mediainfo,
                                                             transferinfo=transferinfo,
                                                             season_episode=season_episode)
                # Message sent — drop the batch
                del self._medias[medis_title_year_season]
                continue

    def get_download_hash(self, src: str):
        """
        从表中获取download_hash,避免连接下载器

        Look up the download hash from the local DB instead of
        contacting the downloader; returns None when unknown.
        """
        download_file = self.downloadhis.get_file_by_fullpath(src)
        if download_file:
            return download_file.download_hash
        return None

    def get_state(self) -> bool:
        """Return whether the plugin is enabled."""
        return self._enabled

    @staticmethod
    def get_command() -> List[Dict[str, Any]]:
        """
        定义远程控制命令
        :return: 命令关键字、事件、描述、附带数据
        """
        return [{
            "cmd": "/directory_sync",
            "event": EventType.DirectorySync,
            "desc": "目录监控同步",
            "category": "管理",
            "data": {}
        }]

    def get_api(self) -> List[Dict[str, Any]]:
        """Expose the full-sync endpoint via the plugin API."""
        return [{
            "path": "/directory_sync",
            "endpoint": self.sync,
            "methods": ["GET"],
            "summary": "目录监控同步",
            "description": "目录监控同步",
        }]

    def sync(self) -> schemas.Response:
        """
        API调用目录同步

        Run a full sync and report success.
        """
        self.sync_all()
        return schemas.Response(success=True)

    def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
        """
        Build the plugin's settings page: a Vuetify component tree and
        the default form values.
        """
        return [
            {
                'component': 'VForm',
                'content': [
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VSwitch',
                                        'props': {
                                            'model': 'enabled',
                                            'label': '启用插件',
                                        }
                                    }
                                ]
                            },
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VSwitch',
                                        'props': {
                                            'model': 'notify',
                                            'label': '发送通知',
                                        }
                                    }
                                ]
                            },
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VSwitch',
                                        'props': {
                                            'model': 'onlyonce',
                                            'label': '立即运行一次',
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VSelect',
                                        'props': {
                                            'model': 'mode',
                                            'label': '监控模式',
                                            'items': [
                                                {'title': '兼容模式', 'value': 'compatibility'},
                                                {'title': '性能模式', 'value': 'fast'}
                                            ]
                                        }
                                    }
                                ]
                            },
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VSelect',
                                        'props': {
                                            'model': 'transfer_type',
                                            'label': '转移方式',
                                            'items': [
                                                {'title': '移动', 'value': 'move'},
                                                {'title': '复制', 'value': 'copy'},
                                                {'title': '硬链接', 'value': 'link'},
                                                {'title': '软链接', 'value': 'softlink'},
                                                {'title': 'Rclone复制', 'value': 'rclone_copy'},
                                                {'title': 'Rclone移动', 'value': 'rclone_move'}
                                            ]
                                        }
                                    }
                                ]
                            },
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VTextField',
                                        'props': {
                                            'model': 'interval',
                                            'label': '入库消息延迟',
                                            'placeholder': '10'
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12
                                },
                                'content': [
                                    {
                                        'component': 'VTextarea',
                                        'props': {
                                            'model': 'monitor_dirs',
                                            'label': '监控目录',
                                            'rows': 5,
                                            'placeholder': '每一行一个目录,支持以下几种配置方式,转移方式支持 move、copy、link、softlink、rclone_copy、rclone_move:\n'
                                                           '监控目录\n'
                                                           '监控目录#转移方式\n'
                                                           '监控目录:转移目的目录\n'
                                                           '监控目录:转移目的目录#转移方式'
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12
                                },
                                'content': [
                                    {
                                        'component': 'VTextarea',
                                        'props': {
                                            'model': 'exclude_keywords',
                                            'label': '排除关键词',
                                            'rows': 2,
                                            'placeholder': '每一行一个关键词'
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                },
                                'content': [
                                    {
                                        'component': 'VAlert',
                                        'props': {
                                            'type': 'info',
                                            'variant': 'tonal',
                                            'text': '入库消息延迟默认10s,如网络较慢可酌情调大,有助于发送统一入库消息。'
                                        }
                                    }
                                ]
                            }
                        ]
                    }
                ]
            }
        ], {
            "enabled": False,
            "notify": False,
            "onlyonce": False,
            "mode": "fast",
            "transfer_type": settings.TRANSFER_TYPE,
            "monitor_dirs": "",
            "exclude_keywords": "",
            "interval": 10
        }

    def get_page(self) -> List[dict]:
        """This plugin has no standalone detail page."""
        pass

    def stop_service(self):
        """
        退出插件

        Stop all watchdog observers and shut down the scheduler.
        """
        if self._observer:
            for observer in self._observer:
                try:
                    observer.stop()
                    observer.join()
                except Exception as e:
                    print(str(e))
        self._observer = []
        if self._scheduler:
            self._scheduler.remove_all_jobs()
            if self._scheduler.running:
                self._event.set()
                self._scheduler.shutdown()
                self._event.clear()
            self._scheduler = None
||||
@@ -1,574 +0,0 @@
|
||||
import datetime
|
||||
import re
|
||||
import xml.dom.minidom
|
||||
from threading import Event
|
||||
from typing import Tuple, List, Dict, Any
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.douban import DoubanChain
|
||||
from app.chain.download import DownloadChain
|
||||
from app.chain.subscribe import SubscribeChain
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.utils.dom import DomUtils
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
|
||||
class DoubanRank(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "豆瓣榜单订阅"
|
||||
# 插件描述
|
||||
plugin_desc = "监控豆瓣热门榜单,自动添加订阅。"
|
||||
# 插件图标
|
||||
plugin_icon = "movie.jpg"
|
||||
# 主题色
|
||||
plugin_color = "#01B3E3"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "doubanrank_"
|
||||
# 加载顺序
|
||||
plugin_order = 6
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 退出事件
|
||||
_event = Event()
|
||||
# 私有属性
|
||||
downloadchain: DownloadChain = None
|
||||
subscribechain: SubscribeChain = None
|
||||
doubanchain: DoubanChain = None
|
||||
_scheduler = None
|
||||
_douban_address = {
|
||||
'movie-ustop': 'https://rsshub.app/douban/movie/ustop',
|
||||
'movie-weekly': 'https://rsshub.app/douban/movie/weekly',
|
||||
'movie-real-time': 'https://rsshub.app/douban/movie/weekly/subject_real_time_hotest',
|
||||
'show-domestic': 'https://rsshub.app/douban/movie/weekly/show_domestic',
|
||||
'movie-hot-gaia': 'https://rsshub.app/douban/movie/weekly/movie_hot_gaia',
|
||||
'tv-hot': 'https://rsshub.app/douban/movie/weekly/tv_hot',
|
||||
'movie-top250': 'https://rsshub.app/douban/movie/weekly/movie_top250',
|
||||
}
|
||||
_enabled = False
|
||||
_cron = ""
|
||||
_onlyonce = False
|
||||
_rss_addrs = []
|
||||
_ranks = []
|
||||
_vote = 0
|
||||
_clear = False
|
||||
_clearflag = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloadchain = DownloadChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
self.doubanchain = DoubanChain()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._vote = float(config.get("vote")) if config.get("vote") else 0
|
||||
rss_addrs = config.get("rss_addrs")
|
||||
if rss_addrs:
|
||||
if isinstance(rss_addrs, str):
|
||||
self._rss_addrs = rss_addrs.split('\n')
|
||||
else:
|
||||
self._rss_addrs = rss_addrs
|
||||
else:
|
||||
self._rss_addrs = []
|
||||
self._ranks = config.get("ranks") or []
|
||||
self._clear = config.get("clear")
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 启动服务
|
||||
if self._enabled or self._onlyonce:
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
if self._cron:
|
||||
logger.info(f"豆瓣榜单订阅服务启动,周期:{self._cron}")
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__refresh_rss,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="豆瓣榜单订阅")
|
||||
except Exception as e:
|
||||
logger.error(f"豆瓣榜单订阅服务启动失败,错误信息:{str(e)}")
|
||||
self.systemmessage.put(f"豆瓣榜单订阅服务启动失败,错误信息:{str(e)}")
|
||||
else:
|
||||
self._scheduler.add_job(func=self.__refresh_rss, trigger='date',
|
||||
run_date=datetime.datetime.now(
|
||||
tz=pytz.timezone(settings.TZ)) + datetime.timedelta(seconds=3)
|
||||
)
|
||||
logger.info("豆瓣榜单订阅服务启动,周期:每天 08:00")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info("豆瓣榜单订阅服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.__refresh_rss, trigger='date',
|
||||
run_date=datetime.datetime.now(
|
||||
tz=pytz.timezone(settings.TZ)) + datetime.timedelta(seconds=3)
|
||||
)
|
||||
|
||||
if self._onlyonce or self._clear:
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
# 记录缓存清理标志
|
||||
self._clearflag = self._clear
|
||||
# 关闭清理缓存
|
||||
self._clear = False
|
||||
# 保存配置
|
||||
self.__update_config()
|
||||
|
||||
if self._scheduler.get_jobs():
|
||||
# 启动服务
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'vote',
|
||||
'label': '评分',
|
||||
'placeholder': '评分大于等于该值才订阅'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'model': 'ranks',
|
||||
'label': '热门榜单',
|
||||
'items': [
|
||||
{'title': '电影北美票房榜', 'value': 'movie-ustop'},
|
||||
{'title': '一周口碑电影榜', 'value': 'movie-weekly'},
|
||||
{'title': '实时热门电影', 'value': 'movie-real-time'},
|
||||
{'title': '热门综艺', 'value': 'show-domestic'},
|
||||
{'title': '热门电影', 'value': 'movie-hot-gaia'},
|
||||
{'title': '热门电视剧', 'value': 'tv-hot'},
|
||||
{'title': '电影TOP10', 'value': 'movie-top250'},
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'rss_addrs',
|
||||
'label': '自定义榜单地址',
|
||||
'placeholder': '每行一个地址,如:https://rsshub.app/douban/movie/ustop'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'clear',
|
||||
'label': '清理历史记录',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"cron": "",
|
||||
"onlyonce": False,
|
||||
"vote": "",
|
||||
"ranks": [],
|
||||
"rss_addrs": "",
|
||||
"clear": False
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
"""
|
||||
拼装插件详情页面,需要返回页面配置,同时附带数据
|
||||
"""
|
||||
# 查询历史记录
|
||||
historys = self.get_data('history')
|
||||
if not historys:
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'text': '暂无数据',
|
||||
'props': {
|
||||
'class': 'text-center',
|
||||
}
|
||||
}
|
||||
]
|
||||
# 数据按时间降序排序
|
||||
historys = sorted(historys, key=lambda x: x.get('time'), reverse=True)
|
||||
# 拼装页面
|
||||
contents = []
|
||||
for history in historys:
|
||||
title = history.get("title")
|
||||
poster = history.get("poster")
|
||||
mtype = history.get("type")
|
||||
time_str = history.get("time")
|
||||
doubanid = history.get("doubanid")
|
||||
contents.append(
|
||||
{
|
||||
'component': 'VCard',
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'd-flex justify-space-start flex-nowrap flex-row',
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VImg',
|
||||
'props': {
|
||||
'src': poster,
|
||||
'height': 120,
|
||||
'width': 80,
|
||||
'aspect-ratio': '2/3',
|
||||
'class': 'object-cover shadow ring-gray-500',
|
||||
'cover': True
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCardSubtitle',
|
||||
'props': {
|
||||
'class': 'pa-2 font-bold break-words whitespace-break-spaces'
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'a',
|
||||
'props': {
|
||||
'href': f"https://movie.douban.com/subject/{doubanid}",
|
||||
'target': '_blank'
|
||||
},
|
||||
'text': title
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{mtype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{time_str}'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'grid gap-3 grid-info-card',
|
||||
},
|
||||
'content': contents
|
||||
}
|
||||
]
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
停止服务
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._event.set()
|
||||
self._scheduler.shutdown()
|
||||
self._event.clear()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
|
||||
def __update_config(self):
|
||||
"""
|
||||
列新配置
|
||||
"""
|
||||
self.update_config({
|
||||
"enabled": self._enabled,
|
||||
"cron": self._cron,
|
||||
"onlyonce": self._onlyonce,
|
||||
"vote": self._vote,
|
||||
"ranks": self._ranks,
|
||||
"rss_addrs": '\n'.join(map(str, self._rss_addrs)),
|
||||
"clear": self._clear
|
||||
})
|
||||
|
||||
def __refresh_rss(self):
|
||||
"""
|
||||
刷新RSS
|
||||
"""
|
||||
logger.info(f"开始刷新豆瓣榜单 ...")
|
||||
addr_list = self._rss_addrs + [self._douban_address.get(rank) for rank in self._ranks]
|
||||
if not addr_list:
|
||||
logger.info(f"未设置榜单RSS地址")
|
||||
return
|
||||
else:
|
||||
logger.info(f"共 {len(addr_list)} 个榜单RSS地址需要刷新")
|
||||
|
||||
# 读取历史记录
|
||||
if self._clearflag:
|
||||
history = []
|
||||
else:
|
||||
history: List[dict] = self.get_data('history') or []
|
||||
|
||||
for addr in addr_list:
|
||||
if not addr:
|
||||
continue
|
||||
try:
|
||||
logger.info(f"获取RSS:{addr} ...")
|
||||
rss_infos = self.__get_rss_info(addr)
|
||||
if not rss_infos:
|
||||
logger.error(f"RSS地址:{addr} ,未查询到数据")
|
||||
continue
|
||||
else:
|
||||
logger.info(f"RSS地址:{addr} ,共 {len(rss_infos)} 条数据")
|
||||
for rss_info in rss_infos:
|
||||
if self._event.is_set():
|
||||
logger.info(f"订阅服务停止")
|
||||
return
|
||||
|
||||
title = rss_info.get('title')
|
||||
douban_id = rss_info.get('doubanid')
|
||||
unique_flag = f"doubanrank: {title} (DB:{douban_id})"
|
||||
# 检查是否已处理过
|
||||
if unique_flag in [h.get("unique") for h in history]:
|
||||
continue
|
||||
# 元数据
|
||||
meta = MetaInfo(title)
|
||||
# 识别媒体信息
|
||||
if douban_id:
|
||||
# 识别豆瓣信息
|
||||
context = self.doubanchain.recognize_by_doubanid(douban_id)
|
||||
mediainfo = context.media_info
|
||||
if not mediainfo or not mediainfo.tmdb_id:
|
||||
logger.warn(f'未识别到媒体信息,标题:{title},豆瓣ID:{douban_id}')
|
||||
continue
|
||||
|
||||
else:
|
||||
# 匹配媒体信息
|
||||
mediainfo: MediaInfo = self.chain.recognize_media(meta=meta)
|
||||
if not mediainfo:
|
||||
logger.warn(f'未识别到媒体信息,标题:{title},豆瓣ID:{douban_id}')
|
||||
continue
|
||||
# 查询缺失的媒体信息
|
||||
exist_flag, _ = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
|
||||
continue
|
||||
# 判断用户是否已经添加订阅
|
||||
if self.subscribechain.exists(mediainfo=mediainfo, meta=meta):
|
||||
logger.info(f'{mediainfo.title_year} 订阅已存在')
|
||||
continue
|
||||
# 添加订阅
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="豆瓣榜单")
|
||||
# 存储历史记录
|
||||
history.append({
|
||||
"title": title,
|
||||
"type": mediainfo.type.value,
|
||||
"year": mediainfo.year,
|
||||
"poster": mediainfo.get_poster_image(),
|
||||
"overview": mediainfo.overview,
|
||||
"tmdbid": mediainfo.tmdb_id,
|
||||
"doubanid": douban_id,
|
||||
"time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
"unique": unique_flag
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(str(e))
|
||||
|
||||
# 保存历史记录
|
||||
self.save_data('history', history)
|
||||
# 缓存只清理一次
|
||||
self._clearflag = False
|
||||
logger.info(f"所有榜单RSS刷新完成")
|
||||
|
||||
@staticmethod
|
||||
def __get_rss_info(addr) -> List[dict]:
|
||||
"""
|
||||
获取RSS
|
||||
"""
|
||||
try:
|
||||
ret = RequestUtils().get_res(addr)
|
||||
if not ret:
|
||||
return []
|
||||
ret_xml = ret.text
|
||||
ret_array = []
|
||||
# 解析XML
|
||||
dom_tree = xml.dom.minidom.parseString(ret_xml)
|
||||
rootNode = dom_tree.documentElement
|
||||
items = rootNode.getElementsByTagName("item")
|
||||
for item in items:
|
||||
try:
|
||||
# 标题
|
||||
title = DomUtils.tag_value(item, "title", default="")
|
||||
# 链接
|
||||
link = DomUtils.tag_value(item, "link", default="")
|
||||
if not title and not link:
|
||||
logger.warn(f"条目标题和链接均为空,无法处理")
|
||||
continue
|
||||
doubanid = re.findall(r"/(\d+)/", link)
|
||||
if doubanid:
|
||||
doubanid = doubanid[0]
|
||||
if doubanid and not str(doubanid).isdigit():
|
||||
logger.warn(f"解析的豆瓣ID格式不正确:{doubanid}")
|
||||
continue
|
||||
# 返回对象
|
||||
ret_array.append({
|
||||
'title': title,
|
||||
'link': link,
|
||||
'doubanid': doubanid
|
||||
})
|
||||
except Exception as e1:
|
||||
logger.error("解析RSS条目失败:" + str(e1))
|
||||
continue
|
||||
return ret_array
|
||||
except Exception as e:
|
||||
logger.error("获取RSS失败:" + str(e))
|
||||
return []
|
||||
@@ -1,561 +0,0 @@
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
from threading import Lock
|
||||
from typing import Optional, Any, List, Dict, Tuple
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.douban import DoubanChain
|
||||
from app.chain.download import DownloadChain
|
||||
from app.chain.search import SearchChain
|
||||
from app.chain.subscribe import SubscribeChain
|
||||
from app.core.config import settings
|
||||
from app.core.event import Event
|
||||
from app.core.event import eventmanager
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.helper.rss import RssHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import EventType
|
||||
|
||||
lock = Lock()
|
||||
|
||||
|
||||
class DoubanSync(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "豆瓣想看"
|
||||
# 插件描述
|
||||
plugin_desc = "同步豆瓣想看数据,自动添加订阅。"
|
||||
# 插件图标
|
||||
plugin_icon = "douban.png"
|
||||
# 主题色
|
||||
plugin_color = "#05B711"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "doubansync_"
|
||||
# 加载顺序
|
||||
plugin_order = 3
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有变量
|
||||
_interests_url: str = "https://www.douban.com/feed/people/%s/interests"
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_cache_path: Optional[Path] = None
|
||||
rsshelper = None
|
||||
downloadchain = None
|
||||
searchchain = None
|
||||
subscribechain = None
|
||||
doubanchain = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
_onlyonce: bool = False
|
||||
_cron: str = ""
|
||||
_notify: bool = False
|
||||
_days: int = 7
|
||||
_users: str = ""
|
||||
_clear: bool = False
|
||||
_clearflag: bool = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.rsshelper = RssHelper()
|
||||
self.downloadchain = DownloadChain()
|
||||
self.searchchain = SearchChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
self.doubanchain = DoubanChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._notify = config.get("notify")
|
||||
self._days = config.get("days")
|
||||
self._users = config.get("users")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._clear = config.get("clear")
|
||||
|
||||
if self._enabled or self._onlyonce:
|
||||
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.sync,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="豆瓣想看")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"执行周期配置错误:{str(err)}")
|
||||
else:
|
||||
self._scheduler.add_job(self.sync, "interval", minutes=30, name="豆瓣想看")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"豆瓣想看服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.sync, trigger='date',
|
||||
run_date=datetime.datetime.now(
|
||||
tz=pytz.timezone(settings.TZ)) + datetime.timedelta(seconds=3)
|
||||
)
|
||||
|
||||
if self._onlyonce or self._clear:
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
# 记录缓存清理标志
|
||||
self._clearflag = self._clear
|
||||
# 关闭清理缓存
|
||||
self._clear = False
|
||||
# 保存配置
|
||||
self.__update_config()
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
"""
|
||||
定义远程控制命令
|
||||
:return: 命令关键字、事件、描述、附带数据
|
||||
"""
|
||||
return [{
|
||||
"cmd": "/douban_sync",
|
||||
"event": EventType.DoubanSync,
|
||||
"desc": "同步豆瓣想看",
|
||||
"category": "订阅",
|
||||
"data": {}
|
||||
}]
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取插件API
|
||||
[{
|
||||
"path": "/xx",
|
||||
"endpoint": self.xxx,
|
||||
"methods": ["GET", "POST"],
|
||||
"summary": "API说明"
|
||||
}]
|
||||
"""
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '发送通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'days',
|
||||
'label': '同步天数'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'users',
|
||||
'label': '用户列表',
|
||||
'placeholder': '豆瓣用户ID,多个用英文逗号分隔'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'clear',
|
||||
'label': '清理历史记录',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"notify": True,
|
||||
"onlyonce": False,
|
||||
"cron": "*/30 * * * *",
|
||||
"days": 7,
|
||||
"users": "",
|
||||
"clear": False
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
"""
|
||||
拼装插件详情页面,需要返回页面配置,同时附带数据
|
||||
"""
|
||||
# 查询同步详情
|
||||
historys = self.get_data('history')
|
||||
if not historys:
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'text': '暂无数据',
|
||||
'props': {
|
||||
'class': 'text-center',
|
||||
}
|
||||
}
|
||||
]
|
||||
# 数据按时间降序排序
|
||||
historys = sorted(historys, key=lambda x: x.get('time'), reverse=True)
|
||||
# 拼装页面
|
||||
contents = []
|
||||
for history in historys:
|
||||
title = history.get("title")
|
||||
poster = history.get("poster")
|
||||
mtype = history.get("type")
|
||||
time_str = history.get("time")
|
||||
doubanid = history.get("doubanid")
|
||||
contents.append(
|
||||
{
|
||||
'component': 'VCard',
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'd-flex justify-space-start flex-nowrap flex-row',
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VImg',
|
||||
'props': {
|
||||
'src': poster,
|
||||
'height': 120,
|
||||
'width': 80,
|
||||
'aspect-ratio': '2/3',
|
||||
'class': 'object-cover shadow ring-gray-500',
|
||||
'cover': True
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCardSubtitle',
|
||||
'props': {
|
||||
'class': 'pa-2 font-bold break-words whitespace-break-spaces'
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'a',
|
||||
'props': {
|
||||
'href': f"https://movie.douban.com/subject/{doubanid}",
|
||||
'target': '_blank'
|
||||
},
|
||||
'text': title
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{mtype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{time_str}'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'grid gap-3 grid-info-card',
|
||||
},
|
||||
'content': contents
|
||||
}
|
||||
]
|
||||
|
||||
def __update_config(self):
|
||||
"""
|
||||
更新配置
|
||||
"""
|
||||
self.update_config({
|
||||
"enabled": self._enabled,
|
||||
"notify": self._notify,
|
||||
"onlyonce": self._onlyonce,
|
||||
"cron": self._cron,
|
||||
"days": self._days,
|
||||
"users": self._users,
|
||||
"clear": self._clear
|
||||
})
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
|
||||
def sync(self):
|
||||
"""
|
||||
通过用户RSS同步豆瓣想看数据
|
||||
"""
|
||||
if not self._users:
|
||||
return
|
||||
# 读取历史记录
|
||||
if self._clearflag:
|
||||
history = []
|
||||
else:
|
||||
history: List[dict] = self.get_data('history') or []
|
||||
for user_id in self._users.split(","):
|
||||
# 同步每个用户的豆瓣数据
|
||||
if not user_id:
|
||||
continue
|
||||
logger.info(f"开始同步用户 {user_id} 的豆瓣想看数据 ...")
|
||||
url = self._interests_url % user_id
|
||||
results = self.rsshelper.parse(url)
|
||||
if not results:
|
||||
logger.warn(f"未获取到用户 {user_id} 豆瓣RSS数据:{url}")
|
||||
continue
|
||||
else:
|
||||
logger.info(f"获取到用户 {user_id} 豆瓣RSS数据:{len(results)}")
|
||||
# 解析数据
|
||||
for result in results:
|
||||
try:
|
||||
dtype = result.get("title", "")[:2]
|
||||
title = result.get("title", "")[2:]
|
||||
if dtype not in ["想看", "在看"]:
|
||||
logger.info(f'标题:{title},非想看/在看数据,跳过')
|
||||
continue
|
||||
if not result.get("link"):
|
||||
logger.warn(f'标题:{title},未获取到链接,跳过')
|
||||
continue
|
||||
# 判断是否在天数范围
|
||||
pubdate: Optional[datetime.datetime] = result.get("pubdate")
|
||||
if pubdate:
|
||||
if (datetime.datetime.now(datetime.timezone.utc) - pubdate).days > float(self._days):
|
||||
logger.info(f'已超过同步天数,标题:{title},发布时间:{pubdate}')
|
||||
continue
|
||||
douban_id = result.get("link", "").split("/")[-2]
|
||||
# 检查是否处理过
|
||||
if not douban_id or douban_id in [h.get("doubanid") for h in history]:
|
||||
logger.info(f'标题:{title},豆瓣ID:{douban_id} 已处理过')
|
||||
continue
|
||||
# 识别媒体信息
|
||||
meta = MetaInfo(title=title)
|
||||
context = self.doubanchain.recognize_by_doubanid(douban_id)
|
||||
mediainfo = context.media_info
|
||||
if not mediainfo or not mediainfo.tmdb_id:
|
||||
logger.warn(f'未识别到媒体信息,标题:{title},豆瓣ID:{douban_id}')
|
||||
continue
|
||||
# 查询缺失的媒体信息
|
||||
exist_flag, no_exists = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
|
||||
action = "exist"
|
||||
else:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中不存在,开始搜索 ...')
|
||||
# 搜索
|
||||
contexts = self.searchchain.process(mediainfo=mediainfo,
|
||||
no_exists=no_exists)
|
||||
if not contexts:
|
||||
logger.warn(f'{mediainfo.title_year} 未搜索到资源')
|
||||
# 添加订阅
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="豆瓣想看")
|
||||
action = "subscribe"
|
||||
else:
|
||||
# 自动下载
|
||||
downloads, lefts = self.downloadchain.batch_download(contexts=contexts, no_exists=no_exists,
|
||||
username="豆瓣想看")
|
||||
if downloads and not lefts:
|
||||
# 全部下载完成
|
||||
logger.info(f'{mediainfo.title_year} 下载完成')
|
||||
action = "download"
|
||||
else:
|
||||
# 未完成下载
|
||||
logger.info(f'{mediainfo.title_year} 未下载未完整,添加订阅 ...')
|
||||
# 添加订阅
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="豆瓣想看")
|
||||
action = "subscribe"
|
||||
# 存储历史记录
|
||||
history.append({
|
||||
"action": action,
|
||||
"title": title,
|
||||
"type": mediainfo.type.value,
|
||||
"year": mediainfo.year,
|
||||
"poster": mediainfo.get_poster_image(),
|
||||
"overview": mediainfo.overview,
|
||||
"tmdbid": mediainfo.tmdb_id,
|
||||
"doubanid": douban_id,
|
||||
"time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
})
|
||||
except Exception as err:
|
||||
logger.error(f'同步用户 {user_id} 豆瓣想看数据出错:{str(err)}')
|
||||
logger.info(f"用户 {user_id} 豆瓣想看同步完成")
|
||||
# 保存历史记录
|
||||
self.save_data('history', history)
|
||||
# 缓存只清理一次
|
||||
self._clearflag = False
|
||||
|
||||
@eventmanager.register(EventType.DoubanSync)
|
||||
def remote_sync(self, event: Event):
|
||||
"""
|
||||
豆瓣想看同步
|
||||
"""
|
||||
if event:
|
||||
logger.info("收到命令,开始执行豆瓣想看同步 ...")
|
||||
self.post_message(channel=event.event_data.get("channel"),
|
||||
title="开始同步豆瓣想看 ...",
|
||||
userid=event.event_data.get("user"))
|
||||
self.sync()
|
||||
|
||||
if event:
|
||||
self.post_message(channel=event.event_data.get("channel"),
|
||||
title="同步豆瓣想看数据完成!", userid=event.event_data.get("user"))
|
||||
@@ -1,324 +0,0 @@
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from app.chain.download import DownloadChain
|
||||
from app.chain.media import MediaChain
|
||||
from app.core.config import settings
|
||||
from app.db.downloadhistory_oper import DownloadHistoryOper
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional, Union
|
||||
from app.log import logger
|
||||
from app.schemas import NotificationType, TransferTorrent, DownloadingTorrent
|
||||
from app.schemas.types import TorrentStatus, MessageChannel
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class DownloadingMsg(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "下载进度推送"
|
||||
# 插件描述
|
||||
plugin_desc = "定时推送正在下载进度。"
|
||||
# 插件图标
|
||||
plugin_icon = "downloadmsg.png"
|
||||
# 主题色
|
||||
plugin_color = "#3DE75D"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "downloading_"
|
||||
# 加载顺序
|
||||
plugin_order = 22
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_seconds = None
|
||||
_type = None
|
||||
_adminuser = None
|
||||
_downloadhis = None
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._seconds = config.get("seconds") or 300
|
||||
self._type = config.get("type") or 'admin'
|
||||
self._adminuser = config.get("adminuser")
|
||||
|
||||
# 加载模块
|
||||
if self._enabled:
|
||||
self._downloadhis = DownloadHistoryOper()
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
|
||||
if self._seconds:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__downloading,
|
||||
trigger='interval',
|
||||
seconds=int(self._seconds),
|
||||
name="下载进度推送")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def __downloading(self):
|
||||
"""
|
||||
定时推送正在下载进度
|
||||
"""
|
||||
# 正在下载种子
|
||||
torrents = DownloadChain().list_torrents(status=TorrentStatus.DOWNLOADING)
|
||||
if not torrents:
|
||||
logger.info("当前没有正在下载的任务!")
|
||||
return
|
||||
# 推送用户
|
||||
if self._type == "admin" or self._type == "both":
|
||||
if not self._adminuser:
|
||||
logger.error("未配置管理员用户")
|
||||
return
|
||||
|
||||
for username in str(self._adminuser).split(","):
|
||||
self.__send_msg(torrents=torrents, username=username)
|
||||
|
||||
if self._type == "user" or self._type == "both":
|
||||
user_torrents = {}
|
||||
# 根据正在下载种子hash获取下载历史
|
||||
for torrent in torrents:
|
||||
downloadhis = self._downloadhis.get_by_hash(download_hash=torrent.hash)
|
||||
if not downloadhis:
|
||||
logger.warn(f"种子 {torrent.hash} 未获取到MoviePilot下载历史,无法推送下载进度")
|
||||
continue
|
||||
if not downloadhis.username:
|
||||
logger.debug(f"种子 {torrent.hash} 未获取到下载用户记录,无法推送下载进度")
|
||||
continue
|
||||
user_torrent = user_torrents.get(downloadhis.username) or []
|
||||
user_torrent.append(torrent)
|
||||
user_torrents[downloadhis.username] = user_torrent
|
||||
|
||||
if not user_torrents or not user_torrents.keys():
|
||||
logger.warn("未获取到用户下载记录,无法推送下载进度")
|
||||
return
|
||||
|
||||
# 推送用户下载任务进度
|
||||
for username in list(user_torrents.keys()):
|
||||
if not username:
|
||||
continue
|
||||
# 如果用户是管理员,无需重复推送
|
||||
if (self._type == "admin" or self._type == "both") and self._adminuser and username in str(
|
||||
self._adminuser).split(","):
|
||||
logger.debug("管理员已推送")
|
||||
continue
|
||||
|
||||
user_torrent = user_torrents.get(username)
|
||||
if not user_torrent:
|
||||
logger.warn(f"未获取到用户 {username} 下载任务")
|
||||
continue
|
||||
self.__send_msg(torrents=user_torrent,
|
||||
username=username)
|
||||
|
||||
if self._type == "all":
|
||||
self.__send_msg(torrents=torrents)
|
||||
|
||||
def __send_msg(self, torrents: Optional[List[Union[TransferTorrent, DownloadingTorrent]]], username: str = None):
|
||||
"""
|
||||
发送消息
|
||||
"""
|
||||
title = f"共 {len(torrents)} 个任务正在下载:"
|
||||
messages = []
|
||||
index = 1
|
||||
channel_value = None
|
||||
for torrent in torrents:
|
||||
year = None
|
||||
name = None
|
||||
se = None
|
||||
ep = None
|
||||
# 先查询下载记录,没有再识别
|
||||
downloadhis = self._downloadhis.get_by_hash(download_hash=torrent.hash)
|
||||
if downloadhis:
|
||||
name = downloadhis.title
|
||||
year = downloadhis.year
|
||||
se = downloadhis.seasons
|
||||
ep = downloadhis.episodes
|
||||
if not channel_value:
|
||||
channel_value = downloadhis.channel
|
||||
else:
|
||||
try:
|
||||
context = MediaChain().recognize_by_title(title=torrent.title)
|
||||
if not context or not context.media_info:
|
||||
continue
|
||||
media_info = context.media_info
|
||||
year = media_info.year
|
||||
name = media_info.title
|
||||
if media_info.number_of_seasons:
|
||||
se = f"S{str(media_info.number_of_seasons).rjust(2, '0')}"
|
||||
if media_info.number_of_episodes:
|
||||
ep = f"E{str(media_info.number_of_episodes).rjust(2, '0')}"
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
|
||||
# 拼装标题
|
||||
if year:
|
||||
media_name = "%s (%s) %s%s" % (name, year, se, ep)
|
||||
elif name:
|
||||
media_name = "%s %s%s" % (name, se, ep)
|
||||
else:
|
||||
media_name = torrent.title
|
||||
|
||||
if not self._adminuser or username not in str(self._adminuser).split(","):
|
||||
# 下载用户发送精简消息
|
||||
messages.append(f"{index}. {media_name} {round(torrent.progress, 1)}%")
|
||||
else:
|
||||
messages.append(f"{index}. {media_name}\n"
|
||||
f"{torrent.title} "
|
||||
f"{StringUtils.str_filesize(torrent.size)} "
|
||||
f"{round(torrent.progress, 1)}%")
|
||||
index += 1
|
||||
|
||||
# 用户消息渠道
|
||||
if channel_value:
|
||||
channel = next(
|
||||
(channel for channel in MessageChannel.__members__.values() if channel.value == channel_value), None)
|
||||
else:
|
||||
channel = None
|
||||
self.post_message(mtype=NotificationType.Download,
|
||||
channel=channel,
|
||||
title=title,
|
||||
text="\n".join(messages),
|
||||
userid=username)
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'seconds',
|
||||
'label': '执行间隔',
|
||||
'placeholder': '单位(秒)'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'adminuser',
|
||||
'label': '管理员用户',
|
||||
'placeholder': '多个用户,分割'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'type',
|
||||
'label': '推送类型',
|
||||
'items': [
|
||||
{'title': '管理员', 'value': 'admin'},
|
||||
{'title': '下载用户', 'value': 'user'},
|
||||
{'title': '管理员和下载用户', 'value': 'both'},
|
||||
{'title': '所有用户', 'value': 'all'}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"seconds": 300,
|
||||
"adminuser": "",
|
||||
"type": "admin"
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
@@ -1,313 +0,0 @@
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.core.config import settings
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.log import logger
|
||||
from app.schemas import NotificationType
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
|
||||
class InvitesSignin(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "药丸签到"
|
||||
# 插件描述
|
||||
plugin_desc = "药丸论坛签到。"
|
||||
# 插件图标
|
||||
plugin_icon = "invites.png"
|
||||
# 主题色
|
||||
plugin_color = "#FFFFFF"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "invitessignin_"
|
||||
# 加载顺序
|
||||
plugin_order = 24
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_cron = None
|
||||
_cookie = None
|
||||
_onlyonce = False
|
||||
_notify = False
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._cookie = config.get("cookie")
|
||||
self._notify = config.get("notify")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
|
||||
# 加载模块
|
||||
if self._enabled:
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__signin,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="药丸签到")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"药丸签到服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.__signin, trigger='date',
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="药丸签到")
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
self.update_config({
|
||||
"onlyonce": False,
|
||||
"cron": self._cron,
|
||||
"enabled": self._enabled,
|
||||
"cookie": self._cookie,
|
||||
"notify": self._notify,
|
||||
})
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def __signin(self):
|
||||
"""
|
||||
药丸签到
|
||||
"""
|
||||
res = RequestUtils(cookies=self._cookie).get_res(url="https://invites.fun")
|
||||
if not res or res.status_code != 200:
|
||||
logger.error("请求药丸错误")
|
||||
return
|
||||
|
||||
# 获取csrfToken
|
||||
pattern = r'"csrfToken":"(.*?)"'
|
||||
csrfToken = re.findall(pattern, res.text)
|
||||
if not csrfToken:
|
||||
logger.error("请求csrfToken失败")
|
||||
return
|
||||
|
||||
csrfToken = csrfToken[0]
|
||||
logger.info(f"获取csrfToken成功 {csrfToken}")
|
||||
|
||||
# 获取userid
|
||||
pattern = r'"userId":(\d+)'
|
||||
match = re.search(pattern, res.text)
|
||||
|
||||
if match:
|
||||
userId = match.group(1)
|
||||
logger.info(f"获取userid成功 {userId}")
|
||||
else:
|
||||
logger.error("未找到userId")
|
||||
return
|
||||
|
||||
headers = {
|
||||
"X-Csrf-Token": csrfToken,
|
||||
"X-Http-Method-Override": "PATCH",
|
||||
"Cookie": self._cookie
|
||||
}
|
||||
|
||||
data = {
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"canCheckin": False,
|
||||
"totalContinuousCheckIn": 2
|
||||
},
|
||||
"id": userId
|
||||
}
|
||||
}
|
||||
|
||||
# 开始签到
|
||||
res = RequestUtils(headers=headers).post_res(url=f"https://invites.fun/api/users/{userId}", json=data)
|
||||
|
||||
if not res or res.status_code != 200:
|
||||
logger.error("药丸签到失败")
|
||||
return
|
||||
|
||||
sign_dict = json.loads(res.text)
|
||||
money = sign_dict['data']['attributes']['money']
|
||||
totalContinuousCheckIn = sign_dict['data']['attributes']['totalContinuousCheckIn']
|
||||
|
||||
# 发送通知
|
||||
if self._notify:
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title="【药丸签到任务完成】",
|
||||
text=f"累计签到 {totalContinuousCheckIn} \n"
|
||||
f"剩余药丸 {money}")
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '开启通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '签到周期'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cookie',
|
||||
'label': '药丸cookie'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '整点定时签到失败?不妨换个时间试试'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"onlyonce": False,
|
||||
"notify": False,
|
||||
"cookie": "",
|
||||
"cron": "0 9 * * *"
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,166 +0,0 @@
|
||||
import hashlib
|
||||
import json
|
||||
import time
|
||||
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
|
||||
class IyuuHelper(object):
|
||||
_version = "2.0.0"
|
||||
_api_base = "https://api.iyuu.cn/%s"
|
||||
_sites = {}
|
||||
_token = None
|
||||
|
||||
def __init__(self, token):
|
||||
self._token = token
|
||||
if self._token:
|
||||
self.init_config()
|
||||
|
||||
def init_config(self):
|
||||
pass
|
||||
|
||||
def __request_iyuu(self, url, method="get", params=None):
|
||||
"""
|
||||
向IYUUApi发送请求
|
||||
"""
|
||||
if params:
|
||||
if not params.get("sign"):
|
||||
params.update({"sign": self._token})
|
||||
if not params.get("version"):
|
||||
params.update({"version": self._version})
|
||||
else:
|
||||
params = {"sign": self._token, "version": self._version}
|
||||
# 开始请求
|
||||
if method == "get":
|
||||
ret = RequestUtils(
|
||||
accept_type="application/json"
|
||||
).get_res(f"{url}", params=params)
|
||||
else:
|
||||
ret = RequestUtils(
|
||||
accept_type="application/json"
|
||||
).post_res(f"{url}", data=params)
|
||||
if ret:
|
||||
result = ret.json()
|
||||
if result.get('ret') == 200:
|
||||
return result.get('data'), ""
|
||||
else:
|
||||
return None, f"请求IYUU失败,状态码:{result.get('ret')},返回信息:{result.get('msg')}"
|
||||
elif ret is not None:
|
||||
return None, f"请求IYUU失败,状态码:{ret.status_code},错误原因:{ret.reason}"
|
||||
else:
|
||||
return None, f"请求IYUU失败,未获取到返回信息"
|
||||
|
||||
def get_torrent_url(self, sid):
|
||||
if not sid:
|
||||
return None, None
|
||||
if not self._sites:
|
||||
self._sites = self.__get_sites()
|
||||
if not self._sites.get(sid):
|
||||
return None, None
|
||||
site = self._sites.get(sid)
|
||||
return site.get('base_url'), site.get('download_page')
|
||||
|
||||
def __get_sites(self):
|
||||
"""
|
||||
返回支持辅种的全部站点
|
||||
:return: 站点列表、错误信息
|
||||
{
|
||||
"ret": 200,
|
||||
"data": {
|
||||
"sites": [
|
||||
{
|
||||
"id": 1,
|
||||
"site": "keepfrds",
|
||||
"nickname": "朋友",
|
||||
"base_url": "pt.keepfrds.com",
|
||||
"download_page": "download.php?id={}&passkey={passkey}",
|
||||
"reseed_check": "passkey",
|
||||
"is_https": 2
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
result, msg = self.__request_iyuu(url=self._api_base % 'App.Api.Sites')
|
||||
if result:
|
||||
ret_sites = {}
|
||||
sites = result.get('sites') or []
|
||||
for site in sites:
|
||||
ret_sites[site.get('id')] = site
|
||||
return ret_sites
|
||||
else:
|
||||
print(msg)
|
||||
return {}
|
||||
|
||||
def get_seed_info(self, info_hashs: list):
|
||||
"""
|
||||
返回info_hash对应的站点id、种子id
|
||||
{
|
||||
"ret": 200,
|
||||
"data": [
|
||||
{
|
||||
"sid": 3,
|
||||
"torrent_id": 377467,
|
||||
"info_hash": "a444850638e7a6f6220e2efdde94099c53358159"
|
||||
},
|
||||
{
|
||||
"sid": 7,
|
||||
"torrent_id": 35538,
|
||||
"info_hash": "cf7d88fd656d10fe5130d13567aec27068b96676"
|
||||
}
|
||||
],
|
||||
"msg": "",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
"""
|
||||
info_hashs.sort()
|
||||
json_data = json.dumps(info_hashs, separators=(',', ':'), ensure_ascii=False)
|
||||
sha1 = self.get_sha1(json_data)
|
||||
result, msg = self.__request_iyuu(url=self._api_base % 'App.Api.Infohash',
|
||||
method="post",
|
||||
params={
|
||||
"timestamp": time.time(),
|
||||
"hash": json_data,
|
||||
"sha1": sha1
|
||||
})
|
||||
return result, msg
|
||||
|
||||
@staticmethod
|
||||
def get_sha1(json_str) -> str:
|
||||
return hashlib.sha1(json_str.encode('utf-8')).hexdigest()
|
||||
|
||||
def get_auth_sites(self):
|
||||
"""
|
||||
返回支持鉴权的站点列表
|
||||
[
|
||||
{
|
||||
"id": 2,
|
||||
"site": "pthome",
|
||||
"bind_check": "passkey,uid"
|
||||
}
|
||||
]
|
||||
"""
|
||||
result, msg = self.__request_iyuu(url=self._api_base % 'App.Api.GetRecommendSites')
|
||||
if result:
|
||||
return result.get('recommend') or []
|
||||
else:
|
||||
print(msg)
|
||||
return []
|
||||
|
||||
def bind_site(self, site, passkey, uid):
|
||||
"""
|
||||
绑定站点
|
||||
:param site: 站点名称
|
||||
:param passkey: passkey
|
||||
:param uid: 用户id
|
||||
:return: 状态码、错误信息
|
||||
"""
|
||||
result, msg = self.__request_iyuu(url=self._api_base % 'App.Api.Bind',
|
||||
method="get",
|
||||
params={
|
||||
"token": self._token,
|
||||
"site": site,
|
||||
"passkey": self.get_sha1(passkey),
|
||||
"id": uid
|
||||
})
|
||||
return result, msg
|
||||
@@ -1,433 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from threading import Event
|
||||
from typing import List, Tuple, Dict, Any
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.meta import MetaBase
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.db.transferhistory_oper import TransferHistoryOper
|
||||
from app.helper.nfo import NfoReader
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import MediaType
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
|
||||
class LibraryScraper(_PluginBase):
|
||||
|
||||
# 插件名称
|
||||
plugin_name = "媒体库刮削"
|
||||
# 插件描述
|
||||
plugin_desc = "定时对媒体库进行刮削,补齐缺失元数据和图片。"
|
||||
# 插件图标
|
||||
plugin_icon = "scraper.png"
|
||||
# 主题色
|
||||
plugin_color = "#FF7D00"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "libraryscraper_"
|
||||
# 加载顺序
|
||||
plugin_order = 7
|
||||
# 可使用的用户级别
|
||||
user_level = 1
|
||||
|
||||
# 私有属性
|
||||
transferhis = None
|
||||
_scheduler = None
|
||||
_scraper = None
|
||||
# 限速开关
|
||||
_enabled = False
|
||||
_onlyonce = False
|
||||
_cron = None
|
||||
_mode = ""
|
||||
_scraper_paths = ""
|
||||
_exclude_paths = ""
|
||||
# 退出事件
|
||||
_event = Event()
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._cron = config.get("cron")
|
||||
self._mode = config.get("mode") or ""
|
||||
self._scraper_paths = config.get("scraper_paths") or ""
|
||||
self._exclude_paths = config.get("exclude_paths") or ""
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 启动定时任务 & 立即运行一次
|
||||
if self._enabled or self._onlyonce:
|
||||
self.transferhis = TransferHistoryOper()
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
if self._cron:
|
||||
logger.info(f"媒体库刮削服务启动,周期:{self._cron}")
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__libraryscraper,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="媒体库刮削")
|
||||
except Exception as e:
|
||||
logger.error(f"媒体库刮削服务启动失败,原因:{str(e)}")
|
||||
self.systemmessage.put(f"媒体库刮削服务启动失败,原因:{str(e)}")
|
||||
else:
|
||||
logger.info(f"媒体库刮削服务启动,周期:每7天")
|
||||
self._scheduler.add_job(func=self.__libraryscraper,
|
||||
trigger=CronTrigger.from_crontab("0 0 */7 * *"),
|
||||
name="媒体库刮削")
|
||||
if self._onlyonce:
|
||||
logger.info(f"媒体库刮削服务,立即运行一次")
|
||||
self._scheduler.add_job(func=self.__libraryscraper, trigger='date',
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="Cloudflare优选")
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
self.update_config({
|
||||
"onlyonce": False,
|
||||
"enabled": self._enabled,
|
||||
"cron": self._cron,
|
||||
"mode": self._mode,
|
||||
"scraper_paths": self._scraper_paths,
|
||||
"exclude_paths": self._exclude_paths
|
||||
})
|
||||
if self._scheduler.get_jobs():
|
||||
# 启动服务
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'mode',
|
||||
'label': '刮削模式',
|
||||
'items': [
|
||||
{'title': '仅刮削缺失元数据和图片', 'value': ''},
|
||||
{'title': '覆盖所有元数据和图片', 'value': 'force_all'},
|
||||
{'title': '覆盖所有元数据', 'value': 'force_nfo'},
|
||||
{'title': '覆盖所有图片', 'value': 'force_image'},
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'scraper_paths',
|
||||
'label': '削刮路径',
|
||||
'rows': 5,
|
||||
'placeholder': '每一行一个目录,需配置到媒体文件的上级目录,即开了二级分类时需要配置到二级分类目录'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'exclude_paths',
|
||||
'label': '排除路径',
|
||||
'rows': 2,
|
||||
'placeholder': '每一行一个目录'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"cron": "0 0 */7 * *",
|
||||
"mode": "",
|
||||
"scraper_paths": "",
|
||||
"err_hosts": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def __libraryscraper(self):
|
||||
"""
|
||||
开始刮削媒体库
|
||||
"""
|
||||
if not self._scraper_paths:
|
||||
return
|
||||
# 排除目录
|
||||
exclude_paths = self._exclude_paths.split("\n")
|
||||
# 已选择的目录
|
||||
paths = self._scraper_paths.split("\n")
|
||||
for path in paths:
|
||||
if not path:
|
||||
continue
|
||||
scraper_path = Path(path)
|
||||
if not scraper_path.exists():
|
||||
logger.warning(f"媒体库刮削路径不存在:{path}")
|
||||
continue
|
||||
logger.info(f"开始刮削媒体库:{path} ...")
|
||||
# 遍历一层文件夹
|
||||
for sub_path in scraper_path.iterdir():
|
||||
if self._event.is_set():
|
||||
logger.info(f"媒体库刮削服务停止")
|
||||
return
|
||||
# 排除目录
|
||||
exclude_flag = False
|
||||
for exclude_path in exclude_paths:
|
||||
try:
|
||||
if sub_path.is_relative_to(Path(exclude_path)):
|
||||
exclude_flag = True
|
||||
break
|
||||
except Exception as err:
|
||||
print(str(err))
|
||||
if exclude_flag:
|
||||
logger.debug(f"{sub_path} 在排除目录中,跳过 ...")
|
||||
continue
|
||||
# 开始刮削目录
|
||||
if sub_path.is_dir():
|
||||
# 判断目录是不是媒体目录
|
||||
dir_meta = MetaInfo(sub_path.name)
|
||||
if not dir_meta.name or not dir_meta.year:
|
||||
logger.warn(f"{sub_path} 可能不是媒体目录,请检查刮削目录配置,跳过 ...")
|
||||
continue
|
||||
logger.info(f"开始刮削目录:{sub_path} ...")
|
||||
self.__scrape_dir(path=sub_path, dir_meta=dir_meta)
|
||||
logger.info(f"目录 {sub_path} 刮削完成")
|
||||
logger.info(f"媒体库 {path} 刮削完成")
|
||||
|
||||
def __scrape_dir(self, path: Path, dir_meta: MetaBase):
|
||||
"""
|
||||
削刮一个目录,该目录必须是媒体文件目录
|
||||
"""
|
||||
|
||||
# 媒体信息
|
||||
mediainfo = None
|
||||
|
||||
# 查找目录下所有的文件
|
||||
files = SystemUtils.list_files(path, settings.RMT_MEDIAEXT)
|
||||
for file in files:
|
||||
if self._event.is_set():
|
||||
logger.info(f"媒体库刮削服务停止")
|
||||
return
|
||||
|
||||
# 识别元数据
|
||||
meta_info = MetaInfo(file.stem)
|
||||
# 合并
|
||||
meta_info.merge(dir_meta)
|
||||
# 是否刮削
|
||||
scrap_metadata = settings.SCRAP_METADATA
|
||||
|
||||
# 没有媒体信息或者名字出现变化时,需要重新识别
|
||||
if not mediainfo \
|
||||
or meta_info.name != dir_meta.name:
|
||||
# 优先读取本地nfo文件
|
||||
tmdbid = None
|
||||
if meta_info.type == MediaType.MOVIE:
|
||||
# 电影
|
||||
movie_nfo = file.parent / "movie.nfo"
|
||||
if movie_nfo.exists():
|
||||
tmdbid = self.__get_tmdbid_from_nfo(movie_nfo)
|
||||
file_nfo = file.with_suffix(".nfo")
|
||||
if not tmdbid and file_nfo.exists():
|
||||
tmdbid = self.__get_tmdbid_from_nfo(file_nfo)
|
||||
else:
|
||||
# 电视剧
|
||||
tv_nfo = file.parent.parent / "tvshow.nfo"
|
||||
if tv_nfo.exists():
|
||||
tmdbid = self.__get_tmdbid_from_nfo(tv_nfo)
|
||||
if tmdbid:
|
||||
# 按TMDBID识别
|
||||
logger.info(f"读取到本地nfo文件的tmdbid:{tmdbid}")
|
||||
mediainfo = self.chain.recognize_media(tmdbid=tmdbid, mtype=meta_info.type)
|
||||
else:
|
||||
# 按名称识别
|
||||
mediainfo = self.chain.recognize_media(meta=meta_info)
|
||||
if not mediainfo:
|
||||
logger.warn(f"未识别到媒体信息:{file}")
|
||||
continue
|
||||
|
||||
# 如果未开启新增已入库媒体是否跟随TMDB信息变化则根据tmdbid查询之前的title
|
||||
if not settings.SCRAP_FOLLOW_TMDB:
|
||||
transfer_history = self.transferhis.get_by_type_tmdbid(tmdbid=mediainfo.tmdb_id,
|
||||
mtype=mediainfo.type.value)
|
||||
if transfer_history:
|
||||
mediainfo.title = transfer_history.title
|
||||
|
||||
# 覆盖模式时,提前删除nfo
|
||||
if self._mode in ["force_all", "force_nfo"]:
|
||||
scrap_metadata = True
|
||||
nfo_files = SystemUtils.list_files(path, [".nfo"])
|
||||
for nfo_file in nfo_files:
|
||||
try:
|
||||
logger.warn(f"删除nfo文件:{nfo_file}")
|
||||
nfo_file.unlink()
|
||||
except Exception as err:
|
||||
print(str(err))
|
||||
|
||||
# 覆盖模式时,提前删除图片文件
|
||||
if self._mode in ["force_all", "force_image"]:
|
||||
scrap_metadata = True
|
||||
image_files = SystemUtils.list_files(path, [".jpg", ".png"])
|
||||
for image_file in image_files:
|
||||
if ".actors" in str(image_file):
|
||||
continue
|
||||
try:
|
||||
logger.warn(f"删除图片文件:{image_file}")
|
||||
image_file.unlink()
|
||||
except Exception as err:
|
||||
print(str(err))
|
||||
|
||||
# 刮削单个文件
|
||||
if scrap_metadata:
|
||||
self.chain.scrape_metadata(path=file, mediainfo=mediainfo, transfer_type=settings.TRANSFER_TYPE)
|
||||
|
||||
@staticmethod
|
||||
def __get_tmdbid_from_nfo(file_path: Path):
|
||||
"""
|
||||
从nfo文件中获取信息
|
||||
:param file_path:
|
||||
:return: tmdbid
|
||||
"""
|
||||
if not file_path:
|
||||
return None
|
||||
xpaths = [
|
||||
"uniqueid[@type='Tmdb']",
|
||||
"uniqueid[@type='tmdb']",
|
||||
"uniqueid[@type='TMDB']",
|
||||
"tmdbid"
|
||||
]
|
||||
reader = NfoReader(file_path)
|
||||
for xpath in xpaths:
|
||||
try:
|
||||
tmdbid = reader.get_element_value(xpath)
|
||||
if tmdbid:
|
||||
return tmdbid
|
||||
except Exception as err:
|
||||
print(str(err))
|
||||
return None
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._event.set()
|
||||
self._scheduler.shutdown()
|
||||
self._event.clear()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
@@ -1,244 +0,0 @@
|
||||
import time
|
||||
from typing import Any, List, Dict, Tuple
|
||||
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import WebhookEventInfo
|
||||
from app.schemas.types import EventType, MediaType, MediaImageType, NotificationType
|
||||
from app.utils.web import WebUtils
|
||||
|
||||
|
||||
class MediaServerMsg(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "媒体库服务器通知"
|
||||
# 插件描述
|
||||
plugin_desc = "发送Emby/Jellyfin/Plex服务器的播放、入库等通知消息。"
|
||||
# 插件图标
|
||||
plugin_icon = "mediaplay.png"
|
||||
# 主题色
|
||||
plugin_color = "#42A3DB"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "mediaservermsg_"
|
||||
# 加载顺序
|
||||
plugin_order = 14
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
_types = []
|
||||
|
||||
# 拼装消息内容
|
||||
_webhook_actions = {
|
||||
"library.new": "新入库",
|
||||
"system.webhooktest": "测试",
|
||||
"playback.start": "开始播放",
|
||||
"playback.stop": "停止播放",
|
||||
"user.authenticated": "登录成功",
|
||||
"user.authenticationfailed": "登录失败",
|
||||
"media.play": "开始播放",
|
||||
"media.stop": "停止播放",
|
||||
"PlaybackStart": "开始播放",
|
||||
"PlaybackStop": "停止播放",
|
||||
"item.rate": "标记了"
|
||||
}
|
||||
_webhook_images = {
|
||||
"emby": "https://emby.media/notificationicon.png",
|
||||
"plex": "https://www.plex.tv/wp-content/uploads/2022/04/new-logo-process-lines-gray.png",
|
||||
"jellyfin": "https://play-lh.googleusercontent.com/SCsUK3hCCRqkJbmLDctNYCfehLxsS4ggD1ZPHIFrrAN1Tn9yhjmGMPep2D9lMaaa9eQi"
|
||||
}
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._types = config.get("types") or []
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
types_options = [
|
||||
{"title": "新入库", "value": "library.new"},
|
||||
{"title": "开始播放", "value": "playback.start|media.play|PlaybackStart"},
|
||||
{"title": "停止播放", "value": "playback.stop|media.stop|PlaybackStop"},
|
||||
{"title": "用户标记", "value": "item.rate"},
|
||||
{"title": "测试", "value": "system.webhooktest"},
|
||||
]
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'model': 'types',
|
||||
'label': '消息类型',
|
||||
'items': types_options
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '需要设置媒体服务器Webhook,回调相对路径为 /api/v1/webhook?token=moviepilot(3001端口),其中 moviepilot 为设置的 API_TOKEN。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"types": []
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
@eventmanager.register(EventType.WebhookMessage)
|
||||
def send(self, event: Event):
|
||||
"""
|
||||
发送通知消息
|
||||
"""
|
||||
if not self._enabled:
|
||||
return
|
||||
|
||||
event_info: WebhookEventInfo = event.event_data
|
||||
if not event_info:
|
||||
return
|
||||
|
||||
# 不在支持范围不处理
|
||||
if not self._webhook_actions.get(event_info.event):
|
||||
return
|
||||
|
||||
# 不在选中范围不处理
|
||||
msgflag = False
|
||||
for _type in self._types:
|
||||
if event_info.event in _type.split("|"):
|
||||
msgflag = True
|
||||
break
|
||||
if not msgflag:
|
||||
logger.info(f"未开启 {event_info.event} 类型的消息通知")
|
||||
return
|
||||
|
||||
# 消息标题
|
||||
if event_info.item_type in ["TV", "SHOW"]:
|
||||
message_title = f"{self._webhook_actions.get(event_info.event)}剧集 {event_info.item_name}"
|
||||
elif event_info.item_type == "MOV":
|
||||
message_title = f"{self._webhook_actions.get(event_info.event)}电影 {event_info.item_name}"
|
||||
elif event_info.item_type == "AUD":
|
||||
message_title = f"{self._webhook_actions.get(event_info.event)}有声书 {event_info.item_name}"
|
||||
else:
|
||||
message_title = f"{self._webhook_actions.get(event_info.event)}"
|
||||
|
||||
# 消息内容
|
||||
message_texts = []
|
||||
if event_info.user_name:
|
||||
message_texts.append(f"用户:{event_info.user_name}")
|
||||
if event_info.device_name:
|
||||
message_texts.append(f"设备:{event_info.client} {event_info.device_name}")
|
||||
if event_info.ip:
|
||||
message_texts.append(f"IP地址:{event_info.ip} {WebUtils.get_location(event_info.ip)}")
|
||||
if event_info.percentage:
|
||||
percentage = round(float(event_info.percentage), 2)
|
||||
message_texts.append(f"进度:{percentage}%")
|
||||
if event_info.overview:
|
||||
message_texts.append(f"剧情:{event_info.overview}")
|
||||
message_texts.append(f"时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}")
|
||||
|
||||
# 消息内容
|
||||
message_content = "\n".join(message_texts)
|
||||
|
||||
# 消息图片
|
||||
image_url = event_info.image_url
|
||||
# 查询剧集图片
|
||||
if (event_info.tmdb_id
|
||||
and event_info.season_id
|
||||
and event_info.episode_id):
|
||||
specific_image = self.chain.obtain_specific_image(
|
||||
mediaid=event_info.tmdb_id,
|
||||
mtype=MediaType.TV,
|
||||
image_type=MediaImageType.Backdrop,
|
||||
season=event_info.season_id,
|
||||
episode=event_info.episode_id
|
||||
)
|
||||
if specific_image:
|
||||
image_url = specific_image
|
||||
# 使用默认图片
|
||||
if not image_url:
|
||||
image_url = self._webhook_images.get(event_info.channel)
|
||||
|
||||
# 发送消息
|
||||
self.post_message(mtype=NotificationType.MediaServer,
|
||||
title=message_title, text=message_content, image=image_url)
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
@@ -1,136 +0,0 @@
|
||||
from typing import Any, List, Dict, Tuple
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.modules.emby import Emby
|
||||
from app.modules.jellyfin import Jellyfin
|
||||
from app.modules.plex import Plex
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import TransferInfo, RefreshMediaItem
|
||||
from app.schemas.types import EventType
|
||||
|
||||
|
||||
class MediaServerRefresh(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "媒体库服务器刷新"
|
||||
# 插件描述
|
||||
plugin_desc = "入库后自动刷新Emby/Jellyfin/Plex服务器海报墙。"
|
||||
# 插件图标
|
||||
plugin_icon = "refresh2.png"
|
||||
# 主题色
|
||||
plugin_color = "#347180"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "mediaserverrefresh_"
|
||||
# 加载顺序
|
||||
plugin_order = 14
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
@eventmanager.register(EventType.TransferComplete)
|
||||
def refresh(self, event: Event):
|
||||
"""
|
||||
发送通知消息
|
||||
"""
|
||||
if not self._enabled:
|
||||
return
|
||||
|
||||
event_info: dict = event.event_data
|
||||
if not event_info:
|
||||
return
|
||||
|
||||
# 刷新媒体库
|
||||
if not settings.MEDIASERVER:
|
||||
return
|
||||
|
||||
# 入库数据
|
||||
transferinfo: TransferInfo = event_info.get("transferinfo")
|
||||
mediainfo: MediaInfo = event_info.get("mediainfo")
|
||||
items = [
|
||||
RefreshMediaItem(
|
||||
title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
type=mediainfo.type,
|
||||
category=mediainfo.category,
|
||||
target_path=transferinfo.target_path
|
||||
)
|
||||
]
|
||||
# Emby
|
||||
if "emby" in settings.MEDIASERVER:
|
||||
Emby().refresh_library_by_items(items)
|
||||
|
||||
# Jeyllyfin
|
||||
if "jellyfin" in settings.MEDIASERVER:
|
||||
# FIXME Jellyfin未找到刷新单个项目的API
|
||||
Jellyfin().refresh_root_library()
|
||||
|
||||
# Plex
|
||||
if "plex" in settings.MEDIASERVER:
|
||||
Plex().refresh_library_by_items(items)
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,387 +0,0 @@
|
||||
import json
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
from app.core.config import settings
|
||||
from app.plugins import _PluginBase
|
||||
from app.core.event import eventmanager
|
||||
from app.schemas.types import EventType, MessageChannel
|
||||
from app.utils.http import RequestUtils
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.log import logger
|
||||
|
||||
|
||||
class MessageForward(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "消息转发"
|
||||
# 插件描述
|
||||
plugin_desc = "根据正则转发通知到其他WeChat应用。"
|
||||
# 插件图标
|
||||
plugin_icon = "forward.png"
|
||||
# 主题色
|
||||
plugin_color = "#32ABD1"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "messageforward_"
|
||||
# 加载顺序
|
||||
plugin_order = 16
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
_wechat = None
|
||||
_pattern = None
|
||||
_pattern_token = {}
|
||||
|
||||
# 企业微信发送消息URL
|
||||
_send_msg_url = f"{settings.WECHAT_PROXY}/cgi-bin/message/send?access_token=%s"
|
||||
# 企业微信获取TokenURL
|
||||
_token_url = f"{settings.WECHAT_PROXY}/cgi-bin/gettoken?corpid=%s&corpsecret=%s"
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._wechat = config.get("wechat")
|
||||
self._pattern = config.get("pattern")
|
||||
|
||||
# 获取token存库
|
||||
if self._enabled and self._wechat:
|
||||
self.__save_wechat_token()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '开启转发'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'wechat',
|
||||
'rows': '3',
|
||||
'label': '应用配置',
|
||||
'placeholder': 'appid:corpid:appsecret(一行一个配置)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'pattern',
|
||||
'rows': '3',
|
||||
'label': '正则配置',
|
||||
'placeholder': '对应上方应用配置,一行一个,一一对应'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"wechat": "",
|
||||
"pattern": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
@eventmanager.register(EventType.NoticeMessage)
|
||||
def send(self, event):
|
||||
"""
|
||||
消息转发
|
||||
"""
|
||||
if not self._enabled:
|
||||
return
|
||||
|
||||
# 消息体
|
||||
data = event.event_data
|
||||
channel = data['channel']
|
||||
if channel and channel != MessageChannel.Wechat:
|
||||
return
|
||||
|
||||
title = data['title']
|
||||
text = data['text']
|
||||
image = data['image']
|
||||
userid = data['userid']
|
||||
|
||||
# 正则匹配
|
||||
patterns = self._pattern.split("\n")
|
||||
for index, pattern in enumerate(patterns):
|
||||
msg_match = re.search(pattern, title)
|
||||
if msg_match:
|
||||
access_token, appid = self.__flush_access_token(index)
|
||||
if not access_token:
|
||||
logger.error("未获取到有效token,请检查配置")
|
||||
continue
|
||||
|
||||
# 发送消息
|
||||
if image:
|
||||
self.__send_image_message(title, text, image, userid, access_token, appid, index)
|
||||
else:
|
||||
self.__send_message(title, text, userid, access_token, appid, index)
|
||||
|
||||
def __save_wechat_token(self):
|
||||
"""
|
||||
获取并存储wechat token
|
||||
"""
|
||||
# 解析配置
|
||||
wechats = self._wechat.split("\n")
|
||||
for index, wechat in enumerate(wechats):
|
||||
wechat_config = wechat.split(":")
|
||||
if len(wechat_config) != 3:
|
||||
logger.error(f"{wechat} 应用配置不正确")
|
||||
continue
|
||||
appid = wechat_config[0]
|
||||
corpid = wechat_config[1]
|
||||
appsecret = wechat_config[2]
|
||||
|
||||
# 已过期,重新获取token
|
||||
access_token, expires_in, access_token_time = self.__get_access_token(corpid=corpid,
|
||||
appsecret=appsecret)
|
||||
if not access_token:
|
||||
# 没有token,获取token
|
||||
logger.error(f"wechat配置 appid = {appid} 获取token失败,请检查配置")
|
||||
continue
|
||||
|
||||
self._pattern_token[index] = {
|
||||
"appid": appid,
|
||||
"corpid": corpid,
|
||||
"appsecret": appsecret,
|
||||
"access_token": access_token,
|
||||
"expires_in": expires_in,
|
||||
"access_token_time": access_token_time,
|
||||
}
|
||||
|
||||
def __flush_access_token(self, index: int, force: bool = False):
|
||||
"""
|
||||
获取第i个配置wechat token
|
||||
"""
|
||||
wechat_token = self._pattern_token[index]
|
||||
if not wechat_token:
|
||||
logger.error(f"未获取到第 {index} 条正则对应的wechat应用token,请检查配置")
|
||||
return None
|
||||
access_token = wechat_token['access_token']
|
||||
expires_in = wechat_token['expires_in']
|
||||
access_token_time = wechat_token['access_token_time']
|
||||
appid = wechat_token['appid']
|
||||
corpid = wechat_token['corpid']
|
||||
appsecret = wechat_token['appsecret']
|
||||
|
||||
# 判断token有效期
|
||||
if force or (datetime.now() - access_token_time).seconds >= expires_in:
|
||||
# 重新获取token
|
||||
access_token, expires_in, access_token_time = self.__get_access_token(corpid=corpid,
|
||||
appsecret=appsecret)
|
||||
if not access_token:
|
||||
logger.error(f"wechat配置 appid = {appid} 获取token失败,请检查配置")
|
||||
return None, None
|
||||
|
||||
self._pattern_token[index] = {
|
||||
"appid": appid,
|
||||
"corpid": corpid,
|
||||
"appsecret": appsecret,
|
||||
"access_token": access_token,
|
||||
"expires_in": expires_in,
|
||||
"access_token_time": access_token_time,
|
||||
}
|
||||
return access_token, appid
|
||||
|
||||
def __send_message(self, title: str, text: str = None, userid: str = None, access_token: str = None,
|
||||
appid: str = None, index: int = None) -> Optional[bool]:
|
||||
"""
|
||||
发送文本消息
|
||||
:param title: 消息标题
|
||||
:param text: 消息内容
|
||||
:param userid: 消息发送对象的ID,为空则发给所有人
|
||||
:return: 发送状态,错误信息
|
||||
"""
|
||||
if text:
|
||||
conent = "%s\n%s" % (title, text.replace("\n\n", "\n"))
|
||||
else:
|
||||
conent = title
|
||||
|
||||
if not userid:
|
||||
userid = "@all"
|
||||
req_json = {
|
||||
"touser": userid,
|
||||
"msgtype": "text",
|
||||
"agentid": appid,
|
||||
"text": {
|
||||
"content": conent
|
||||
},
|
||||
"safe": 0,
|
||||
"enable_id_trans": 0,
|
||||
"enable_duplicate_check": 0
|
||||
}
|
||||
return self.__post_request(access_token=access_token, req_json=req_json, index=index, title=title)
|
||||
|
||||
def __send_image_message(self, title: str, text: str, image_url: str, userid: str = None,
|
||||
access_token: str = None, appid: str = None, index: int = None) -> Optional[bool]:
|
||||
"""
|
||||
发送图文消息
|
||||
:param title: 消息标题
|
||||
:param text: 消息内容
|
||||
:param image_url: 图片地址
|
||||
:param userid: 消息发送对象的ID,为空则发给所有人
|
||||
:return: 发送状态,错误信息
|
||||
"""
|
||||
if text:
|
||||
text = text.replace("\n\n", "\n")
|
||||
if not userid:
|
||||
userid = "@all"
|
||||
req_json = {
|
||||
"touser": userid,
|
||||
"msgtype": "news",
|
||||
"agentid": appid,
|
||||
"news": {
|
||||
"articles": [
|
||||
{
|
||||
"title": title,
|
||||
"description": text,
|
||||
"picurl": image_url,
|
||||
"url": ''
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
return self.__post_request(access_token=access_token, req_json=req_json, index=index, title=title)
|
||||
|
||||
def __post_request(self, access_token: str, req_json: dict, index: int, title: str, retry: int = 0) -> bool:
|
||||
message_url = self._send_msg_url % access_token
|
||||
"""
|
||||
向微信发送请求
|
||||
"""
|
||||
try:
|
||||
res = RequestUtils(content_type='application/json').post(
|
||||
message_url,
|
||||
data=json.dumps(req_json, ensure_ascii=False).encode('utf-8')
|
||||
)
|
||||
if res and res.status_code == 200:
|
||||
ret_json = res.json()
|
||||
if ret_json.get('errcode') == 0:
|
||||
logger.info(f"转发消息 {title} 成功")
|
||||
return True
|
||||
else:
|
||||
if ret_json.get('errcode') == 81013:
|
||||
return False
|
||||
|
||||
logger.error(f"转发消息 {title} 失败,错误信息:{ret_json}")
|
||||
if ret_json.get('errcode') == 42001 or ret_json.get('errcode') == 40014:
|
||||
logger.info("token已过期,正在重新刷新token重试")
|
||||
# 重新获取token
|
||||
access_token, appid = self.__flush_access_token(index=index,
|
||||
force=True)
|
||||
if access_token:
|
||||
retry += 1
|
||||
# 重发请求
|
||||
if retry <= 3:
|
||||
return self.__post_request(access_token=access_token,
|
||||
req_json=req_json,
|
||||
index=index,
|
||||
title=title,
|
||||
retry=retry)
|
||||
return False
|
||||
elif res is not None:
|
||||
logger.error(f"转发消息 {title} 失败,错误码:{res.status_code},错误原因:{res.reason}")
|
||||
return False
|
||||
else:
|
||||
logger.error(f"转发消息 {title} 失败,未获取到返回信息")
|
||||
return False
|
||||
except Exception as err:
|
||||
logger.error(f"转发消息 {title} 异常,错误信息:{str(err)}")
|
||||
return False
|
||||
|
||||
def __get_access_token(self, corpid: str, appsecret: str):
|
||||
"""
|
||||
获取微信Token
|
||||
:return: 微信Token
|
||||
"""
|
||||
try:
|
||||
token_url = self._token_url % (corpid, appsecret)
|
||||
res = RequestUtils().get_res(token_url)
|
||||
if res:
|
||||
ret_json = res.json()
|
||||
if ret_json.get('errcode') == 0:
|
||||
access_token = ret_json.get('access_token')
|
||||
expires_in = ret_json.get('expires_in')
|
||||
access_token_time = datetime.now()
|
||||
|
||||
return access_token, expires_in, access_token_time
|
||||
else:
|
||||
logger.error(f"{ret_json.get('errmsg')}")
|
||||
return None, None, None
|
||||
else:
|
||||
logger.error(f"{corpid} {appsecret} 获取token失败")
|
||||
return None, None, None
|
||||
except Exception as e:
|
||||
logger.error(f"获取微信access_token失败,错误信息:{str(e)}")
|
||||
return None, None, None
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
@@ -1,267 +0,0 @@
|
||||
import datetime
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.system import SystemChain
|
||||
from app.core.config import settings
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.log import logger
|
||||
from app.schemas import NotificationType
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
|
||||
class MoviePilotUpdateNotify(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "MoviePilot更新推送"
|
||||
# 插件描述
|
||||
plugin_desc = "MoviePilot推送release更新通知、自动重启。"
|
||||
# 插件图标
|
||||
plugin_icon = "update.png"
|
||||
# 主题色
|
||||
plugin_color = "#4179F4"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "moviepilotupdatenotify_"
|
||||
# 加载顺序
|
||||
plugin_order = 25
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_cron = None
|
||||
_restart = False
|
||||
_notify = False
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._restart = config.get("restart")
|
||||
self._notify = config.get("notify")
|
||||
|
||||
# 加载模块
|
||||
if self._enabled:
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.__check_update,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="检查MoviePilot更新")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def __check_update(self):
|
||||
"""
|
||||
检查MoviePilot更新
|
||||
"""
|
||||
release_version, description, update_time = self.__get_release_version()
|
||||
if not release_version:
|
||||
logger.error("最新版本获取失败,停止运行")
|
||||
return
|
||||
|
||||
# 本地版本
|
||||
local_version = SystemChain().get_local_version()
|
||||
if local_version and release_version <= local_version:
|
||||
logger.info(f"当前版本:{local_version} 远程版本:{release_version} 停止运行")
|
||||
return
|
||||
|
||||
# 推送更新消息
|
||||
if self._notify:
|
||||
# 将时间字符串转为datetime对象
|
||||
dt = datetime.datetime.strptime(update_time, "%Y-%m-%dT%H:%M:%SZ")
|
||||
# 设置时区
|
||||
timezone = pytz.timezone(settings.TZ)
|
||||
dt = dt.replace(tzinfo=timezone)
|
||||
# 将datetime对象转换为带时区的字符串
|
||||
update_time = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title="【MoviePilot更新通知】",
|
||||
text=f"{release_version} \n"
|
||||
f"\n"
|
||||
f"{description} \n"
|
||||
f"\n"
|
||||
f"{update_time}")
|
||||
|
||||
# 自动重启
|
||||
if self._restart:
|
||||
logger.info("开始执行自动重启…")
|
||||
SystemUtils.restart()
|
||||
|
||||
@staticmethod
|
||||
def __get_release_version():
|
||||
"""
|
||||
获取最新版本
|
||||
"""
|
||||
version_res = RequestUtils(proxies=settings.PROXY).get_res(
|
||||
"https://api.github.com/repos/jxxghp/MoviePilot/releases/latest")
|
||||
if version_res:
|
||||
ver_json = version_res.json()
|
||||
version = f"{ver_json['tag_name']}"
|
||||
description = f"{ver_json['body']}"
|
||||
update_time = f"{ver_json['published_at']}"
|
||||
return version, description, update_time
|
||||
else:
|
||||
return None, None, None
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'restart',
|
||||
'label': '自动重启',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '发送通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '检查周期',
|
||||
'placeholder': '5位cron表达式'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '如要开启自动重启,请确认MOVIEPILOT_AUTO_UPDATE设置为true,重启即更新。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"restart": False,
|
||||
"notify": False,
|
||||
"cron": "0 9 * * *"
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
@@ -1,657 +0,0 @@
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
from datetime import datetime
|
||||
|
||||
from app.core.config import settings
|
||||
from app.db.downloadhistory_oper import DownloadHistoryOper
|
||||
from app.db.plugindata_oper import PluginDataOper
|
||||
from app.db.transferhistory_oper import TransferHistoryOper
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple
|
||||
from app.log import logger
|
||||
|
||||
|
||||
class NAStoolSync(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "历史记录同步"
|
||||
# 插件描述
|
||||
plugin_desc = "同步NAStool历史记录、下载记录、插件记录到MoviePilot。"
|
||||
# 插件图标
|
||||
plugin_icon = "sync.png"
|
||||
# 主题色
|
||||
plugin_color = "#53BA47"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "nastoolsync_"
|
||||
# 加载顺序
|
||||
plugin_order = 15
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_transferhistory = None
|
||||
_plugindata = None
|
||||
_downloadhistory = None
|
||||
_clear = None
|
||||
_nt_db_path = None
|
||||
_path = None
|
||||
_site = None
|
||||
_downloader = None
|
||||
_transfer = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self._transferhistory = TransferHistoryOper()
|
||||
self._plugindata = PluginDataOper()
|
||||
self._downloadhistory = DownloadHistoryOper()
|
||||
|
||||
if config:
|
||||
self._clear = config.get("clear")
|
||||
self._nt_db_path = config.get("nt_db_path")
|
||||
self._path = config.get("path")
|
||||
self._site = config.get("site")
|
||||
self._downloader = config.get("downloader")
|
||||
self._transfer = config.get("transfer")
|
||||
|
||||
if self._nt_db_path and self._transfer:
|
||||
# 读取sqlite数据
|
||||
try:
|
||||
gradedb = sqlite3.connect(self._nt_db_path)
|
||||
except Exception as e:
|
||||
self.update_config(
|
||||
{
|
||||
"transfer": False,
|
||||
"clear": False,
|
||||
"nt_db_path": None,
|
||||
"path": self._path,
|
||||
"downloader": self._downloader,
|
||||
"site": self._site,
|
||||
}
|
||||
)
|
||||
logger.error(f"无法打开数据库文件 {self._nt_db_path},请检查路径是否正确:{str(e)}")
|
||||
return
|
||||
|
||||
# 创建游标cursor来执行executeSQL语句
|
||||
cursor = gradedb.cursor()
|
||||
|
||||
download_history = self.get_nt_download_history(cursor)
|
||||
plugin_history = self.get_nt_plugin_history(cursor)
|
||||
transfer_history = self.get_nt_transfer_history(cursor)
|
||||
|
||||
# 关闭游标
|
||||
cursor.close()
|
||||
|
||||
# 导入下载记录
|
||||
if download_history:
|
||||
self.sync_download_history(download_history)
|
||||
|
||||
# 导入插件记录
|
||||
if plugin_history:
|
||||
self.sync_plugin_history(plugin_history)
|
||||
|
||||
# 导入历史记录
|
||||
if transfer_history:
|
||||
self.sync_transfer_history(transfer_history)
|
||||
|
||||
self.update_config(
|
||||
{
|
||||
"transfer": False,
|
||||
"clear": False,
|
||||
"nt_db_path": self._nt_db_path,
|
||||
"path": self._path,
|
||||
"downloader": self._downloader,
|
||||
"site": self._site,
|
||||
}
|
||||
)
|
||||
|
||||
def sync_plugin_history(self, plugin_history):
|
||||
"""
|
||||
导入插件记录
|
||||
|
||||
NAStool
|
||||
{
|
||||
"id": "TorrentTransfer",
|
||||
"key: "1-4bdc22bc1e062803c8686beb2796369c59ee141f",
|
||||
"value": "{"to_download": 2, "to_download_id": "4bdc22bc1e062803c8686beb2796369c59ee141f", "delete_source": true}"
|
||||
},
|
||||
{
|
||||
"id": "IYUUAutoSeed",
|
||||
"key: "f161efaf008d2e56e7939272e8d95eca58fa71dd",
|
||||
"value": "[{"downloader": "2", "torrents": ["bd64a8edc5afe6b4beb8813bdbf6faedfb1d4cc4"]}]"
|
||||
}
|
||||
"""
|
||||
# 开始计时
|
||||
start_time = datetime.now()
|
||||
logger.info("开始同步NAStool插件历史记录到MoviePilot")
|
||||
# 清空MoviePilot插件记录
|
||||
if self._clear:
|
||||
logger.info("MoviePilot插件记录已清空")
|
||||
self._plugindata.truncate()
|
||||
|
||||
cnt = 0
|
||||
for history in plugin_history:
|
||||
plugin_id = history[1]
|
||||
plugin_key = history[2]
|
||||
plugin_value = history[3]
|
||||
|
||||
# 替换转种记录
|
||||
if str(plugin_id) == "TorrentTransfer":
|
||||
keys = str(plugin_key).split("-")
|
||||
|
||||
# 1-2cd5d6fe32dca4e39a3e9f10961bfbdb00437e91
|
||||
if len(keys) == 2 and keys[0].isdigit():
|
||||
mp_downloader = self.__get_target_downloader(int(keys[0]))
|
||||
# 替换key
|
||||
plugin_key = mp_downloader + "-" + keys[1]
|
||||
|
||||
# 替换value
|
||||
"""
|
||||
{
|
||||
"to_download":2,
|
||||
"to_download_id":"2cd5d6fe32dca4e39a3e9f10961bfbdb00437e91",
|
||||
"delete_source":true
|
||||
}
|
||||
"""
|
||||
if isinstance(plugin_value, str):
|
||||
plugin_value: dict = json.loads(plugin_value)
|
||||
if isinstance(plugin_value, dict):
|
||||
if str(plugin_value.get("to_download")).isdigit():
|
||||
to_downloader = self.__get_target_downloader(int(plugin_value.get("to_download")))
|
||||
plugin_value["to_download"] = to_downloader
|
||||
|
||||
# 替换辅种记录
|
||||
elif str(plugin_id) == "IYUUAutoSeed":
|
||||
"""
|
||||
[
|
||||
{
|
||||
"downloader":"2",
|
||||
"torrents":[
|
||||
"a18aa62abab42613edba15e7dbad0d729d8500da",
|
||||
"e494f372316bbfd8572da80138a6ef4c491d5991",
|
||||
"cc2bbc1e654d8fc0f83297f6cd36a38805aa2864",
|
||||
"68aec0db3aa7fe28a887e5e41a0d0d5bc284910f",
|
||||
"f02962474287e11441e34e40b8326ddf28d034f6"
|
||||
]
|
||||
},
|
||||
{
|
||||
"downloader":"2",
|
||||
"torrents":[
|
||||
"4f042003ce90519e1aadd02b76f51c0c0711adb3"
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
if isinstance(plugin_value, str):
|
||||
plugin_value: list = json.loads(plugin_value)
|
||||
if not isinstance(plugin_value, list):
|
||||
plugin_value = [plugin_value]
|
||||
for value in plugin_value:
|
||||
if str(value.get("downloader")).isdigit():
|
||||
downloader = self.__get_target_downloader(int(value.get("downloader")))
|
||||
value["downloader"] = downloader
|
||||
|
||||
self._plugindata.save(plugin_id=plugin_id,
|
||||
key=plugin_key,
|
||||
value=plugin_value)
|
||||
cnt += 1
|
||||
if cnt % 100 == 0:
|
||||
logger.info(f"插件记录同步进度 {cnt} / {len(plugin_history)}")
|
||||
|
||||
# 计算耗时
|
||||
end_time = datetime.now()
|
||||
|
||||
logger.info(f"插件记录已同步完成。总耗时 {(end_time - start_time).seconds} 秒")
|
||||
|
||||
def __get_target_downloader(self, download_id: int):
|
||||
"""
|
||||
获取NAStool下载器id对应的Moviepilot下载器
|
||||
"""
|
||||
# 处理下载器映射
|
||||
if self._downloader:
|
||||
downloaders = self._downloader.split("\n")
|
||||
for downloader in downloaders:
|
||||
if not downloader:
|
||||
continue
|
||||
sub_downloaders = downloader.split(":")
|
||||
if not str(sub_downloaders[0]).isdigit():
|
||||
logger.error(f"下载器映射配置错误:NAStool下载器id 应为数字!")
|
||||
continue
|
||||
if int(sub_downloaders[0]) == download_id:
|
||||
return str(sub_downloaders[1])
|
||||
return download_id
|
||||
|
||||
def sync_download_history(self, download_history):
|
||||
"""
|
||||
导入下载记录
|
||||
"""
|
||||
# 开始计时
|
||||
start_time = datetime.now()
|
||||
logger.info("开始同步NAStool下载历史记录到MoviePilot")
|
||||
# 清空MoviePilot下载记录
|
||||
if self._clear:
|
||||
logger.info("MoviePilot下载记录已清空")
|
||||
self._downloadhistory.truncate()
|
||||
|
||||
cnt = 0
|
||||
for history in download_history:
|
||||
mpath = history[0]
|
||||
mtype = history[1]
|
||||
mtitle = history[2]
|
||||
myear = history[3]
|
||||
mtmdbid = history[4]
|
||||
mseasons = history[5]
|
||||
mepisodes = history[6]
|
||||
mimages = history[7]
|
||||
mdownload_hash = history[8]
|
||||
mtorrent = history[9]
|
||||
mdesc = history[10]
|
||||
msite = history[11]
|
||||
mdate = history[12]
|
||||
|
||||
# 处理站点映射
|
||||
if self._site:
|
||||
sites = self._site.split("\n")
|
||||
for site in sites:
|
||||
sub_sites = site.split(":")
|
||||
if str(msite) == str(sub_sites[0]):
|
||||
msite = str(sub_sites[1])
|
||||
|
||||
self._downloadhistory.add(
|
||||
path=os.path.basename(mpath),
|
||||
type=mtype,
|
||||
title=mtitle,
|
||||
year=myear,
|
||||
tmdbid=mtmdbid,
|
||||
seasons=mseasons,
|
||||
episodes=mepisodes,
|
||||
image=mimages,
|
||||
download_hash=mdownload_hash,
|
||||
torrent_name=mtorrent,
|
||||
torrent_description=mdesc,
|
||||
torrent_site=msite,
|
||||
userid=settings.SUPERUSER,
|
||||
date=mdate
|
||||
)
|
||||
cnt += 1
|
||||
if cnt % 100 == 0:
|
||||
logger.info(f"下载记录同步进度 {cnt} / {len(download_history)}")
|
||||
|
||||
# 计算耗时
|
||||
end_time = datetime.now()
|
||||
|
||||
logger.info(f"下载记录已同步完成。总耗时 {(end_time - start_time).seconds} 秒")
|
||||
|
||||
def sync_transfer_history(self, transfer_history):
|
||||
"""
|
||||
导入nt转移记录
|
||||
"""
|
||||
# 开始计时
|
||||
start_time = datetime.now()
|
||||
logger.info("开始同步NAStool转移历史记录到MoviePilot")
|
||||
|
||||
# 清空MoviePilot转移记录
|
||||
if self._clear:
|
||||
logger.info("MoviePilot转移记录已清空")
|
||||
self._transferhistory.truncate()
|
||||
|
||||
# 处理数据,存入mp数据库
|
||||
cnt = 0
|
||||
for history in transfer_history:
|
||||
msrc_path = history[0]
|
||||
msrc_filename = history[1]
|
||||
mdest_path = history[2]
|
||||
mdest_filename = history[3]
|
||||
mmode = history[4]
|
||||
mtype = history[5]
|
||||
mcategory = history[6]
|
||||
mtitle = history[7]
|
||||
myear = history[8]
|
||||
mtmdbid = history[9]
|
||||
mseasons = history[10]
|
||||
mepisodes = history[11]
|
||||
mimage = history[12]
|
||||
mdate = history[13]
|
||||
|
||||
if not msrc_path or not mdest_path:
|
||||
continue
|
||||
|
||||
msrc = msrc_path + "/" + msrc_filename
|
||||
mdest = mdest_path + "/" + mdest_filename
|
||||
|
||||
# 处理路径映射
|
||||
if self._path:
|
||||
paths = self._path.split("\n")
|
||||
for path in paths:
|
||||
sub_paths = path.split(":")
|
||||
msrc = msrc.replace(sub_paths[0], sub_paths[1]).replace('\\', '/')
|
||||
mdest = mdest.replace(sub_paths[0], sub_paths[1]).replace('\\', '/')
|
||||
|
||||
# 存库
|
||||
self._transferhistory.add(
|
||||
src=msrc,
|
||||
dest=mdest,
|
||||
mode=mmode,
|
||||
type=mtype,
|
||||
category=mcategory,
|
||||
title=mtitle,
|
||||
year=myear,
|
||||
tmdbid=mtmdbid,
|
||||
seasons=mseasons,
|
||||
episodes=mepisodes,
|
||||
image=mimage,
|
||||
date=mdate
|
||||
)
|
||||
logger.debug(f"{mtitle} {myear} {mtmdbid} {mseasons} {mepisodes} 已同步")
|
||||
|
||||
cnt += 1
|
||||
if cnt % 100 == 0:
|
||||
logger.info(f"转移记录同步进度 {cnt} / {len(transfer_history)}")
|
||||
|
||||
# 计算耗时
|
||||
end_time = datetime.now()
|
||||
|
||||
logger.info(f"转移记录已同步完成。总耗时 {(end_time - start_time).seconds} 秒")
|
||||
|
||||
@staticmethod
|
||||
def get_nt_plugin_history(cursor):
|
||||
"""
|
||||
获取插件历史记录
|
||||
"""
|
||||
sql = 'select * from PLUGIN_HISTORY;'
|
||||
cursor.execute(sql)
|
||||
plugin_history = cursor.fetchall()
|
||||
|
||||
if not plugin_history:
|
||||
logger.error("未获取到NAStool数据库文件中的插件历史,请检查数据库路径是正确")
|
||||
return
|
||||
|
||||
logger.info(f"获取到NAStool插件记录 {len(plugin_history)} 条")
|
||||
return plugin_history
|
||||
|
||||
@staticmethod
|
||||
def get_nt_download_history(cursor):
|
||||
"""
|
||||
获取下载历史记录
|
||||
"""
|
||||
sql = '''
|
||||
SELECT
|
||||
SAVE_PATH,
|
||||
TYPE,
|
||||
TITLE,
|
||||
YEAR,
|
||||
TMDBID,
|
||||
CASE
|
||||
SE
|
||||
WHEN NULL THEN
|
||||
NULL ELSE substr( SE, 1, instr ( SE, ' ' ) - 1 )
|
||||
END AS seasons,
|
||||
CASE
|
||||
SE
|
||||
WHEN NULL THEN
|
||||
NULL ELSE substr( SE, instr ( SE, ' ' ) + 1 )
|
||||
END AS episodes,
|
||||
POSTER,
|
||||
DOWNLOAD_ID,
|
||||
TORRENT,
|
||||
DESC,
|
||||
SITE,
|
||||
DATE
|
||||
FROM
|
||||
DOWNLOAD_HISTORY
|
||||
WHERE
|
||||
SAVE_PATH IS NOT NULL;
|
||||
'''
|
||||
cursor.execute(sql)
|
||||
download_history = cursor.fetchall()
|
||||
|
||||
if not download_history:
|
||||
logger.error("未获取到NAStool数据库文件中的下载历史,请检查数据库路径是正确")
|
||||
return
|
||||
|
||||
logger.info(f"获取到NAStool下载记录 {len(download_history)} 条")
|
||||
return download_history
|
||||
|
||||
@staticmethod
|
||||
def get_nt_transfer_history(cursor):
|
||||
"""
|
||||
获取nt转移记录
|
||||
"""
|
||||
sql = '''
|
||||
SELECT
|
||||
t.SOURCE_PATH AS src_path,
|
||||
t.SOURCE_FILENAME AS src_filename,
|
||||
t.DEST_PATH AS dest_path,
|
||||
t.DEST_FILENAME AS dest_filename,
|
||||
CASE
|
||||
t.MODE
|
||||
WHEN '硬链接' THEN
|
||||
'link'
|
||||
WHEN '移动' THEN
|
||||
'move'
|
||||
WHEN '复制' THEN
|
||||
'copy'
|
||||
END AS mode,
|
||||
CASE
|
||||
t.TYPE
|
||||
WHEN '动漫' THEN
|
||||
'电视剧' ELSE t.TYPE
|
||||
END AS type,
|
||||
t.CATEGORY AS category,
|
||||
t.TITLE AS title,
|
||||
t.YEAR AS year,
|
||||
t.TMDBID AS tmdbid,
|
||||
CASE
|
||||
t.SEASON_EPISODE
|
||||
WHEN NULL THEN
|
||||
NULL ELSE substr( t.SEASON_EPISODE, 1, instr ( t.SEASON_EPISODE, ' ' ) - 1 )
|
||||
END AS seasons,
|
||||
CASE
|
||||
t.SEASON_EPISODE
|
||||
WHEN NULL THEN
|
||||
NULL ELSE substr( t.SEASON_EPISODE, instr ( t.SEASON_EPISODE, ' ' ) + 1 )
|
||||
END AS episodes,
|
||||
d.POSTER AS image,
|
||||
t.DATE AS date
|
||||
FROM
|
||||
TRANSFER_HISTORY t
|
||||
LEFT JOIN ( SELECT * FROM DOWNLOAD_HISTORY GROUP BY TMDBID ) d ON t.TMDBID = d.TMDBID
|
||||
AND t.TYPE = d.TYPE;
|
||||
'''
|
||||
cursor.execute(sql)
|
||||
nt_historys = cursor.fetchall()
|
||||
|
||||
if not nt_historys:
|
||||
logger.error("未获取到NAStool数据库文件中的转移历史,请检查数据库路径是正确")
|
||||
return
|
||||
|
||||
logger.info(f"获取到NAStool转移记录 {len(nt_historys)} 条")
|
||||
return nt_historys
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'transfer',
|
||||
'label': '同步记录'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'clear',
|
||||
'label': '清空记录'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'nt_db_path',
|
||||
'label': 'NAStool数据库user.db路径',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'path',
|
||||
'rows': '2',
|
||||
'label': '历史记录路径映射',
|
||||
'placeholder': 'NAStool路径:MoviePilot路径(一行一个)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'downloader',
|
||||
'rows': '2',
|
||||
'label': '插件数据下载器映射',
|
||||
'placeholder': 'NAStool下载器id:qbittorrent|transmission(一行一个)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'site',
|
||||
'label': '下载历史站点映射',
|
||||
'placeholder': 'NAStool站点名:MoviePilot站点名(一行一个)'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '开启清空记录时,会在导入历史数据之前删除MoviePilot之前的记录。'
|
||||
'如果转移记录很多,同步时间可能会长(3-10分钟),'
|
||||
'所以点击确定后页面没反应是正常现象,后台正在处理。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"transfer": False,
|
||||
"clear": False,
|
||||
"supp": False,
|
||||
"nt_db_path": "",
|
||||
"path": "",
|
||||
"downloader": "",
|
||||
"site": "",
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,668 +0,0 @@
|
||||
import datetime
|
||||
import re
|
||||
from pathlib import Path
|
||||
from threading import Lock
|
||||
from typing import Optional, Any, List, Dict, Tuple
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.download import DownloadChain
|
||||
from app.chain.search import SearchChain
|
||||
from app.chain.subscribe import SubscribeChain
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo, TorrentInfo, Context
|
||||
from app.core.metainfo import MetaInfo
|
||||
from app.helper.rss import RssHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import SystemConfigKey, MediaType
|
||||
|
||||
lock = Lock()
|
||||
|
||||
|
||||
class RssSubscribe(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "自定义订阅"
|
||||
# 插件描述
|
||||
plugin_desc = "定时刷新RSS报文,识别内容后添加订阅或直接下载。"
|
||||
# 插件图标
|
||||
plugin_icon = "rss.png"
|
||||
# 主题色
|
||||
plugin_color = "#F78421"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "rsssubscribe_"
|
||||
# 加载顺序
|
||||
plugin_order = 19
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有变量
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_cache_path: Optional[Path] = None
|
||||
rsshelper = None
|
||||
downloadchain = None
|
||||
searchchain = None
|
||||
subscribechain = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
_cron: str = ""
|
||||
_notify: bool = False
|
||||
_onlyonce: bool = False
|
||||
_address: str = ""
|
||||
_include: str = ""
|
||||
_exclude: str = ""
|
||||
_proxy: bool = False
|
||||
_filter: bool = False
|
||||
_clear: bool = False
|
||||
_clearflag: bool = False
|
||||
_action: str = "subscribe"
|
||||
_save_path: str = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.rsshelper = RssHelper()
|
||||
self.downloadchain = DownloadChain()
|
||||
self.searchchain = SearchChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._cron = config.get("cron")
|
||||
self._notify = config.get("notify")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._address = config.get("address")
|
||||
self._include = config.get("include")
|
||||
self._exclude = config.get("exclude")
|
||||
self._proxy = config.get("proxy")
|
||||
self._filter = config.get("filter")
|
||||
self._clear = config.get("clear")
|
||||
self._action = config.get("action")
|
||||
self._save_path = config.get("save_path")
|
||||
|
||||
if self._enabled or self._onlyonce:
|
||||
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
if self._cron:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.check,
|
||||
trigger=CronTrigger.from_crontab(self._cron),
|
||||
name="RSS订阅")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
# 推送实时消息
|
||||
self.systemmessage.put(f"执行周期配置错误:{str(err)}")
|
||||
else:
|
||||
self._scheduler.add_job(self.check, "interval", minutes=30, name="RSS订阅")
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"RSS订阅服务启动,立即运行一次")
|
||||
self._scheduler.add_job(func=self.check, trigger='date',
|
||||
run_date=datetime.datetime.now(
|
||||
tz=pytz.timezone(settings.TZ)) + datetime.timedelta(seconds=3)
|
||||
)
|
||||
|
||||
if self._onlyonce or self._clear:
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
# 记录清理缓存设置
|
||||
self._clearflag = self._clear
|
||||
# 关闭清理缓存开关
|
||||
self._clear = False
|
||||
# 保存设置
|
||||
self.__update_config()
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
"""
|
||||
定义远程控制命令
|
||||
:return: 命令关键字、事件、描述、附带数据
|
||||
"""
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取插件API
|
||||
[{
|
||||
"path": "/xx",
|
||||
"endpoint": self.xxx,
|
||||
"methods": ["GET", "POST"],
|
||||
"summary": "API说明"
|
||||
}]
|
||||
"""
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '发送通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'action',
|
||||
'label': '动作',
|
||||
'items': [
|
||||
{'title': '订阅', 'value': 'subscribe'},
|
||||
{'title': '下载', 'value': 'download'}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'address',
|
||||
'label': 'RSS地址',
|
||||
'rows': 3,
|
||||
'placeholder': '每行一个RSS地址'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'include',
|
||||
'label': '包含',
|
||||
'placeholder': '支持正则表达式'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'exclude',
|
||||
'label': '排除',
|
||||
'placeholder': '支持正则表达式'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'save_path',
|
||||
'label': '保存目录',
|
||||
'placeholder': '下载时有效,留空自动'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'proxy',
|
||||
'label': '使用代理服务器',
|
||||
}
|
||||
}
|
||||
]
|
||||
}, {
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'filter',
|
||||
'label': '使用过滤规则',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'clear',
|
||||
'label': '清理历史记录',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"notify": True,
|
||||
"onlyonce": False,
|
||||
"cron": "*/30 * * * *",
|
||||
"address": "",
|
||||
"include": "",
|
||||
"exclude": "",
|
||||
"proxy": False,
|
||||
"clear": False,
|
||||
"filter": False,
|
||||
"action": "subscribe",
|
||||
"save_path": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
"""
|
||||
拼装插件详情页面,需要返回页面配置,同时附带数据
|
||||
"""
|
||||
# 查询同步详情
|
||||
historys = self.get_data('history')
|
||||
if not historys:
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'text': '暂无数据',
|
||||
'props': {
|
||||
'class': 'text-center',
|
||||
}
|
||||
}
|
||||
]
|
||||
# 数据按时间降序排序
|
||||
historys = sorted(historys, key=lambda x: x.get('time'), reverse=True)
|
||||
# 拼装页面
|
||||
contents = []
|
||||
for history in historys:
|
||||
title = history.get("title")
|
||||
poster = history.get("poster")
|
||||
mtype = history.get("type")
|
||||
time_str = history.get("time")
|
||||
contents.append(
|
||||
{
|
||||
'component': 'VCard',
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'd-flex justify-space-start flex-nowrap flex-row',
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VImg',
|
||||
'props': {
|
||||
'src': poster,
|
||||
'height': 120,
|
||||
'width': 80,
|
||||
'aspect-ratio': '2/3',
|
||||
'class': 'object-cover shadow ring-gray-500',
|
||||
'cover': True
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'div',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCardSubtitle',
|
||||
'props': {
|
||||
'class': 'pa-2 font-bold break-words whitespace-break-spaces'
|
||||
},
|
||||
'text': title
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'类型:{mtype}'
|
||||
},
|
||||
{
|
||||
'component': 'VCardText',
|
||||
'props': {
|
||||
'class': 'pa-0 px-2'
|
||||
},
|
||||
'text': f'时间:{time_str}'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'class': 'grid gap-3 grid-info-card',
|
||||
},
|
||||
'content': contents
|
||||
}
|
||||
]
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
|
||||
def __update_config(self):
|
||||
"""
|
||||
更新设置
|
||||
"""
|
||||
self.update_config({
|
||||
"enabled": self._enabled,
|
||||
"notify": self._notify,
|
||||
"onlyonce": self._onlyonce,
|
||||
"cron": self._cron,
|
||||
"address": self._address,
|
||||
"include": self._include,
|
||||
"exclude": self._exclude,
|
||||
"proxy": self._proxy,
|
||||
"clear": self._clear,
|
||||
"filter": self._filter,
|
||||
"action": self._action,
|
||||
"save_path": self._save_path
|
||||
})
|
||||
|
||||
def check(self):
|
||||
"""
|
||||
通过用户RSS同步豆瓣想看数据
|
||||
"""
|
||||
if not self._address:
|
||||
return
|
||||
# 读取历史记录
|
||||
if self._clearflag:
|
||||
history = []
|
||||
else:
|
||||
history: List[dict] = self.get_data('history') or []
|
||||
for url in self._address.split("\n"):
|
||||
# 处理每一个RSS链接
|
||||
if not url:
|
||||
continue
|
||||
logger.info(f"开始刷新RSS:{url} ...")
|
||||
results = self.rsshelper.parse(url, proxy=self._proxy)
|
||||
if not results:
|
||||
logger.error(f"未获取到RSS数据:{url}")
|
||||
return
|
||||
# 过滤规则
|
||||
filter_rule = self.systemconfig.get(SystemConfigKey.SubscribeFilterRules)
|
||||
# 解析数据
|
||||
for result in results:
|
||||
try:
|
||||
title = result.get("title")
|
||||
description = result.get("description")
|
||||
enclosure = result.get("enclosure")
|
||||
link = result.get("link")
|
||||
sise = result.get("sise")
|
||||
pubdate: datetime.datetime = result.get("pubdate")
|
||||
# 检查是否处理过
|
||||
if not title or title in [h.get("key") for h in history]:
|
||||
continue
|
||||
# 检查规则
|
||||
if self._include and not re.search(r"%s" % self._include,
|
||||
f"{title} {description}", re.IGNORECASE):
|
||||
logger.info(f"{title} - {description} 不符合包含规则")
|
||||
continue
|
||||
if self._exclude and re.search(r"%s" % self._exclude,
|
||||
f"{title} {description}", re.IGNORECASE):
|
||||
logger.info(f"{title} - {description} 不符合排除规则")
|
||||
continue
|
||||
# 识别媒体信息
|
||||
meta = MetaInfo(title=title, subtitle=description)
|
||||
if not meta.name:
|
||||
logger.warn(f"{title} 未识别到有效数据")
|
||||
continue
|
||||
mediainfo: MediaInfo = self.chain.recognize_media(meta=meta)
|
||||
if not mediainfo:
|
||||
logger.warn(f'未识别到媒体信息,标题:{title}')
|
||||
continue
|
||||
# 种子
|
||||
torrentinfo = TorrentInfo(
|
||||
title=title,
|
||||
description=description,
|
||||
enclosure=enclosure,
|
||||
page_url=link,
|
||||
size=sise,
|
||||
pubdate=pubdate.strftime("%Y-%m-%d %H:%M:%S") if pubdate else None,
|
||||
site_proxy=self._proxy,
|
||||
)
|
||||
# 过滤种子
|
||||
if self._filter:
|
||||
result = self.chain.filter_torrents(
|
||||
rule_string=filter_rule,
|
||||
torrent_list=[torrentinfo],
|
||||
mediainfo=mediainfo
|
||||
)
|
||||
if not result:
|
||||
logger.info(f"{title} {description} 不匹配过滤规则")
|
||||
continue
|
||||
# 查询缺失的媒体信息
|
||||
exist_flag, no_exists = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
|
||||
continue
|
||||
else:
|
||||
if self._action == "download":
|
||||
if mediainfo.type == MediaType.TV:
|
||||
if no_exists:
|
||||
exist_info = no_exists.get(mediainfo.tmdb_id)
|
||||
season_info = exist_info.get(meta.begin_season or 1)
|
||||
if not season_info:
|
||||
logger.info(f'{mediainfo.title_year} {meta.season} 己存在')
|
||||
continue
|
||||
if (season_info.episodes
|
||||
and not set(meta.episode_list).issubset(set(season_info.episodes))):
|
||||
logger.info(f'{mediainfo.title_year} {meta.season_episode} 己存在')
|
||||
continue
|
||||
# 添加下载
|
||||
result = self.downloadchain.download_single(
|
||||
context=Context(
|
||||
meta_info=meta,
|
||||
media_info=mediainfo,
|
||||
torrent_info=torrentinfo,
|
||||
),
|
||||
save_path=self._save_path,
|
||||
username="RSS订阅"
|
||||
)
|
||||
if not result:
|
||||
logger.error(f'{title} 下载失败')
|
||||
continue
|
||||
else:
|
||||
# 检查是否在订阅中
|
||||
subflag = self.subscribechain.exists(mediainfo=mediainfo, meta=meta)
|
||||
if subflag:
|
||||
logger.info(f'{mediainfo.title_year} {meta.season} 正在订阅中')
|
||||
continue
|
||||
# 添加订阅
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="RSS订阅")
|
||||
# 存储历史记录
|
||||
history.append({
|
||||
"title": f"{mediainfo.title} {meta.season}",
|
||||
"key": f"{title}",
|
||||
"type": mediainfo.type.value,
|
||||
"year": mediainfo.year,
|
||||
"poster": mediainfo.get_poster_image(),
|
||||
"overview": mediainfo.overview,
|
||||
"tmdbid": mediainfo.tmdb_id,
|
||||
"time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
})
|
||||
except Exception as err:
|
||||
logger.error(f'刷新RSS数据出错:{str(err)}')
|
||||
logger.info(f"RSS {url} 刷新完成")
|
||||
# 保存历史记录
|
||||
self.save_data('history', history)
|
||||
# 缓存只清理一次
|
||||
self._clearflag = False
|
||||
@@ -1,227 +0,0 @@
|
||||
from typing import Any, List, Dict, Tuple
|
||||
|
||||
from app.chain.site import SiteChain
|
||||
from app.core.event import eventmanager
|
||||
from app.db.site_oper import SiteOper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import EventType, NotificationType
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class SiteRefresh(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "站点自动更新"
|
||||
# 插件描述
|
||||
plugin_desc = "自动登录获取站点Cookie和User-Agent。"
|
||||
# 插件图标
|
||||
plugin_icon = "login.png"
|
||||
# 主题色
|
||||
plugin_color = "#99b3ff"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "siterefresh_"
|
||||
# 加载顺序
|
||||
plugin_order = 2
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
siteoper: SiteOper = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
_notify: bool = False
|
||||
"""
|
||||
格式
|
||||
站点domain|用户名|用户密码
|
||||
"""
|
||||
_siteconf: list = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.siteoper = SiteOper()
|
||||
# 配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._notify = config.get("notify")
|
||||
self._siteconf = str(config.get("siteconf")).split('\n')
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@eventmanager.register(EventType.SiteLogin)
|
||||
def site_login(self, event):
|
||||
"""
|
||||
开始站点登录
|
||||
"""
|
||||
if not self.get_state():
|
||||
return
|
||||
|
||||
# 站点id
|
||||
site_id = event.event_data.get("site_id")
|
||||
if not site_id:
|
||||
logger.error(f"未获取到site_id")
|
||||
return
|
||||
|
||||
site = self.siteoper.get(site_id)
|
||||
if not site:
|
||||
logger.error(f"未获取到site_id {site_id} 对应的站点数据")
|
||||
return
|
||||
|
||||
site_name = site.name
|
||||
logger.info(f"开始尝试登录站点 {site_name}")
|
||||
siteurl, siteuser, sitepwd = None, None, None
|
||||
# 判断site是否已配置用户名密码
|
||||
for site_conf in self._siteconf:
|
||||
if not site_conf:
|
||||
continue
|
||||
site_confs = str(site_conf).split("|")
|
||||
if len(site_confs) == 3:
|
||||
siteurl = site_confs[0]
|
||||
siteuser = site_confs[1]
|
||||
sitepwd = site_confs[2]
|
||||
else:
|
||||
logger.error(f"{site_conf}配置有误,已跳过")
|
||||
continue
|
||||
|
||||
# 判断是否是目标域名
|
||||
if str(siteurl) in StringUtils.get_url_domain(site.url):
|
||||
# 找到目标域名配置,跳出循环
|
||||
break
|
||||
|
||||
# 开始登录更新cookie和ua
|
||||
if siteurl and siteuser and sitepwd:
|
||||
state, messages = SiteChain().update_cookie(site_info=site,
|
||||
username=siteuser,
|
||||
password=sitepwd)
|
||||
if state:
|
||||
logger.info(f"站点{site_name}自动更新Cookie和Ua成功")
|
||||
else:
|
||||
logger.error(f"站点{site_name}自动更新Cookie和Ua失败")
|
||||
|
||||
if self._notify:
|
||||
self.post_message(mtype=NotificationType.SiteMessage,
|
||||
title=f"站点 {site_name} Cookie已失效。",
|
||||
text=f"自动更新Cookie和Ua{'成功' if state else '失败'}")
|
||||
else:
|
||||
logger.error(f"未获取到站点{site_name}配置,已跳过")
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '开启通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'siteconf',
|
||||
'label': '站点配置',
|
||||
'rows': 5,
|
||||
'placeholder': '每一行一个站点,配置方式:\n'
|
||||
'域名domain|用户名|用户密码\n'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '站点签到提示Cookie过期时自动触发。'
|
||||
'不支持开启两步认证的站点。'
|
||||
'不是所有站点都支持,失败请手动更新。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"notify": False,
|
||||
"siteconf": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,338 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
import re
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
from urllib.parse import urljoin, urlsplit
|
||||
|
||||
from requests import Session
|
||||
|
||||
from app.core.config import settings
|
||||
from app.helper.cloudflare import under_challenge
|
||||
from app.log import logger
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.site import SiteUtils
|
||||
|
||||
SITE_BASE_ORDER = 1000
|
||||
|
||||
|
||||
# 站点框架
|
||||
class SiteSchema(Enum):
|
||||
DiscuzX = "Discuz!"
|
||||
Gazelle = "Gazelle"
|
||||
Ipt = "IPTorrents"
|
||||
NexusPhp = "NexusPhp"
|
||||
NexusProject = "NexusProject"
|
||||
NexusRabbit = "NexusRabbit"
|
||||
NexusHhanclub = "NexusHhanclub"
|
||||
SmallHorse = "Small Horse"
|
||||
Unit3d = "Unit3d"
|
||||
TorrentLeech = "TorrentLeech"
|
||||
FileList = "FileList"
|
||||
TNode = "TNode"
|
||||
|
||||
|
||||
class ISiteUserInfo(metaclass=ABCMeta):
|
||||
# 站点模版
|
||||
schema = SiteSchema.NexusPhp
|
||||
# 站点解析时判断顺序,值越小越先解析
|
||||
order = SITE_BASE_ORDER
|
||||
|
||||
def __init__(self, site_name: str,
|
||||
url: str,
|
||||
site_cookie: str,
|
||||
index_html: str,
|
||||
session: Session = None,
|
||||
ua: str = None,
|
||||
emulate: bool = False,
|
||||
proxy: bool = None):
|
||||
super().__init__()
|
||||
# 站点信息
|
||||
self.site_name = None
|
||||
self.site_url = None
|
||||
# 用户信息
|
||||
self.username = None
|
||||
self.userid = None
|
||||
# 未读消息
|
||||
self.message_unread = 0
|
||||
self.message_unread_contents = []
|
||||
|
||||
# 流量信息
|
||||
self.upload = 0
|
||||
self.download = 0
|
||||
self.ratio = 0
|
||||
|
||||
# 种子信息
|
||||
self.seeding = 0
|
||||
self.leeching = 0
|
||||
self.uploaded = 0
|
||||
self.completed = 0
|
||||
self.incomplete = 0
|
||||
self.seeding_size = 0
|
||||
self.leeching_size = 0
|
||||
self.uploaded_size = 0
|
||||
self.completed_size = 0
|
||||
self.incomplete_size = 0
|
||||
# 做种人数, 种子大小
|
||||
self.seeding_info = []
|
||||
|
||||
# 用户详细信息
|
||||
self.user_level = None
|
||||
self.join_at = None
|
||||
self.bonus = 0.0
|
||||
|
||||
# 错误信息
|
||||
self.err_msg = None
|
||||
# 内部数据
|
||||
self._base_url = None
|
||||
self._site_cookie = None
|
||||
self._index_html = None
|
||||
self._addition_headers = None
|
||||
|
||||
# 站点页面
|
||||
self._brief_page = "index.php"
|
||||
self._user_detail_page = "userdetails.php?id="
|
||||
self._user_traffic_page = "index.php"
|
||||
self._torrent_seeding_page = "getusertorrentlistajax.php?userid="
|
||||
self._user_mail_unread_page = "messages.php?action=viewmailbox&box=1&unread=yes"
|
||||
self._sys_mail_unread_page = "messages.php?action=viewmailbox&box=-2&unread=yes"
|
||||
self._torrent_seeding_params = None
|
||||
self._torrent_seeding_headers = None
|
||||
|
||||
split_url = urlsplit(url)
|
||||
self.site_name = site_name
|
||||
self.site_url = url
|
||||
self._base_url = f"{split_url.scheme}://{split_url.netloc}"
|
||||
self._site_cookie = site_cookie
|
||||
self._index_html = index_html
|
||||
self._session = session if session else None
|
||||
self._ua = ua
|
||||
|
||||
self._emulate = emulate
|
||||
self._proxy = proxy
|
||||
|
||||
def site_schema(self) -> SiteSchema:
|
||||
"""
|
||||
站点解析模型
|
||||
:return: 站点解析模型
|
||||
"""
|
||||
return self.schema
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
"""
|
||||
是否匹配当前解析模型
|
||||
:param html_text: 站点首页html
|
||||
:return: 是否匹配
|
||||
"""
|
||||
pass
|
||||
|
||||
def parse(self):
|
||||
"""
|
||||
解析站点信息
|
||||
:return:
|
||||
"""
|
||||
if not self._parse_logged_in(self._index_html):
|
||||
return
|
||||
|
||||
self._parse_site_page(self._index_html)
|
||||
self._parse_user_base_info(self._index_html)
|
||||
self._pase_unread_msgs()
|
||||
if self._user_traffic_page:
|
||||
self._parse_user_traffic_info(self._get_page_content(urljoin(self._base_url, self._user_traffic_page)))
|
||||
if self._user_detail_page:
|
||||
self._parse_user_detail_info(self._get_page_content(urljoin(self._base_url, self._user_detail_page)))
|
||||
|
||||
self._parse_seeding_pages()
|
||||
self.seeding_info = json.dumps(self.seeding_info)
|
||||
|
||||
def _pase_unread_msgs(self):
|
||||
"""
|
||||
解析所有未读消息标题和内容
|
||||
:return:
|
||||
"""
|
||||
unread_msg_links = []
|
||||
if self.message_unread > 0:
|
||||
links = {self._user_mail_unread_page, self._sys_mail_unread_page}
|
||||
for link in links:
|
||||
if not link:
|
||||
continue
|
||||
|
||||
msg_links = []
|
||||
next_page = self._parse_message_unread_links(
|
||||
self._get_page_content(urljoin(self._base_url, link)), msg_links)
|
||||
while next_page:
|
||||
next_page = self._parse_message_unread_links(
|
||||
self._get_page_content(urljoin(self._base_url, next_page)), msg_links)
|
||||
|
||||
unread_msg_links.extend(msg_links)
|
||||
|
||||
for msg_link in unread_msg_links:
|
||||
logger.debug(f"{self.site_name} 信息链接 {msg_link}")
|
||||
head, date, content = self._parse_message_content(self._get_page_content(urljoin(self._base_url, msg_link)))
|
||||
logger.debug(f"{self.site_name} 标题 {head} 时间 {date} 内容 {content}")
|
||||
self.message_unread_contents.append((head, date, content))
|
||||
|
||||
def _parse_seeding_pages(self):
|
||||
if self._torrent_seeding_page:
|
||||
# 第一页
|
||||
next_page = self._parse_user_torrent_seeding_info(
|
||||
self._get_page_content(urljoin(self._base_url, self._torrent_seeding_page),
|
||||
self._torrent_seeding_params,
|
||||
self._torrent_seeding_headers))
|
||||
|
||||
# 其他页处理
|
||||
while next_page:
|
||||
next_page = self._parse_user_torrent_seeding_info(
|
||||
self._get_page_content(urljoin(urljoin(self._base_url, self._torrent_seeding_page), next_page),
|
||||
self._torrent_seeding_params,
|
||||
self._torrent_seeding_headers),
|
||||
multi_page=True)
|
||||
|
||||
@staticmethod
|
||||
def _prepare_html_text(html_text):
|
||||
"""
|
||||
处理掉HTML中的干扰部分
|
||||
"""
|
||||
return re.sub(r"#\d+", "", re.sub(r"\d+px", "", html_text))
|
||||
|
||||
@abstractmethod
|
||||
def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
|
||||
"""
|
||||
获取未阅读消息链接
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
def _get_page_content(self, url: str, params: dict = None, headers: dict = None):
|
||||
"""
|
||||
:param url: 网页地址
|
||||
:param params: post参数
|
||||
:param headers: 额外的请求头
|
||||
:return:
|
||||
"""
|
||||
req_headers = None
|
||||
proxies = settings.PROXY if self._proxy else None
|
||||
if self._ua or headers or self._addition_headers:
|
||||
req_headers = {}
|
||||
if headers:
|
||||
req_headers.update(headers)
|
||||
|
||||
req_headers.update({
|
||||
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
|
||||
"User-Agent": f"{self._ua}"
|
||||
})
|
||||
|
||||
if self._addition_headers:
|
||||
req_headers.update(self._addition_headers)
|
||||
|
||||
if params:
|
||||
res = RequestUtils(cookies=self._site_cookie,
|
||||
session=self._session,
|
||||
timeout=60,
|
||||
proxies=proxies,
|
||||
headers=req_headers).post_res(url=url, data=params)
|
||||
else:
|
||||
res = RequestUtils(cookies=self._site_cookie,
|
||||
session=self._session,
|
||||
timeout=60,
|
||||
proxies=proxies,
|
||||
headers=req_headers).get_res(url=url)
|
||||
if res is not None and res.status_code in (200, 500, 403):
|
||||
# 如果cloudflare 有防护,尝试使用浏览器仿真
|
||||
if under_challenge(res.text):
|
||||
logger.warn(
|
||||
f"{self.site_name} 检测到Cloudflare,请更新Cookie和UA")
|
||||
return ""
|
||||
if re.search(r"charset=\"?utf-8\"?", res.text, re.IGNORECASE):
|
||||
res.encoding = "utf-8"
|
||||
else:
|
||||
res.encoding = res.apparent_encoding
|
||||
return res.text
|
||||
|
||||
return ""
|
||||
|
||||
@abstractmethod
|
||||
def _parse_site_page(self, html_text: str):
|
||||
"""
|
||||
解析站点相关信息页面
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _parse_user_base_info(self, html_text: str):
|
||||
"""
|
||||
解析用户基础信息
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
def _parse_logged_in(self, html_text):
|
||||
"""
|
||||
解析用户是否已经登陆
|
||||
:param html_text:
|
||||
:return: True/False
|
||||
"""
|
||||
logged_in = SiteUtils.is_logged_in(html_text)
|
||||
if not logged_in:
|
||||
self.err_msg = "未检测到已登陆,请检查cookies是否过期"
|
||||
logger.warn(f"{self.site_name} 未登录,跳过后续操作")
|
||||
|
||||
return logged_in
|
||||
|
||||
@abstractmethod
|
||||
def _parse_user_traffic_info(self, html_text: str):
|
||||
"""
|
||||
解析用户的上传,下载,分享率等信息
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
|
||||
"""
|
||||
解析用户的做种相关信息
|
||||
:param html_text:
|
||||
:param multi_page: 是否多页数据
|
||||
:return: 下页地址
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _parse_user_detail_info(self, html_text: str):
|
||||
"""
|
||||
解析用户的详细信息
|
||||
加入时间/等级/魔力值等
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _parse_message_content(self, html_text):
|
||||
"""
|
||||
解析短消息内容
|
||||
:param html_text:
|
||||
:return: head: message, date: time, content: message content
|
||||
"""
|
||||
pass
|
||||
|
||||
def to_dict(self):
|
||||
"""
|
||||
转化为字典
|
||||
"""
|
||||
attributes = [
|
||||
attr for attr in dir(self)
|
||||
if not callable(getattr(self, attr)) and not attr.startswith("_")
|
||||
]
|
||||
return {
|
||||
attr: getattr(self, attr).value
|
||||
if isinstance(getattr(self, attr), SiteSchema)
|
||||
else getattr(self, attr) for attr in attributes
|
||||
}
|
||||
@@ -1,139 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class DiscuzUserInfo(ISiteUserInfo):
|
||||
schema = SiteSchema.DiscuzX
|
||||
order = SITE_BASE_ORDER + 10
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return False
|
||||
|
||||
printable_text = html.xpath("string(.)") if html else ""
|
||||
return 'Powered by Discuz!' in printable_text
|
||||
|
||||
def _parse_user_base_info(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
user_info = html.xpath('//a[contains(@href, "&uid=")]')
|
||||
if user_info:
|
||||
user_id_match = re.search(r"&uid=(\d+)", user_info[0].attrib['href'])
|
||||
if user_id_match and user_id_match.group().strip():
|
||||
self.userid = user_id_match.group(1)
|
||||
self._torrent_seeding_page = f"forum.php?&mod=torrents&cat_5up=on"
|
||||
self._user_detail_page = user_info[0].attrib['href']
|
||||
self.username = user_info[0].text.strip()
|
||||
|
||||
def _parse_site_page(self, html_text: str):
|
||||
# TODO
|
||||
pass
|
||||
|
||||
def _parse_user_detail_info(self, html_text: str):
|
||||
"""
|
||||
解析用户额外信息,加入时间,等级
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return None
|
||||
|
||||
# 用户等级
|
||||
user_levels_text = html.xpath('//a[contains(@href, "usergroup")]/text()')
|
||||
if user_levels_text:
|
||||
self.user_level = user_levels_text[-1].strip()
|
||||
|
||||
# 加入日期
|
||||
join_at_text = html.xpath('//li[em[text()="注册时间"]]/text()')
|
||||
if join_at_text:
|
||||
self.join_at = StringUtils.unify_datetime_str(join_at_text[0].strip())
|
||||
|
||||
# 分享率
|
||||
ratio_text = html.xpath('//li[contains(.//text(), "分享率")]//text()')
|
||||
if ratio_text:
|
||||
ratio_match = re.search(r"\(([\d,.]+)\)", ratio_text[0])
|
||||
if ratio_match and ratio_match.group(1).strip():
|
||||
self.bonus = StringUtils.str_float(ratio_match.group(1))
|
||||
|
||||
# 积分
|
||||
bouns_text = html.xpath('//li[em[text()="积分"]]/text()')
|
||||
if bouns_text:
|
||||
self.bonus = StringUtils.str_float(bouns_text[0].strip())
|
||||
|
||||
# 上传
|
||||
upload_text = html.xpath('//li[em[contains(text(),"上传量")]]/text()')
|
||||
if upload_text:
|
||||
self.upload = StringUtils.num_filesize(upload_text[0].strip().split('/')[-1])
|
||||
|
||||
# 下载
|
||||
download_text = html.xpath('//li[em[contains(text(),"下载量")]]/text()')
|
||||
if download_text:
|
||||
self.download = StringUtils.num_filesize(download_text[0].strip().split('/')[-1])
|
||||
|
||||
def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
|
||||
"""
|
||||
做种相关信息
|
||||
:param html_text:
|
||||
:param multi_page: 是否多页数据
|
||||
:return: 下页地址
|
||||
"""
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return None
|
||||
|
||||
size_col = 3
|
||||
seeders_col = 4
|
||||
# 搜索size列
|
||||
if html.xpath('//tr[position()=1]/td[.//img[@class="size"] and .//img[@alt="size"]]'):
|
||||
size_col = len(html.xpath('//tr[position()=1]/td[.//img[@class="size"] '
|
||||
'and .//img[@alt="size"]]/preceding-sibling::td')) + 1
|
||||
# 搜索seeders列
|
||||
if html.xpath('//tr[position()=1]/td[.//img[@class="seeders"] and .//img[@alt="seeders"]]'):
|
||||
seeders_col = len(html.xpath('//tr[position()=1]/td[.//img[@class="seeders"] '
|
||||
'and .//img[@alt="seeders"]]/preceding-sibling::td')) + 1
|
||||
|
||||
page_seeding = 0
|
||||
page_seeding_size = 0
|
||||
page_seeding_info = []
|
||||
seeding_sizes = html.xpath(f'//tr[position()>1]/td[{size_col}]')
|
||||
seeding_seeders = html.xpath(f'//tr[position()>1]/td[{seeders_col}]//text()')
|
||||
if seeding_sizes and seeding_seeders:
|
||||
page_seeding = len(seeding_sizes)
|
||||
|
||||
for i in range(0, len(seeding_sizes)):
|
||||
size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
|
||||
seeders = StringUtils.str_int(seeding_seeders[i])
|
||||
|
||||
page_seeding_size += size
|
||||
page_seeding_info.append([seeders, size])
|
||||
|
||||
self.seeding += page_seeding
|
||||
self.seeding_size += page_seeding_size
|
||||
self.seeding_info.extend(page_seeding_info)
|
||||
|
||||
# 是否存在下页数据
|
||||
next_page = None
|
||||
next_page_text = html.xpath('//a[contains(.//text(), "下一页") or contains(.//text(), "下一頁")]/@href')
|
||||
if next_page_text:
|
||||
next_page = next_page_text[-1].strip()
|
||||
|
||||
return next_page
|
||||
|
||||
def _parse_user_traffic_info(self, html_text: str):
|
||||
pass
|
||||
|
||||
def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
|
||||
return None
|
||||
|
||||
def _parse_message_content(self, html_text):
|
||||
return None, None, None
|
||||
@@ -1,118 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class FileListSiteUserInfo(ISiteUserInfo):
|
||||
schema = SiteSchema.FileList
|
||||
order = SITE_BASE_ORDER + 50
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return False
|
||||
|
||||
printable_text = html.xpath("string(.)") if html else ""
|
||||
return 'Powered by FileList' in printable_text
|
||||
|
||||
def _parse_site_page(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
|
||||
user_detail = re.search(r"userdetails.php\?id=(\d+)", html_text)
|
||||
if user_detail and user_detail.group().strip():
|
||||
self._user_detail_page = user_detail.group().strip().lstrip('/')
|
||||
self.userid = user_detail.group(1)
|
||||
|
||||
self._torrent_seeding_page = f"snatchlist.php?id={self.userid}&action=torrents&type=seeding"
|
||||
|
||||
def _parse_user_base_info(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
ret = html.xpath(f'//a[contains(@href, "userdetails") and contains(@href, "{self.userid}")]//text()')
|
||||
if ret:
|
||||
self.username = str(ret[0])
|
||||
|
||||
def _parse_user_traffic_info(self, html_text: str):
|
||||
"""
|
||||
上传/下载/分享率 [做种数/魔力值]
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
return
|
||||
|
||||
def _parse_user_detail_info(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
upload_html = html.xpath('//table//tr/td[text()="Uploaded"]/following-sibling::td//text()')
|
||||
if upload_html:
|
||||
self.upload = StringUtils.num_filesize(upload_html[0])
|
||||
download_html = html.xpath('//table//tr/td[text()="Downloaded"]/following-sibling::td//text()')
|
||||
if download_html:
|
||||
self.download = StringUtils.num_filesize(download_html[0])
|
||||
|
||||
self.ratio = 0 if self.download == 0 else self.upload / self.download
|
||||
|
||||
user_level_html = html.xpath('//table//tr/td[text()="Class"]/following-sibling::td//text()')
|
||||
if user_level_html:
|
||||
self.user_level = user_level_html[0].strip()
|
||||
|
||||
join_at_html = html.xpath('//table//tr/td[contains(text(), "Join")]/following-sibling::td//text()')
|
||||
if join_at_html:
|
||||
self.join_at = StringUtils.unify_datetime_str(join_at_html[0].strip())
|
||||
|
||||
bonus_html = html.xpath('//a[contains(@href, "shop.php")]')
|
||||
if bonus_html:
|
||||
self.bonus = StringUtils.str_float(bonus_html[0].xpath("string(.)").strip())
|
||||
pass
|
||||
|
||||
def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
|
||||
"""
|
||||
做种相关信息
|
||||
:param html_text:
|
||||
:param multi_page: 是否多页数据
|
||||
:return: 下页地址
|
||||
"""
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return None
|
||||
|
||||
size_col = 6
|
||||
seeders_col = 7
|
||||
|
||||
page_seeding = 0
|
||||
page_seeding_size = 0
|
||||
page_seeding_info = []
|
||||
seeding_sizes = html.xpath(f'//table/tr[position()>1]/td[{size_col}]')
|
||||
seeding_seeders = html.xpath(f'//table/tr[position()>1]/td[{seeders_col}]')
|
||||
if seeding_sizes and seeding_seeders:
|
||||
page_seeding = len(seeding_sizes)
|
||||
|
||||
for i in range(0, len(seeding_sizes)):
|
||||
size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
|
||||
seeders = StringUtils.str_int(seeding_seeders[i].xpath("string(.)").strip())
|
||||
|
||||
page_seeding_size += size
|
||||
page_seeding_info.append([seeders, size])
|
||||
|
||||
self.seeding += page_seeding
|
||||
self.seeding_size += page_seeding_size
|
||||
self.seeding_info.extend(page_seeding_info)
|
||||
|
||||
# 是否存在下页数据
|
||||
next_page = None
|
||||
|
||||
return next_page
|
||||
|
||||
def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
|
||||
return None
|
||||
|
||||
def _parse_message_content(self, html_text):
|
||||
return None, None, None
|
||||
@@ -1,163 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class GazelleSiteUserInfo(ISiteUserInfo):
|
||||
schema = SiteSchema.Gazelle
|
||||
order = SITE_BASE_ORDER
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return False
|
||||
|
||||
printable_text = html.xpath("string(.)") if html else ""
|
||||
|
||||
return "Powered by Gazelle" in printable_text or "DIC Music" in printable_text
|
||||
|
||||
def _parse_user_base_info(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
tmps = html.xpath('//a[contains(@href, "user.php?id=")]')
|
||||
if tmps:
|
||||
user_id_match = re.search(r"user.php\?id=(\d+)", tmps[0].attrib['href'])
|
||||
if user_id_match and user_id_match.group().strip():
|
||||
self.userid = user_id_match.group(1)
|
||||
self._torrent_seeding_page = f"torrents.php?type=seeding&userid={self.userid}"
|
||||
self._user_detail_page = f"user.php?id={self.userid}"
|
||||
self.username = tmps[0].text.strip()
|
||||
|
||||
tmps = html.xpath('//*[@id="header-uploaded-value"]/@data-value')
|
||||
if tmps:
|
||||
self.upload = StringUtils.num_filesize(tmps[0])
|
||||
else:
|
||||
tmps = html.xpath('//li[@id="stats_seeding"]/span/text()')
|
||||
if tmps:
|
||||
self.upload = StringUtils.num_filesize(tmps[0])
|
||||
|
||||
tmps = html.xpath('//*[@id="header-downloaded-value"]/@data-value')
|
||||
if tmps:
|
||||
self.download = StringUtils.num_filesize(tmps[0])
|
||||
else:
|
||||
tmps = html.xpath('//li[@id="stats_leeching"]/span/text()')
|
||||
if tmps:
|
||||
self.download = StringUtils.num_filesize(tmps[0])
|
||||
|
||||
self.ratio = 0.0 if self.download <= 0.0 else round(self.upload / self.download, 3)
|
||||
|
||||
tmps = html.xpath('//a[contains(@href, "bonus.php")]/@data-tooltip')
|
||||
if tmps:
|
||||
bonus_match = re.search(r"([\d,.]+)", tmps[0])
|
||||
if bonus_match and bonus_match.group(1).strip():
|
||||
self.bonus = StringUtils.str_float(bonus_match.group(1))
|
||||
else:
|
||||
tmps = html.xpath('//a[contains(@href, "bonus.php")]')
|
||||
if tmps:
|
||||
bonus_text = tmps[0].xpath("string(.)")
|
||||
bonus_match = re.search(r"([\d,.]+)", bonus_text)
|
||||
if bonus_match and bonus_match.group(1).strip():
|
||||
self.bonus = StringUtils.str_float(bonus_match.group(1))
|
||||
|
||||
def _parse_site_page(self, html_text: str):
|
||||
# TODO
|
||||
pass
|
||||
|
||||
def _parse_user_detail_info(self, html_text: str):
|
||||
"""
|
||||
解析用户额外信息,加入时间,等级
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return None
|
||||
|
||||
# 用户等级
|
||||
user_levels_text = html.xpath('//*[@id="class-value"]/@data-value')
|
||||
if user_levels_text:
|
||||
self.user_level = user_levels_text[0].strip()
|
||||
else:
|
||||
user_levels_text = html.xpath('//li[contains(text(), "用户等级")]/text()')
|
||||
if user_levels_text:
|
||||
self.user_level = user_levels_text[0].split(':')[1].strip()
|
||||
|
||||
# 加入日期
|
||||
join_at_text = html.xpath('//*[@id="join-date-value"]/@data-value')
|
||||
if join_at_text:
|
||||
self.join_at = StringUtils.unify_datetime_str(join_at_text[0].strip())
|
||||
else:
|
||||
join_at_text = html.xpath(
|
||||
'//div[contains(@class, "box_userinfo_stats")]//li[contains(text(), "加入时间")]/span/text()')
|
||||
if join_at_text:
|
||||
self.join_at = StringUtils.unify_datetime_str(join_at_text[0].strip())
|
||||
|
||||
def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
|
||||
"""
|
||||
做种相关信息
|
||||
:param html_text:
|
||||
:param multi_page: 是否多页数据
|
||||
:return: 下页地址
|
||||
"""
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return None
|
||||
|
||||
size_col = 3
|
||||
# 搜索size列
|
||||
if html.xpath('//table[contains(@id, "torrent")]//tr[1]/td'):
|
||||
size_col = len(html.xpath('//table[contains(@id, "torrent")]//tr[1]/td')) - 3
|
||||
# 搜索seeders列
|
||||
seeders_col = size_col + 2
|
||||
|
||||
page_seeding = 0
|
||||
page_seeding_size = 0
|
||||
page_seeding_info = []
|
||||
seeding_sizes = html.xpath(f'//table[contains(@id, "torrent")]//tr[position()>1]/td[{size_col}]')
|
||||
seeding_seeders = html.xpath(f'//table[contains(@id, "torrent")]//tr[position()>1]/td[{seeders_col}]/text()')
|
||||
if seeding_sizes and seeding_seeders:
|
||||
page_seeding = len(seeding_sizes)
|
||||
|
||||
for i in range(0, len(seeding_sizes)):
|
||||
size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
|
||||
seeders = int(seeding_seeders[i])
|
||||
|
||||
page_seeding_size += size
|
||||
page_seeding_info.append([seeders, size])
|
||||
|
||||
if multi_page:
|
||||
self.seeding += page_seeding
|
||||
self.seeding_size += page_seeding_size
|
||||
self.seeding_info.extend(page_seeding_info)
|
||||
else:
|
||||
if not self.seeding:
|
||||
self.seeding = page_seeding
|
||||
if not self.seeding_size:
|
||||
self.seeding_size = page_seeding_size
|
||||
if not self.seeding_info:
|
||||
self.seeding_info = page_seeding_info
|
||||
|
||||
# 是否存在下页数据
|
||||
next_page = None
|
||||
next_page_text = html.xpath('//a[contains(.//text(), "Next") or contains(.//text(), "下一页")]/@href')
|
||||
if next_page_text:
|
||||
next_page = next_page_text[-1].strip()
|
||||
|
||||
return next_page
|
||||
|
||||
def _parse_user_traffic_info(self, html_text: str):
|
||||
# TODO
|
||||
pass
|
||||
|
||||
def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
|
||||
return None
|
||||
|
||||
def _parse_message_content(self, html_text):
|
||||
return None, None, None
|
||||
@@ -1,93 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class IptSiteUserInfo(ISiteUserInfo):
|
||||
schema = SiteSchema.Ipt
|
||||
order = SITE_BASE_ORDER + 35
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
return 'IPTorrents' in html_text
|
||||
|
||||
def _parse_user_base_info(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
html = etree.HTML(html_text)
|
||||
tmps = html.xpath('//a[contains(@href, "/u/")]//text()')
|
||||
tmps_id = html.xpath('//a[contains(@href, "/u/")]/@href')
|
||||
if tmps:
|
||||
self.username = str(tmps[-1])
|
||||
if tmps_id:
|
||||
user_id_match = re.search(r"/u/(\d+)", tmps_id[0])
|
||||
if user_id_match and user_id_match.group().strip():
|
||||
self.userid = user_id_match.group(1)
|
||||
self._user_detail_page = f"user.php?u={self.userid}"
|
||||
self._torrent_seeding_page = f"peers?u={self.userid}"
|
||||
|
||||
tmps = html.xpath('//div[@class = "stats"]/div/div')
|
||||
if tmps:
|
||||
self.upload = StringUtils.num_filesize(str(tmps[0].xpath('span/text()')[1]).strip())
|
||||
self.download = StringUtils.num_filesize(str(tmps[0].xpath('span/text()')[2]).strip())
|
||||
self.seeding = StringUtils.str_int(tmps[0].xpath('a')[2].xpath('text()')[0])
|
||||
self.leeching = StringUtils.str_int(tmps[0].xpath('a')[2].xpath('text()')[1])
|
||||
self.ratio = StringUtils.str_float(str(tmps[0].xpath('span/text()')[0]).strip().replace('-', '0'))
|
||||
self.bonus = StringUtils.str_float(tmps[0].xpath('a')[3].xpath('text()')[0])
|
||||
|
||||
def _parse_site_page(self, html_text: str):
|
||||
# TODO
|
||||
pass
|
||||
|
||||
def _parse_user_detail_info(self, html_text: str):
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return
|
||||
|
||||
user_levels_text = html.xpath('//tr/th[text()="Class"]/following-sibling::td[1]/text()')
|
||||
if user_levels_text:
|
||||
self.user_level = user_levels_text[0].strip()
|
||||
|
||||
# 加入日期
|
||||
join_at_text = html.xpath('//tr/th[text()="Join date"]/following-sibling::td[1]/text()')
|
||||
if join_at_text:
|
||||
self.join_at = StringUtils.unify_datetime_str(join_at_text[0].split(' (')[0])
|
||||
|
||||
def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return
|
||||
# seeding start
|
||||
seeding_end_pos = 3
|
||||
if html.xpath('//tr/td[text() = "Leechers"]'):
|
||||
seeding_end_pos = len(html.xpath('//tr/td[text() = "Leechers"]/../preceding-sibling::tr')) + 1
|
||||
seeding_end_pos = seeding_end_pos - 3
|
||||
|
||||
page_seeding = 0
|
||||
page_seeding_size = 0
|
||||
seeding_torrents = html.xpath('//tr/td[text() = "Seeders"]/../following-sibling::tr/td[position()=6]/text()')
|
||||
if seeding_torrents:
|
||||
page_seeding = seeding_end_pos
|
||||
for per_size in seeding_torrents[:seeding_end_pos]:
|
||||
if '(' in per_size and ')' in per_size:
|
||||
per_size = per_size.split('(')[-1]
|
||||
per_size = per_size.split(')')[0]
|
||||
|
||||
page_seeding_size += StringUtils.num_filesize(per_size)
|
||||
|
||||
self.seeding = page_seeding
|
||||
self.seeding_size = page_seeding_size
|
||||
|
||||
def _parse_user_traffic_info(self, html_text: str):
|
||||
# TODO
|
||||
pass
|
||||
|
||||
def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
|
||||
return None
|
||||
|
||||
def _parse_message_content(self, html_text):
|
||||
return None, None, None
|
||||
@@ -1,61 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import SITE_BASE_ORDER, SiteSchema
|
||||
from app.plugins.sitestatistic.siteuserinfo.nexus_php import NexusPhpSiteUserInfo
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class NexusHhanclubSiteUserInfo(NexusPhpSiteUserInfo):
|
||||
schema = SiteSchema.NexusHhanclub
|
||||
order = SITE_BASE_ORDER + 20
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
return 'hhanclub.top' in html_text
|
||||
|
||||
def _parse_user_traffic_info(self, html_text):
|
||||
super()._parse_user_traffic_info(html_text)
|
||||
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
html = etree.HTML(html_text)
|
||||
|
||||
# 上传、下载、分享率
|
||||
upload_match = re.search(r"[_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+[KMGTPI]*B)",
|
||||
html.xpath('//*[@id="user-info-panel"]/div[2]/div[2]/div[4]/text()')[0])
|
||||
download_match = re.search(r"[_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+[KMGTPI]*B)",
|
||||
html.xpath('//*[@id="user-info-panel"]/div[2]/div[2]/div[5]/text()')[0])
|
||||
ratio_match = re.search(r"分享率][::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+)",
|
||||
html.xpath('//*[@id="user-info-panel"]/div[2]/div[1]/div[1]/div/text()')[0])
|
||||
|
||||
# 计算分享率
|
||||
self.upload = StringUtils.num_filesize(upload_match.group(1).strip()) if upload_match else 0
|
||||
self.download = StringUtils.num_filesize(download_match.group(1).strip()) if download_match else 0
|
||||
# 优先使用页面上的分享率
|
||||
calc_ratio = 0.0 if self.download <= 0.0 else round(self.upload / self.download, 3)
|
||||
self.ratio = StringUtils.str_float(ratio_match.group(1)) if (
|
||||
ratio_match and ratio_match.group(1).strip()) else calc_ratio
|
||||
|
||||
def _parse_user_detail_info(self, html_text: str):
|
||||
"""
|
||||
解析用户额外信息,加入时间,等级
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
super()._parse_user_detail_info(html_text)
|
||||
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return
|
||||
# 加入时间
|
||||
join_at_text = html.xpath('//*[@id="mainContent"]/div/div[2]/div[4]/div[3]/span[2]/text()[1]')
|
||||
if join_at_text:
|
||||
self.join_at = StringUtils.unify_datetime_str(join_at_text[0].split(' (')[0].strip())
|
||||
|
||||
def _get_user_level(self, html):
|
||||
super()._get_user_level(html)
|
||||
user_level_path = html.xpath('//*[@id="mainContent"]/div/div[2]/div[2]/div[4]/span[2]/img/@title')
|
||||
if user_level_path:
|
||||
self.user_level = user_level_path[0]
|
||||
@@ -1,392 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class NexusPhpSiteUserInfo(ISiteUserInfo):
|
||||
schema = SiteSchema.NexusPhp
|
||||
order = SITE_BASE_ORDER * 2
|
||||
|
||||
@classmethod
|
||||
def match(cls, html_text: str) -> bool:
|
||||
"""
|
||||
默认使用NexusPhp解析
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
return True
|
||||
|
||||
def _parse_site_page(self, html_text: str):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
|
||||
user_detail = re.search(r"userdetails.php\?id=(\d+)", html_text)
|
||||
if user_detail and user_detail.group().strip():
|
||||
self._user_detail_page = user_detail.group().strip().lstrip('/')
|
||||
self.userid = user_detail.group(1)
|
||||
self._torrent_seeding_page = f"getusertorrentlistajax.php?userid={self.userid}&type=seeding"
|
||||
else:
|
||||
user_detail = re.search(r"(userdetails)", html_text)
|
||||
if user_detail and user_detail.group().strip():
|
||||
self._user_detail_page = user_detail.group().strip().lstrip('/')
|
||||
self.userid = None
|
||||
self._torrent_seeding_page = None
|
||||
|
||||
def _parse_message_unread(self, html_text):
|
||||
"""
|
||||
解析未读短消息数量
|
||||
:param html_text:
|
||||
:return:
|
||||
"""
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return
|
||||
|
||||
message_labels = html.xpath('//a[@href="messages.php"]/..')
|
||||
message_labels.extend(html.xpath('//a[contains(@href, "messages.php")]/..'))
|
||||
if message_labels:
|
||||
message_text = message_labels[0].xpath("string(.)")
|
||||
|
||||
logger.debug(f"{self.site_name} 消息原始信息 {message_text}")
|
||||
message_unread_match = re.findall(r"[^Date](信息箱\s*|\(|你有\xa0)(\d+)", message_text)
|
||||
|
||||
if message_unread_match and len(message_unread_match[-1]) == 2:
|
||||
self.message_unread = StringUtils.str_int(message_unread_match[-1][1])
|
||||
elif message_text.isdigit():
|
||||
self.message_unread = StringUtils.str_int(message_text)
|
||||
|
||||
def _parse_user_base_info(self, html_text: str):
|
||||
# 合并解析,减少额外请求调用
|
||||
self._parse_user_traffic_info(html_text)
|
||||
self._user_traffic_page = None
|
||||
|
||||
self._parse_message_unread(html_text)
|
||||
|
||||
html = etree.HTML(html_text)
|
||||
if not html:
|
||||
return
|
||||
|
||||
ret = html.xpath(f'//a[contains(@href, "userdetails") and contains(@href, "{self.userid}")]//b//text()')
|
||||
if ret:
|
||||
self.username = str(ret[0])
|
||||
return
|
||||
ret = html.xpath(f'//a[contains(@href, "userdetails") and contains(@href, "{self.userid}")]//text()')
|
||||
if ret:
|
||||
self.username = str(ret[0])
|
||||
|
||||
ret = html.xpath('//a[contains(@href, "userdetails")]//strong//text()')
|
||||
if ret:
|
||||
self.username = str(ret[0])
|
||||
return
|
||||
|
||||
def _parse_user_traffic_info(self, html_text):
|
||||
html_text = self._prepare_html_text(html_text)
|
||||
upload_match = re.search(r"[^总]上[传傳]量?[::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+[KMGTPI]*B)", html_text,
|
||||
re.IGNORECASE)
|
||||
self.upload = StringUtils.num_filesize(upload_match.group(1).strip()) if upload_match else 0
|
||||
download_match = re.search(r"[^总子影力]下[载載]量?[::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+[KMGTPI]*B)", html_text,
|
||||
re.IGNORECASE)
|
||||
self.download = StringUtils.num_filesize(download_match.group(1).strip()) if download_match else 0
|
||||
ratio_match = re.search(r"分享率[::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+)", html_text)
|
||||
# 计算分享率
|
||||
calc_ratio = 0.0 if self.download <= 0.0 else round(self.upload / self.download, 3)
|
||||
# 优先使用页面上的分享率
|
||||
self.ratio = StringUtils.str_float(ratio_match.group(1)) if (
|
||||
ratio_match and ratio_match.group(1).strip()) else calc_ratio
|
||||
leeching_match = re.search(r"(Torrents leeching|下载中)[\u4E00-\u9FA5\D\s]+(\d+)[\s\S]+<", html_text)
|
||||
self.leeching = StringUtils.str_int(leeching_match.group(2)) if leeching_match and leeching_match.group(
|
||||
2).strip() else 0
|
||||
html = etree.HTML(html_text)
|
||||
has_ucoin, self.bonus = self._parse_ucoin(html)
|
||||
if has_ucoin:
|
||||
return
|
||||
tmps = html.xpath('//a[contains(@href,"mybonus")]/text()') if html else None
|
||||
if tmps:
|
||||
bonus_text = str(tmps[0]).strip()
|
||||
bonus_match = re.search(r"([\d,.]+)", bonus_text)
|
||||
if bonus_match and bonus_match.group(1).strip():
|
||||
self.bonus = StringUtils.str_float(bonus_match.group(1))
|
||||
return
|
||||
bonus_match = re.search(r"mybonus.[\[\]::<>/a-zA-Z_\-=\"'\s#;.(使用魔力值豆]+\s*([\d,.]+)[<()&\s]", html_text)
|
||||
try:
|
||||
if bonus_match and bonus_match.group(1).strip():
|
||||
self.bonus = StringUtils.str_float(bonus_match.group(1))
|
||||
return
|
||||
bonus_match = re.search(r"[魔力值|\]][\[\]::<>/a-zA-Z_\-=\"'\s#;]+\s*([\d,.]+|\"[\d,.]+\")[<>()&\s]",
|
||||
html_text,
|
||||
flags=re.S)
|
||||
if bonus_match and bonus_match.group(1).strip():
|
||||
self.bonus = StringUtils.str_float(bonus_match.group(1).strip('"'))
|
||||
except Exception as err:
|
||||
logger.error(f"{self.site_name} 解析魔力值出错, 错误信息: {str(err)}")
|
||||
|
||||
@staticmethod
|
||||
def _parse_ucoin(html):
|
||||
"""
|
||||
解析ucoin, 统一转换为铜币
|
||||
:param html:
|
||||
:return:
|
||||
"""
|
||||
if html:
|
||||
gold, silver, copper = None, None, None
|
||||
|
||||
golds = html.xpath('//span[@class = "ucoin-symbol ucoin-gold"]//text()')
|
||||
if golds:
|
||||
gold = StringUtils.str_float(str(golds[-1]))
|
||||
silvers = html.xpath('//span[@class = "ucoin-symbol ucoin-silver"]//text()')
|
||||
if silvers:
|
||||
silver = StringUtils.str_float(str(silvers[-1]))
|
||||
coppers = html.xpath('//span[@class = "ucoin-symbol ucoin-copper"]//text()')
|
||||
if coppers:
|
||||
copper = StringUtils.str_float(str(coppers[-1]))
|
||||
if gold or silver or copper:
|
||||
gold = gold if gold else 0
|
||||
silver = silver if silver else 0
|
||||
copper = copper if copper else 0
|
||||
return True, gold * 100 * 100 + silver * 100 + copper
|
||||
return False, 0.0
|
||||
|
||||
    def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
        """
        Parse the seeding-torrent table: seeding count, total size and
        per-torrent ``[seeders, size]`` pairs are accumulated onto the instance.

        :param html_text: page HTML (escaped slashes are unescaped first)
        :param multi_page: whether this call handles a paginated listing
        :return: URL of the next page, or None when there is no further page
        """
        html = etree.HTML(str(html_text).replace(r'\/', '/'))
        if not html:
            return None

        # The landing page may expose a dedicated seeding-list link; prefer it
        seeding_url_text = html.xpath('//a[contains(@href,"torrents.php") '
                                      'and contains(@href,"seeding")]/@href')
        if multi_page is False and seeding_url_text and seeding_url_text[0].strip():
            self._torrent_seeding_page = seeding_url_text[0].strip()
            return self._torrent_seeding_page

        size_col = 3
        seeders_col = 4
        # Locate the "size" column in the header row
        size_col_xpath = '//tr[position()=1]/' \
                         'td[(img[@class="size"] and img[@alt="size"])' \
                         ' or (text() = "大小")' \
                         ' or (a/img[@class="size" and @alt="size"])]'
        if html.xpath(size_col_xpath):
            size_col = len(html.xpath(f'{size_col_xpath}/preceding-sibling::td')) + 1
        # Locate the "seeders" column in the header row
        seeders_col_xpath = '//tr[position()=1]/' \
                            'td[(img[@class="seeders"] and img[@alt="seeders"])' \
                            ' or (text() = "在做种")' \
                            ' or (a/img[@class="seeders" and @alt="seeders"])]'
        if html.xpath(seeders_col_xpath):
            seeders_col = len(html.xpath(f'{seeders_col_xpath}/preceding-sibling::td')) + 1

        page_seeding = 0
        page_seeding_size = 0
        page_seeding_info = []
        # Restrict to table[@class="torrents"] when such a table exists
        table_class = '//table[@class="torrents"]' if html.xpath('//table[@class="torrents"]') else ''
        seeding_sizes = html.xpath(f'{table_class}//tr[position()>1]/td[{size_col}]')
        seeding_seeders = html.xpath(f'{table_class}//tr[position()>1]/td[{seeders_col}]/b/a/text()')
        if not seeding_seeders:
            # Fallback: any text in the seeders cell
            seeding_seeders = html.xpath(f'{table_class}//tr[position()>1]/td[{seeders_col}]//text()')
        if seeding_sizes and seeding_seeders:
            page_seeding = len(seeding_sizes)

            for i in range(0, len(seeding_sizes)):
                size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
                seeders = StringUtils.str_int(seeding_seeders[i])

                page_seeding_size += size
                page_seeding_info.append([seeders, size])

        self.seeding += page_seeding
        self.seeding_size += page_seeding_size
        self.seeding_info.extend(page_seeding_info)

        # Check whether a next page exists
        next_page = None
        next_page_text = html.xpath('//a[contains(.//text(), "下一页") or contains(.//text(), "下一頁")]/@href')
        if next_page_text:
            next_page = next_page_text[-1].strip()
            # fix up page url
            if self.userid not in next_page:
                next_page = f'{next_page}&userid={self.userid}&type=seeding'

        return next_page
|
||||
|
||||
    def _parse_user_detail_info(self, html_text: str):
        """
        Parse extra user details: join date, level, and — when the seeding
        page did not provide them — seeding count / total seeding size.

        :param html_text: user-detail page HTML
        :return: None
        """
        html = etree.HTML(html_text)
        if not html:
            return

        self._get_user_level(html)

        self._fixup_traffic_info(html)

        # Join date
        join_at_text = html.xpath(
            '//tr/td[text()="加入日期" or text()="注册日期" or *[text()="加入日期"]]/following-sibling::td[1]//text()'
            '|//div/b[text()="加入日期"]/../text()')
        if join_at_text:
            self.join_at = StringUtils.unify_datetime_str(join_at_text[0].split(' (')[0].strip())

        # Seeding size & count
        # Fallback: re-parse here when the seeding page yielded nothing
        seeding_sizes = html.xpath('//tr/td[text()="当前上传"]/following-sibling::td[1]//'
                                   'table[tr[1][td[4 and text()="尺寸"]]]//tr[position()>1]/td[4]')
        seeding_seeders = html.xpath('//tr/td[text()="当前上传"]/following-sibling::td[1]//'
                                     'table[tr[1][td[5 and text()="做种者"]]]//tr[position()>1]/td[5]//text()')
        tmp_seeding = len(seeding_sizes)
        tmp_seeding_size = 0
        tmp_seeding_info = []
        for i in range(0, len(seeding_sizes)):
            size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
            seeders = StringUtils.str_int(seeding_seeders[i])

            tmp_seeding_size += size
            tmp_seeding_info.append([seeders, size])

        # Only backfill values the seeding page did not already provide
        if not self.seeding_size:
            self.seeding_size = tmp_seeding_size
        if not self.seeding:
            self.seeding = tmp_seeding
        if not self.seeding_info:
            self.seeding_info = tmp_seeding_info

        # Some sites expose a textual seeding summary instead of a table
        seeding_sizes = html.xpath('//tr/td[text()="做种统计"]/following-sibling::td[1]//text()')
        if seeding_sizes:
            seeding_match = re.search(r"总做种数:\s+(\d+)", seeding_sizes[0], re.IGNORECASE)
            seeding_size_match = re.search(r"总做种体积:\s+([\d,.\s]+[KMGTPI]*B)", seeding_sizes[0], re.IGNORECASE)
            tmp_seeding = StringUtils.str_int(seeding_match.group(1)) if (
                    seeding_match and seeding_match.group(1)) else 0
            tmp_seeding_size = StringUtils.num_filesize(
                seeding_size_match.group(1).strip()) if seeding_size_match else 0
        if not self.seeding_size:
            self.seeding_size = tmp_seeding_size
        if not self.seeding:
            self.seeding = tmp_seeding

        self._fixup_torrent_seeding_page(html)
|
||||
|
||||
    def _fixup_torrent_seeding_page(self, html):
        """
        Fix up the seeding-list page URL for site variants.

        :param html: parsed lxml HTML tree of the user-detail page
        :return: None
        """
        # Dedicated seeding-list page
        seeding_url_text = html.xpath('//a[contains(@href,"getusertorrentlist.php") '
                                      'and contains(@href,"seeding")]/@href')
        if seeding_url_text:
            self._torrent_seeding_page = seeding_url_text[0].strip()
        # Extract the user id from the JS call
        seeding_url_text = html.xpath('//a[contains(@href, "javascript: getusertorrentlistajax") '
                                      'and contains(@href,"seeding")]/@href')
        csrf_text = html.xpath('//meta[@name="x-csrf"]/@content')
        if not self._torrent_seeding_page and seeding_url_text:
            user_js = re.search(r"javascript: getusertorrentlistajax\(\s*'(\d+)", seeding_url_text[0])
            if user_js and user_js.group(1).strip():
                self.userid = user_js.group(1).strip()
                self._torrent_seeding_page = f"getusertorrentlistajax.php?userid={self.userid}&type=seeding"
        elif seeding_url_text and csrf_text:
            # CSRF-protected ajax variant: pass the token as a query parameter
            if csrf_text[0].strip():
                self._torrent_seeding_page \
                    = f"ajax_getusertorrentlist.php"
                self._torrent_seeding_params = {'userid': self.userid, 'type': 'seeding', 'csrf': csrf_text[0].strip()}

        # Category-based seeding mode
        # Temporarily disabled
        # seeding_url_text = html.xpath('//tr/td[text()="当前做种"]/following-sibling::td[1]'
        #                               '/table//td/a[contains(@href,"seeding")]/@href')
        # if seeding_url_text:
        #     self._torrent_seeding_page = seeding_url_text
|
||||
|
||||
    def _get_user_level(self, html):
        """Extract the user level; prefer image @title, fall back to cell text."""
        # Level cell on the same row: image-based levels carry the value in @title
        user_levels_text = html.xpath('//tr/td[text()="等級" or text()="等级" or *[text()="等级"]]/'
                                      'following-sibling::td[1]/img[1]/@title')
        if user_levels_text:
            self.user_level = user_levels_text[0].strip()
            return

        # Text-only cell, or an image without a @title attribute
        user_levels_text = html.xpath('//tr/td[text()="等級" or text()="等级"]/'
                                      'following-sibling::td[1 and not(img)]'
                                      '|//tr/td[text()="等級" or text()="等级"]/'
                                      'following-sibling::td[1 and img[not(@title)]]')
        if user_levels_text:
            self.user_level = user_levels_text[0].xpath("string(.)").strip()
            return

        # Any sibling cell at all
        user_levels_text = html.xpath('//tr/td[text()="等級" or text()="等级"]/'
                                      'following-sibling::td[1]')
        if user_levels_text:
            self.user_level = user_levels_text[0].xpath("string(.)").strip()
            return

        # Last resort: level embedded as "[level]" in userdetails link text
        user_levels_text = html.xpath('//a[contains(@href, "userdetails")]/text()')
        if not self.user_level and user_levels_text:
            for user_level_text in user_levels_text:
                user_level_match = re.search(r"\[(.*)]", user_level_text)
                if user_level_match and user_level_match.group(1).strip():
                    self.user_level = user_level_match.group(1).strip()
                    break
|
||||
|
||||
    def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
        """
        Collect links of unread messages into *msg_links*.

        :param html_text: message-list page HTML
        :param msg_links: output list, extended in place
        :return: URL of the next page, or None
        """
        html = etree.HTML(html_text)
        if not html:
            return None

        # Rows without a "Read" marker image are unread
        message_links = html.xpath('//tr[not(./td/img[@alt="Read"])]/td/a[contains(@href, "viewmessage")]/@href')
        msg_links.extend(message_links)
        # Check whether a next page exists
        next_page = None
        next_page_text = html.xpath('//a[contains(.//text(), "下一页") or contains(.//text(), "下一頁")]/@href')
        if next_page_text:
            next_page = next_page_text[-1].strip()

        return next_page
|
||||
|
||||
    def _parse_message_content(self, html_text):
        """
        Parse a single message page.

        :param html_text: message page HTML
        :return: tuple ``(title, date, content)`` — each None when not found
        """
        html = etree.HTML(html_text)
        if not html:
            return None, None, None
        # Title (classic NexusPHP h1, or layui card header)
        message_head_text = None
        message_head = html.xpath('//h1/text()'
                                  '|//div[@class="layui-card-header"]/span[1]/text()')
        if message_head:
            message_head_text = message_head[-1].strip()

        # Message date
        message_date_text = None
        message_date = html.xpath('//h1/following-sibling::table[.//tr/td[@class="colhead"]]//tr[2]/td[2]'
                                  '|//div[@class="layui-card-header"]/span[2]/span[2]')
        if message_date:
            message_date_text = message_date[0].xpath("string(.)").strip()

        # Message body
        message_content_text = None
        message_content = html.xpath('//h1/following-sibling::table[.//tr/td[@class="colhead"]]//tr[3]/td'
                                     '|//div[contains(@class,"layui-card-body")]')
        if message_content:
            message_content_text = message_content[0].xpath("string(.)").strip()

        return message_head_text, message_date_text, message_content_text
|
||||
|
||||
    def _fixup_traffic_info(self, html):
        """Backfill the bonus value from the detail page when still unset."""
        # fixup bonus
        if not self.bonus:
            bonus_text = html.xpath('//tr/td[text()="魔力值" or text()="猫粮"]/following-sibling::td[1]/text()')
            if bonus_text:
                self.bonus = StringUtils.str_float(bonus_text[0].strip())
|
||||
@@ -1,24 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import SITE_BASE_ORDER, SiteSchema
|
||||
from app.plugins.sitestatistic.siteuserinfo.nexus_php import NexusPhpSiteUserInfo
|
||||
|
||||
|
||||
class NexusProjectSiteUserInfo(NexusPhpSiteUserInfo):
    """Site user-info parser for Nexus Project flavoured NexusPHP sites."""

    schema = SiteSchema.NexusProject
    order = SITE_BASE_ORDER + 25

    @classmethod
    def match(cls, html_text: str) -> bool:
        # The site brands its pages with the literal "Nexus Project"
        return 'Nexus Project' in html_text

    def _parse_site_page(self, html_text: str):
        """Locate the user-detail page and derive the seeding-list URL."""
        html_text = self._prepare_html_text(html_text)

        user_detail = re.search(r"userdetails.php\?id=(\d+)", html_text)
        if user_detail and user_detail.group().strip():
            self._user_detail_page = user_detail.group().strip().lstrip('/')
            self.userid = user_detail.group(1)

        self._torrent_seeding_page = f"viewusertorrents.php?id={self.userid}&show=seeding"
|
||||
@@ -1,57 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.log import logger
|
||||
from app.plugins.sitestatistic.siteuserinfo import SITE_BASE_ORDER, SiteSchema
|
||||
from app.plugins.sitestatistic.siteuserinfo.nexus_php import NexusPhpSiteUserInfo
|
||||
|
||||
|
||||
class NexusRabbitSiteUserInfo(NexusPhpSiteUserInfo):
    """Parser for NexusPHP sites using the "Rabbit" theme (JSON seeding API)."""

    schema = SiteSchema.NexusRabbit
    order = SITE_BASE_ORDER + 5

    @classmethod
    def match(cls, html_text: str) -> bool:
        # Identified by the "Style by Rabbit" credit in the rendered page text
        html = etree.HTML(html_text)
        if not html:
            return False

        printable_text = html.xpath("string(.)") if html else ""
        return 'Style by Rabbit' in printable_text

    def _parse_site_page(self, html_text: str):
        """Derive the JSON seeding endpoint after the base-class parsing."""
        super()._parse_site_page(html_text)
        self._torrent_seeding_page = f"getusertorrentlistajax.php?page=1&limit=5000000&type=seeding&uid={self.userid}"
        self._torrent_seeding_headers = {"Accept": "application/json, text/javascript, */*; q=0.01"}

    def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
        """
        Parse seeding info from the JSON response.

        :param html_text: JSON payload returned by the seeding endpoint
        :param multi_page: whether this call handles a paginated listing
        :return: next page URL (always None — single JSON response)
        """
        try:
            torrents = json.loads(html_text).get('data')
        except Exception as e:
            logger.error(f"解析做种信息失败: {str(e)}")
            return

        page_seeding_size = 0
        page_seeding_info = []

        page_seeding = len(torrents)
        for torrent in torrents:
            seeders = int(torrent.get('seeders', 0))
            size = int(torrent.get('size', 0))
            page_seeding_size += int(torrent.get('size', 0))

            page_seeding_info.append([seeders, size])

        self.seeding += page_seeding
        self.seeding_size += page_seeding_size
        self.seeding_info.extend(page_seeding_info)
|
||||
@@ -1,110 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class SmallHorseSiteUserInfo(ISiteUserInfo):
    """Parser for Gazelle-style "Small Horse" sites."""

    schema = SiteSchema.SmallHorse
    order = SITE_BASE_ORDER + 30

    @classmethod
    def match(cls, html_text: str) -> bool:
        return 'Small Horse' in html_text

    def _parse_site_page(self, html_text: str):
        """Locate the user id and derive traffic / seeding page URLs."""
        html_text = self._prepare_html_text(html_text)

        user_detail = re.search(r"user.php\?id=(\d+)", html_text)
        if user_detail and user_detail.group().strip():
            self._user_detail_page = user_detail.group().strip().lstrip('/')
            self.userid = user_detail.group(1)
            self._torrent_seeding_page = f"torrents.php?type=seeding&userid={self.userid}"
            self._user_traffic_page = f"user.php?id={self.userid}"

    def _parse_user_base_info(self, html_text: str):
        # Username is the text of the first user.php profile link
        html_text = self._prepare_html_text(html_text)
        html = etree.HTML(html_text)
        ret = html.xpath('//a[contains(@href, "user.php")]//text()')
        if ret:
            self.username = str(ret[0])

    def _parse_user_traffic_info(self, html_text: str):
        """
        Upload / download / share ratio [seeding count / bonus].

        NOTE(review): relies on fixed positional indexes into the
        "stats nobullet" lists — fragile against site layout changes.

        :param html_text: user profile page HTML
        :return: None
        """
        html_text = self._prepare_html_text(html_text)
        html = etree.HTML(html_text)
        tmps = html.xpath('//ul[@class = "stats nobullet"]')
        if tmps:
            if tmps[1].xpath("li") and tmps[1].xpath("li")[0].xpath("span//text()"):
                self.join_at = StringUtils.unify_datetime_str(tmps[1].xpath("li")[0].xpath("span//text()")[0])
            self.upload = StringUtils.num_filesize(str(tmps[1].xpath("li")[2].xpath("text()")[0]).split(":")[1].strip())
            self.download = StringUtils.num_filesize(
                str(tmps[1].xpath("li")[3].xpath("text()")[0]).split(":")[1].strip())
            if tmps[1].xpath("li")[4].xpath("span//text()"):
                self.ratio = StringUtils.str_float(str(tmps[1].xpath("li")[4].xpath("span//text()")[0]).replace('∞', '0'))
            else:
                self.ratio = StringUtils.str_float(str(tmps[1].xpath("li")[5].xpath("text()")[0]).split(":")[1])
            self.bonus = StringUtils.str_float(str(tmps[1].xpath("li")[5].xpath("text()")[0]).split(":")[1])
            self.user_level = str(tmps[3].xpath("li")[0].xpath("text()")[0]).split(":")[1].strip()
            self.leeching = StringUtils.str_int(
                (tmps[4].xpath("li")[6].xpath("text()")[0]).split(":")[1].replace("[", ""))

    def _parse_user_detail_info(self, html_text: str):
        # All details come from the traffic page; nothing to do here
        pass

    def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
        """
        Seeding info.

        :param html_text: torrents page HTML
        :param multi_page: whether this call handles a paginated listing
        :return: next page URL, or None
        """
        html = etree.HTML(html_text)
        if not html:
            return None

        size_col = 6
        seeders_col = 8

        page_seeding = 0
        page_seeding_size = 0
        page_seeding_info = []
        seeding_sizes = html.xpath(f'//table[@id="torrent_table"]//tr[position()>1]/td[{size_col}]')
        seeding_seeders = html.xpath(f'//table[@id="torrent_table"]//tr[position()>1]/td[{seeders_col}]')
        if seeding_sizes and seeding_seeders:
            page_seeding = len(seeding_sizes)

            for i in range(0, len(seeding_sizes)):
                size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
                seeders = StringUtils.str_int(seeding_seeders[i].xpath("string(.)").strip())

                page_seeding_size += size
                page_seeding_info.append([seeders, size])

        self.seeding += page_seeding
        self.seeding_size += page_seeding_size
        self.seeding_info.extend(page_seeding_info)

        # Check whether a next page exists
        next_page = None
        next_pages = html.xpath('//ul[@class="pagination"]/li[contains(@class,"active")]/following-sibling::li')
        if next_pages and len(next_pages) > 1:
            page_num = next_pages[0].xpath("string(.)").strip()
            if page_num.isdigit():
                next_page = f"{self._torrent_seeding_page}&page={page_num}"

        return next_page

    def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
        # Message parsing is not supported for this schema
        return None

    def _parse_message_content(self, html_text):
        return None, None, None
|
||||
@@ -1,103 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import json
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class TNodeSiteUserInfo(ISiteUserInfo):
    """Parser for sites powered by TNode (JSON API with CSRF token)."""

    schema = SiteSchema.TNode
    order = SITE_BASE_ORDER + 60

    @classmethod
    def match(cls, html_text: str) -> bool:
        return 'Powered By TNode' in html_text

    def _parse_site_page(self, html_text: str):
        """Pick up the CSRF token from the page and set the API endpoints."""
        html_text = self._prepare_html_text(html_text)

        # <meta name="x-csrf-token" content="fd169876a7b4846f3a7a16fcd5cccf8d">
        csrf_token = re.search(r'<meta name="x-csrf-token" content="(.+?)">', html_text)
        if csrf_token:
            self._addition_headers = {'X-CSRF-TOKEN': csrf_token.group(1)}
            self._user_detail_page = "api/user/getMainInfo"
            self._torrent_seeding_page = "api/user/listTorrentActivity?id=&type=seeding&page=1&size=20000"

    def _parse_logged_in(self, html_text):
        """
        Check login state by looking for user info.
        Detection is skipped for now; to be improved later.

        :param html_text: page HTML
        :return: always True
        """
        return True

    def _parse_user_base_info(self, html_text: str):
        # Username defaults to the user id; refined by the detail API
        self.username = self.userid

    def _parse_user_traffic_info(self, html_text: str):
        # Traffic figures come from the detail API instead
        pass

    def _parse_user_detail_info(self, html_text: str):
        """Parse the getMainInfo JSON payload into user attributes."""
        detail = json.loads(html_text)
        if detail.get("status") != 200:
            return

        user_info = detail.get("data", {})
        self.userid = user_info.get("id")
        self.username = user_info.get("username")
        self.user_level = user_info.get("class", {}).get("name")
        self.join_at = user_info.get("regTime", 0)
        self.join_at = StringUtils.unify_datetime_str(str(self.join_at))

        self.upload = user_info.get("upload")
        self.download = user_info.get("download")
        self.ratio = 0 if self.download <= 0 else round(self.upload / self.download, 3)
        self.bonus = user_info.get("bonus")

        # Unread messages across the three inbox categories
        self.message_unread = user_info.get("unreadAdmin", 0) + user_info.get("unreadInbox", 0) + user_info.get(
            "unreadSystem", 0)
        pass

    def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
        """
        Parse the seeding-list JSON.

        :param html_text: JSON payload from listTorrentActivity
        :param multi_page: whether this call handles a paginated listing
        :return: next page URL (always None — one oversized page is requested)
        """
        seeding_info = json.loads(html_text)
        if seeding_info.get("status") != 200:
            return

        torrents = seeding_info.get("data", {}).get("torrents", [])

        page_seeding_size = 0
        page_seeding_info = []
        for torrent in torrents:
            size = torrent.get("size", 0)
            seeders = torrent.get("seeding", 0)

            page_seeding_size += size
            page_seeding_info.append([seeders, size])

        self.seeding += len(torrents)
        self.seeding_size += page_seeding_size
        self.seeding_info.extend(page_seeding_info)

        # No pagination: everything arrives in one response
        next_page = None

        return next_page

    def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
        return None

    def _parse_message_content(self, html_text):
        """
        System messages  api/message/listSystem?page=1&size=20
        Inbox messages   api/message/listInbox?page=1&size=20
        Admin messages   api/message/listAdmin?page=1&size=20

        :param html_text: page HTML (unused)
        :return: (None, None, None) — not implemented
        """
        return None, None, None
|
||||
@@ -1,109 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class TorrentLeechSiteUserInfo(ISiteUserInfo):
    """Parser for TorrentLeech profile pages."""

    schema = SiteSchema.TorrentLeech
    order = SITE_BASE_ORDER + 40

    @classmethod
    def match(cls, html_text: str) -> bool:
        return 'TorrentLeech' in html_text

    def _parse_site_page(self, html_text: str):
        """Locate the profile name and derive traffic / seeding page URLs."""
        html_text = self._prepare_html_text(html_text)

        user_detail = re.search(r"/profile/([^/]+)/", html_text)
        if user_detail and user_detail.group().strip():
            self._user_detail_page = user_detail.group().strip().lstrip('/')
            self.userid = user_detail.group(1)
            self._user_traffic_page = f"profile/{self.userid}/view"
            self._torrent_seeding_page = f"profile/{self.userid}/seeding"

    def _parse_user_base_info(self, html_text: str):
        # TorrentLeech identifies users by profile name
        self.username = self.userid

    def _parse_user_traffic_info(self, html_text: str):
        """
        Upload / download / share ratio [class / join date / bonus points].

        :param html_text: profile view page HTML
        :return: None
        """
        html_text = self._prepare_html_text(html_text)
        html = etree.HTML(html_text)
        upload_html = html.xpath('//div[contains(@class,"profile-uploaded")]//span/text()')
        if upload_html:
            self.upload = StringUtils.num_filesize(upload_html[0])
        download_html = html.xpath('//div[contains(@class,"profile-downloaded")]//span/text()')
        if download_html:
            self.download = StringUtils.num_filesize(download_html[0])
        ratio_html = html.xpath('//div[contains(@class,"profile-ratio")]//span/text()')
        if ratio_html:
            # "∞" (infinite ratio) is normalised to 0
            self.ratio = StringUtils.str_float(ratio_html[0].replace('∞', '0'))

        user_level_html = html.xpath('//table[contains(@class, "profileViewTable")]'
                                     '//tr/td[text()="Class"]/following-sibling::td/text()')
        if user_level_html:
            self.user_level = user_level_html[0].strip()

        join_at_html = html.xpath('//table[contains(@class, "profileViewTable")]'
                                  '//tr/td[text()="Registration date"]/following-sibling::td/text()')
        if join_at_html:
            self.join_at = StringUtils.unify_datetime_str(join_at_html[0].strip())

        bonus_html = html.xpath('//span[contains(@class, "total-TL-points")]/text()')
        if bonus_html:
            self.bonus = StringUtils.str_float(bonus_html[0].strip())

    def _parse_user_detail_info(self, html_text: str):
        # Everything needed is parsed from the traffic page
        pass

    def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
        """
        Seeding info.

        :param html_text: seeding page HTML
        :param multi_page: whether this call handles a paginated listing
        :return: next page URL, or None
        """
        html = etree.HTML(html_text)
        if not html:
            return None

        size_col = 2
        seeders_col = 7

        page_seeding = 0
        page_seeding_size = 0
        page_seeding_info = []
        seeding_sizes = html.xpath(f'//tbody/tr/td[{size_col}]')
        seeding_seeders = html.xpath(f'//tbody/tr/td[{seeders_col}]/text()')
        if seeding_sizes and seeding_seeders:
            page_seeding = len(seeding_sizes)

            for i in range(0, len(seeding_sizes)):
                size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
                seeders = StringUtils.str_int(seeding_seeders[i])

                page_seeding_size += size
                page_seeding_info.append([seeders, size])

        self.seeding += page_seeding
        self.seeding_size += page_seeding_size
        self.seeding_info.extend(page_seeding_info)

        # Single page only
        next_page = None

        return next_page

    def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
        return None

    def _parse_message_content(self, html_text):
        return None, None, None
|
||||
@@ -1,130 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from app.plugins.sitestatistic.siteuserinfo import ISiteUserInfo, SITE_BASE_ORDER, SiteSchema
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class Unit3dSiteUserInfo(ISiteUserInfo):
    """Parser for UNIT3D-framework sites."""

    schema = SiteSchema.Unit3d
    order = SITE_BASE_ORDER + 15

    @classmethod
    def match(cls, html_text: str) -> bool:
        # UNIT3D pages reference their bundled unit3d.js asset
        return "unit3d.js" in html_text

    def _parse_user_base_info(self, html_text: str):
        """Extract username, derive detail/seeding URLs and read the bonus."""
        html_text = self._prepare_html_text(html_text)
        html = etree.HTML(html_text)

        tmps = html.xpath('//a[contains(@href, "/users/") and contains(@href, "settings")]/@href')
        if tmps:
            user_name_match = re.search(r"/users/(.+)/settings", tmps[0])
            if user_name_match and user_name_match.group().strip():
                self.username = user_name_match.group(1)
                self._torrent_seeding_page = f"/users/{self.username}/active?perPage=100&client=&seeding=include"
                self._user_detail_page = f"/users/{self.username}"

        tmps = html.xpath('//a[contains(@href, "bonus/earnings")]')
        if tmps:
            bonus_text = tmps[0].xpath("string(.)")
            bonus_match = re.search(r"([\d,.]+)", bonus_text)
            if bonus_match and bonus_match.group(1).strip():
                self.bonus = StringUtils.str_float(bonus_match.group(1))

    def _parse_site_page(self, html_text: str):
        # TODO
        pass

    def _parse_user_detail_info(self, html_text: str):
        """
        Parse extra user details: join date and level.

        :param html_text: user-detail page HTML
        :return: None
        """
        html = etree.HTML(html_text)
        if not html:
            return None

        # User level
        user_levels_text = html.xpath('//div[contains(@class, "content")]//span[contains(@class, "badge-user")]/text()')
        if user_levels_text:
            self.user_level = user_levels_text[0].strip()

        # Join date
        join_at_text = html.xpath('//div[contains(@class, "content")]//h4[contains(text(), "注册日期") '
                                  'or contains(text(), "註冊日期") '
                                  'or contains(text(), "Registration date")]/text()')
        if join_at_text:
            self.join_at = StringUtils.unify_datetime_str(
                join_at_text[0].replace('注册日期', '').replace('註冊日期', '').replace('Registration date', ''))

    def _parse_user_torrent_seeding_info(self, html_text: str, multi_page: bool = False) -> Optional[str]:
        """
        Seeding info.

        :param html_text: active-torrents page HTML
        :param multi_page: whether this call handles a paginated listing
        :return: next page URL, or None
        """
        html = etree.HTML(html_text)
        if not html:
            return None

        size_col = 9
        seeders_col = 2
        # Locate the "size" column from the table header classes
        if html.xpath('//thead//th[contains(@class,"size")]'):
            size_col = len(html.xpath('//thead//th[contains(@class,"size")][1]/preceding-sibling::th')) + 1
        # Locate the "seeders" column from the table header classes
        if html.xpath('//thead//th[contains(@class,"seeders")]'):
            seeders_col = len(html.xpath('//thead//th[contains(@class,"seeders")]/preceding-sibling::th')) + 1

        page_seeding = 0
        page_seeding_size = 0
        page_seeding_info = []
        seeding_sizes = html.xpath(f'//tr[position()]/td[{size_col}]')
        seeding_seeders = html.xpath(f'//tr[position()]/td[{seeders_col}]')
        if seeding_sizes and seeding_seeders:
            page_seeding = len(seeding_sizes)

            for i in range(0, len(seeding_sizes)):
                size = StringUtils.num_filesize(seeding_sizes[i].xpath("string(.)").strip())
                seeders = StringUtils.str_int(seeding_seeders[i].xpath("string(.)").strip())

                page_seeding_size += size
                page_seeding_info.append([seeders, size])

        self.seeding += page_seeding
        self.seeding_size += page_seeding_size
        self.seeding_info.extend(page_seeding_info)

        # Check whether a next page exists
        next_page = None
        next_pages = html.xpath('//ul[@class="pagination"]/li[contains(@class,"active")]/following-sibling::li')
        if next_pages and len(next_pages) > 1:
            page_num = next_pages[0].xpath("string(.)").strip()
            if page_num.isdigit():
                next_page = f"{self._torrent_seeding_page}&page={page_num}"

        return next_page

    def _parse_user_traffic_info(self, html_text: str):
        # Fallback regex-based extraction of upload / download / ratio
        html_text = self._prepare_html_text(html_text)
        upload_match = re.search(r"[^总]上[传傳]量?[::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+[KMGTPI]*B)", html_text,
                                 re.IGNORECASE)
        self.upload = StringUtils.num_filesize(upload_match.group(1).strip()) if upload_match else 0
        download_match = re.search(r"[^总子影力]下[载載]量?[::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+[KMGTPI]*B)", html_text,
                                   re.IGNORECASE)
        self.download = StringUtils.num_filesize(download_match.group(1).strip()) if download_match else 0
        ratio_match = re.search(r"分享率[::_<>/a-zA-Z-=\"'\s#;]+([\d,.\s]+)", html_text)
        self.ratio = StringUtils.str_float(ratio_match.group(1)) if (
                ratio_match and ratio_match.group(1).strip()) else 0.0

    def _parse_message_unread_links(self, html_text: str, msg_links: list) -> Optional[str]:
        return None

    def _parse_message_content(self, html_text):
        return None, None, None
|
||||
@@ -1,629 +0,0 @@
|
||||
import ipaddress
|
||||
from typing import List, Tuple, Dict, Any
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.modules.emby import Emby
|
||||
from app.modules.jellyfin import Jellyfin
|
||||
from app.modules.plex import Plex
|
||||
from app.modules.qbittorrent import Qbittorrent
|
||||
from app.modules.transmission import Transmission
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import NotificationType, WebhookEventInfo
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.ip import IpUtils
|
||||
|
||||
|
||||
class SpeedLimiter(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "播放限速"
|
||||
# 插件描述
|
||||
plugin_desc = "外网播放媒体库视频时,自动对下载器进行限速。"
|
||||
# 插件图标
|
||||
plugin_icon = "SpeedLimiter.jpg"
|
||||
# 主题色
|
||||
plugin_color = "#183883"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "Shurelol"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/Shurelol"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "speedlimit_"
|
||||
# 加载顺序
|
||||
plugin_order = 11
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_scheduler = None
|
||||
_qb = None
|
||||
_tr = None
|
||||
_enabled: bool = False
|
||||
_notify: bool = False
|
||||
_interval: int = 60
|
||||
_downloader: list = []
|
||||
_play_up_speed: float = 0
|
||||
_play_down_speed: float = 0
|
||||
_noplay_up_speed: float = 0
|
||||
_noplay_down_speed: float = 0
|
||||
_bandwidth: float = 0
|
||||
_allocation_ratio: str = ""
|
||||
_auto_limit: bool = False
|
||||
_limit_enabled: bool = False
|
||||
# 不限速地址
|
||||
_unlimited_ips = {}
|
||||
# 当前限速状态
|
||||
_current_state = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._notify = config.get("notify")
|
||||
self._play_up_speed = float(config.get("play_up_speed")) if config.get("play_up_speed") else 0
|
||||
self._play_down_speed = float(config.get("play_down_speed")) if config.get("play_down_speed") else 0
|
||||
self._noplay_up_speed = float(config.get("noplay_up_speed")) if config.get("noplay_up_speed") else 0
|
||||
self._noplay_down_speed = float(config.get("noplay_down_speed")) if config.get("noplay_down_speed") else 0
|
||||
self._current_state = f"U:{self._noplay_up_speed},D:{self._noplay_down_speed}"
|
||||
try:
|
||||
# 总带宽
|
||||
self._bandwidth = int(float(config.get("bandwidth") or 0)) * 1000000
|
||||
# 自动限速开关
|
||||
if self._bandwidth > 0:
|
||||
self._auto_limit = True
|
||||
else:
|
||||
self._auto_limit = False
|
||||
except Exception as e:
|
||||
logger.error(f"智能限速上行带宽设置错误:{str(e)}")
|
||||
self._bandwidth = 0
|
||||
|
||||
# 限速服务开关
|
||||
self._limit_enabled = True if (self._play_up_speed
|
||||
or self._play_down_speed
|
||||
or self._auto_limit) else False
|
||||
self._allocation_ratio = config.get("allocation_ratio") or ""
|
||||
# 不限速地址
|
||||
self._unlimited_ips["ipv4"] = config.get("ipv4") or ""
|
||||
self._unlimited_ips["ipv6"] = config.get("ipv6") or ""
|
||||
|
||||
self._downloader = config.get("downloader") or []
|
||||
if self._downloader:
|
||||
if 'qbittorrent' in self._downloader:
|
||||
self._qb = Qbittorrent()
|
||||
if 'transmission' in self._downloader:
|
||||
self._tr = Transmission()
|
||||
|
||||
# 移出现有任务
|
||||
self.stop_service()
|
||||
|
||||
# 启动限速任务
|
||||
if self._enabled and self._limit_enabled:
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
self._scheduler.add_job(func=self.check_playing_sessions,
|
||||
trigger='interval',
|
||||
seconds=self._interval,
|
||||
name="播放限速检查")
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
logger.info("播放限速检查服务启动")
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '发送通知',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'model': 'downloader',
|
||||
'label': '下载器',
|
||||
'items': [
|
||||
{'title': 'Qbittorrent', 'value': 'qbittorrent'},
|
||||
{'title': 'Transmission', 'value': 'transmission'},
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'play_up_speed',
|
||||
'label': '播放限速(上传)',
|
||||
'placeholder': 'KB/s'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'play_down_speed',
|
||||
'label': '播放限速(下载)',
|
||||
'placeholder': 'KB/s'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'noplay_up_speed',
|
||||
'label': '未播放限速(上传)',
|
||||
'placeholder': 'KB/s'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'noplay_down_speed',
|
||||
'label': '未播放限速(下载)',
|
||||
'placeholder': 'KB/s'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'bandwidth',
|
||||
'label': '智能限速上行带宽',
|
||||
'placeholder': 'Mbps'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'allocation_ratio',
|
||||
'label': '智能限速分配比例',
|
||||
'items': [
|
||||
{'title': '平均', 'value': ''},
|
||||
{'title': '1:9', 'value': '1:9'},
|
||||
{'title': '2:8', 'value': '2:8'},
|
||||
{'title': '3:7', 'value': '3:7'},
|
||||
{'title': '4:6', 'value': '4:6'},
|
||||
{'title': '6:4', 'value': '6:4'},
|
||||
{'title': '7:3', 'value': '7:3'},
|
||||
{'title': '8:2', 'value': '8:2'},
|
||||
{'title': '9:1', 'value': '9:1'},
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'ipv4',
|
||||
'label': '不限速地址范围(ipv4)',
|
||||
'placeholder': '留空默认不限速内网ipv4'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'ipv6',
|
||||
'label': '不限速地址范围(ipv6)',
|
||||
'placeholder': '留空默认不限速内网ipv6'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"notify": True,
|
||||
"downloader": [],
|
||||
"play_up_speed": None,
|
||||
"play_down_speed": None,
|
||||
"noplay_up_speed": None,
|
||||
"noplay_down_speed": None,
|
||||
"bandwidth": None,
|
||||
"allocation_ratio": "",
|
||||
"ipv4": "",
|
||||
"ipv6": ""
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
@eventmanager.register(EventType.WebhookMessage)
|
||||
def check_playing_sessions(self, event: Event = None):
|
||||
"""
|
||||
检查播放会话
|
||||
"""
|
||||
if not self._qb and not self._tr:
|
||||
return
|
||||
if not self._enabled:
|
||||
return
|
||||
if event:
|
||||
event_data: WebhookEventInfo = event.event_data
|
||||
if event_data.event not in [
|
||||
"playback.start",
|
||||
"PlaybackStart",
|
||||
"media.play",
|
||||
"media.stop",
|
||||
"PlaybackStop",
|
||||
"playback.stop"
|
||||
]:
|
||||
return
|
||||
# 当前播放的总比特率
|
||||
total_bit_rate = 0
|
||||
# 媒体服务器类型,多个以,分隔
|
||||
if not settings.MEDIASERVER:
|
||||
return
|
||||
media_servers = settings.MEDIASERVER.split(',')
|
||||
# 查询所有媒体服务器状态
|
||||
for media_server in media_servers:
|
||||
# 查询播放中会话
|
||||
playing_sessions = []
|
||||
if media_server == "emby":
|
||||
req_url = "[HOST]emby/Sessions?api_key=[APIKEY]"
|
||||
try:
|
||||
res = Emby().get_data(req_url)
|
||||
if res and res.status_code == 200:
|
||||
sessions = res.json()
|
||||
for session in sessions:
|
||||
if session.get("NowPlayingItem") and not session.get("PlayState", {}).get("IsPaused"):
|
||||
playing_sessions.append(session)
|
||||
except Exception as e:
|
||||
logger.error(f"获取Emby播放会话失败:{str(e)}")
|
||||
continue
|
||||
# 计算有效比特率
|
||||
for session in playing_sessions:
|
||||
# 设置了不限速范围则判断session ip是否在不限速范围内
|
||||
if self._unlimited_ips["ipv4"] or self._unlimited_ips["ipv6"]:
|
||||
if not self.__allow_access(self._unlimited_ips, session.get("RemoteEndPoint")) \
|
||||
and session.get("NowPlayingItem", {}).get("MediaType") == "Video":
|
||||
total_bit_rate += int(session.get("NowPlayingItem", {}).get("Bitrate") or 0)
|
||||
# 未设置不限速范围,则默认不限速内网ip
|
||||
elif not IpUtils.is_private_ip(session.get("RemoteEndPoint")) \
|
||||
and session.get("NowPlayingItem", {}).get("MediaType") == "Video":
|
||||
total_bit_rate += int(session.get("NowPlayingItem", {}).get("Bitrate") or 0)
|
||||
elif media_server == "jellyfin":
|
||||
req_url = "[HOST]Sessions?api_key=[APIKEY]"
|
||||
try:
|
||||
res = Jellyfin().get_data(req_url)
|
||||
if res and res.status_code == 200:
|
||||
sessions = res.json()
|
||||
for session in sessions:
|
||||
if session.get("NowPlayingItem") and not session.get("PlayState", {}).get("IsPaused"):
|
||||
playing_sessions.append(session)
|
||||
except Exception as e:
|
||||
logger.error(f"获取Jellyfin播放会话失败:{str(e)}")
|
||||
continue
|
||||
# 计算有效比特率
|
||||
for session in playing_sessions:
|
||||
# 设置了不限速范围则判断session ip是否在不限速范围内
|
||||
if self._unlimited_ips["ipv4"] or self._unlimited_ips["ipv6"]:
|
||||
if not self.__allow_access(self._unlimited_ips, session.get("RemoteEndPoint")) \
|
||||
and session.get("NowPlayingItem", {}).get("MediaType") == "Video":
|
||||
media_streams = session.get("NowPlayingItem", {}).get("MediaStreams") or []
|
||||
for media_stream in media_streams:
|
||||
total_bit_rate += int(media_stream.get("BitRate") or 0)
|
||||
# 未设置不限速范围,则默认不限速内网ip
|
||||
elif not IpUtils.is_private_ip(session.get("RemoteEndPoint")) \
|
||||
and session.get("NowPlayingItem", {}).get("MediaType") == "Video":
|
||||
media_streams = session.get("NowPlayingItem", {}).get("MediaStreams") or []
|
||||
for media_stream in media_streams:
|
||||
total_bit_rate += int(media_stream.get("BitRate") or 0)
|
||||
elif media_server == "plex":
|
||||
_plex = Plex().get_plex()
|
||||
if _plex:
|
||||
sessions = _plex.sessions()
|
||||
for session in sessions:
|
||||
bitrate = sum([m.bitrate or 0 for m in session.media])
|
||||
playing_sessions.append({
|
||||
"type": session.TAG,
|
||||
"bitrate": bitrate,
|
||||
"address": session.player.address
|
||||
})
|
||||
# 计算有效比特率
|
||||
for session in playing_sessions:
|
||||
# 设置了不限速范围则判断session ip是否在不限速范围内
|
||||
if self._unlimited_ips["ipv4"] or self._unlimited_ips["ipv6"]:
|
||||
if not self.__allow_access(self._unlimited_ips, session.get("address")) \
|
||||
and session.get("type") == "Video":
|
||||
total_bit_rate += int(session.get("bitrate") or 0)
|
||||
# 未设置不限速范围,则默认不限速内网ip
|
||||
elif not IpUtils.is_private_ip(session.get("address")) \
|
||||
and session.get("type") == "Video":
|
||||
total_bit_rate += int(session.get("bitrate") or 0)
|
||||
|
||||
if total_bit_rate:
|
||||
# 开启智能限速计算上传限速
|
||||
if self._auto_limit:
|
||||
play_up_speed = self.__calc_limit(total_bit_rate)
|
||||
else:
|
||||
play_up_speed = self._play_up_speed
|
||||
|
||||
# 当前正在播放,开始限速
|
||||
self.__set_limiter(limit_type="播放", upload_limit=play_up_speed,
|
||||
download_limit=self._play_down_speed)
|
||||
else:
|
||||
# 当前没有播放,取消限速
|
||||
self.__set_limiter(limit_type="未播放", upload_limit=self._noplay_up_speed,
|
||||
download_limit=self._noplay_down_speed)
|
||||
|
||||
def __calc_limit(self, total_bit_rate: float) -> float:
|
||||
"""
|
||||
计算智能上传限速
|
||||
"""
|
||||
if not self._bandwidth:
|
||||
return 10
|
||||
return round((self._bandwidth - total_bit_rate) / 8 / 1024, 2)
|
||||
|
||||
def __set_limiter(self, limit_type: str, upload_limit: float, download_limit: float):
|
||||
"""
|
||||
设置限速
|
||||
"""
|
||||
if not self._qb and not self._tr:
|
||||
return
|
||||
state = f"U:{upload_limit},D:{download_limit}"
|
||||
if self._current_state == state:
|
||||
# 限速状态没有改变
|
||||
return
|
||||
else:
|
||||
self._current_state = state
|
||||
|
||||
try:
|
||||
cnt = 0
|
||||
for download in self._downloader:
|
||||
if self._auto_limit and limit_type == "播放":
|
||||
# 开启了播放智能限速
|
||||
if len(self._downloader) == 1:
|
||||
# 只有一个下载器
|
||||
upload_limit = int(upload_limit)
|
||||
else:
|
||||
# 多个下载器
|
||||
if not self._allocation_ratio:
|
||||
# 平均
|
||||
upload_limit = int(upload_limit / len(self._downloader))
|
||||
else:
|
||||
# 按比例
|
||||
allocation_count = sum([int(i) for i in self._allocation_ratio.split(":")])
|
||||
upload_limit = int(upload_limit * int(self._allocation_ratio.split(":")[cnt]) / allocation_count)
|
||||
cnt += 1
|
||||
if upload_limit:
|
||||
text = f"上传:{upload_limit} KB/s"
|
||||
else:
|
||||
text = f"上传:未限速"
|
||||
if download_limit:
|
||||
text = f"{text}\n下载:{download_limit} KB/s"
|
||||
else:
|
||||
text = f"{text}\n下载:未限速"
|
||||
if str(download) == 'qbittorrent':
|
||||
if self._qb:
|
||||
self._qb.set_speed_limit(download_limit=download_limit, upload_limit=upload_limit)
|
||||
# 发送通知
|
||||
if self._notify:
|
||||
title = "【播放限速】"
|
||||
if upload_limit or download_limit:
|
||||
subtitle = f"Qbittorrent 开始{limit_type}限速"
|
||||
self.post_message(
|
||||
mtype=NotificationType.MediaServer,
|
||||
title=title,
|
||||
text=f"{subtitle}\n{text}"
|
||||
)
|
||||
else:
|
||||
self.post_message(
|
||||
mtype=NotificationType.MediaServer,
|
||||
title=title,
|
||||
text=f"Qbittorrent 已取消限速"
|
||||
)
|
||||
else:
|
||||
if self._tr:
|
||||
self._tr.set_speed_limit(download_limit=download_limit, upload_limit=upload_limit)
|
||||
# 发送通知
|
||||
if self._notify:
|
||||
title = "【播放限速】"
|
||||
if upload_limit or download_limit:
|
||||
subtitle = f"Transmission 开始{limit_type}限速"
|
||||
self.post_message(
|
||||
mtype=NotificationType.MediaServer,
|
||||
title=title,
|
||||
text=f"{subtitle}\n{text}"
|
||||
)
|
||||
else:
|
||||
self.post_message(
|
||||
mtype=NotificationType.MediaServer,
|
||||
title=title,
|
||||
text=f"Transmission 已取消限速"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"设置限速失败:{str(e)}")
|
||||
|
||||
@staticmethod
|
||||
def __allow_access(allow_ips: dict, ip: str) -> bool:
|
||||
"""
|
||||
判断IP是否合法
|
||||
:param allow_ips: 充许的IP范围 {"ipv4":, "ipv6":}
|
||||
:param ip: 需要检查的ip
|
||||
"""
|
||||
if not allow_ips:
|
||||
return True
|
||||
try:
|
||||
ipaddr = ipaddress.ip_address(ip)
|
||||
if ipaddr.version == 4:
|
||||
if not allow_ips.get('ipv4'):
|
||||
return True
|
||||
allow_ipv4s = allow_ips.get('ipv4').split(",")
|
||||
for allow_ipv4 in allow_ipv4s:
|
||||
if ipaddr in ipaddress.ip_network(allow_ipv4, strict=False):
|
||||
return True
|
||||
elif ipaddr.ipv4_mapped:
|
||||
if not allow_ips.get('ipv4'):
|
||||
return True
|
||||
allow_ipv4s = allow_ips.get('ipv4').split(",")
|
||||
for allow_ipv4 in allow_ipv4s:
|
||||
if ipaddr.ipv4_mapped in ipaddress.ip_network(allow_ipv4, strict=False):
|
||||
return True
|
||||
else:
|
||||
if not allow_ips.get('ipv6'):
|
||||
return True
|
||||
allow_ipv6s = allow_ips.get('ipv6').split(",")
|
||||
for allow_ipv6 in allow_ipv6s:
|
||||
if ipaddr in ipaddress.ip_network(allow_ipv6, strict=False):
|
||||
return True
|
||||
except Exception as err:
|
||||
print(str(err))
|
||||
return False
|
||||
return False
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
@@ -1,581 +0,0 @@
|
||||
import time
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from app.core.config import settings
|
||||
from app.db.downloadhistory_oper import DownloadHistoryOper
|
||||
from app.db.transferhistory_oper import TransferHistoryOper
|
||||
from app.log import logger
|
||||
from app.modules.qbittorrent import Qbittorrent
|
||||
from app.modules.transmission import Transmission
|
||||
from app.plugins import _PluginBase
|
||||
|
||||
|
||||
class SyncDownloadFiles(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "下载器文件同步"
|
||||
# 插件描述
|
||||
plugin_desc = "同步下载器的文件信息到数据库,删除文件时联动删除下载任务。"
|
||||
# 插件图标
|
||||
plugin_icon = "sync_file.png"
|
||||
# 主题色
|
||||
plugin_color = "#4686E3"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/thsrite"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "syncdownloadfiles_"
|
||||
# 加载顺序
|
||||
plugin_order = 20
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_enabled = False
|
||||
# 任务执行间隔
|
||||
_time = None
|
||||
qb = None
|
||||
tr = None
|
||||
_onlyonce = False
|
||||
_history = False
|
||||
_clear = False
|
||||
_downloaders = []
|
||||
_dirs = None
|
||||
downloadhis = None
|
||||
transferhis = None
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
self.qb = Qbittorrent()
|
||||
self.tr = Transmission()
|
||||
self.downloadhis = DownloadHistoryOper()
|
||||
self.transferhis = TransferHistoryOper()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get('enabled')
|
||||
self._time = config.get('time') or 6
|
||||
self._history = config.get('history')
|
||||
self._clear = config.get('clear')
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
self._downloaders = config.get('downloaders') or []
|
||||
self._dirs = config.get("dirs") or ""
|
||||
|
||||
if self._clear:
|
||||
# 清理下载器文件记录
|
||||
self.downloadhis.truncate_files()
|
||||
# 清理下载器最后处理记录
|
||||
for downloader in self._downloaders:
|
||||
# 获取最后同步时间
|
||||
self.del_data(f"last_sync_time_{downloader}")
|
||||
# 关闭clear
|
||||
self._clear = False
|
||||
self.__update_config()
|
||||
|
||||
if self._onlyonce:
|
||||
# 执行一次
|
||||
# 关闭onlyonce
|
||||
self._onlyonce = False
|
||||
self.__update_config()
|
||||
|
||||
self.sync()
|
||||
|
||||
if self._enabled:
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
if self._time:
|
||||
try:
|
||||
self._scheduler.add_job(func=self.sync,
|
||||
trigger="interval",
|
||||
hours=float(str(self._time).strip()),
|
||||
name="自动同步下载器文件记录")
|
||||
logger.info(f"自动同步下载器文件记录服务启动,时间间隔 {self._time} 小时")
|
||||
except Exception as err:
|
||||
logger.error(f"定时任务配置错误:{str(err)}")
|
||||
|
||||
# 启动任务
|
||||
if self._scheduler.get_jobs():
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
else:
|
||||
self._enabled = False
|
||||
self.__update_config()
|
||||
|
||||
def sync(self):
|
||||
"""
|
||||
同步所选下载器种子记录
|
||||
"""
|
||||
start_time = datetime.now()
|
||||
logger.info("开始同步下载器任务文件记录")
|
||||
|
||||
if not self._downloaders:
|
||||
logger.error("未选择同步下载器,停止运行")
|
||||
return
|
||||
|
||||
# 遍历下载器同步记录
|
||||
for downloader in self._downloaders:
|
||||
# 获取最后同步时间
|
||||
last_sync_time = self.get_data(f"last_sync_time_{downloader}")
|
||||
|
||||
logger.info(f"开始扫描下载器 {downloader} ...")
|
||||
downloader_obj = self.__get_downloader(downloader)
|
||||
# 获取下载器中已完成的种子
|
||||
torrents = downloader_obj.get_completed_torrents()
|
||||
if torrents:
|
||||
logger.info(f"下载器 {downloader} 已完成种子数:{len(torrents)}")
|
||||
else:
|
||||
logger.info(f"下载器 {downloader} 没有已完成种子")
|
||||
continue
|
||||
|
||||
# 把种子按照名称和种子大小分组,获取添加时间最早的一个,认定为是源种子,其余为辅种
|
||||
torrents = self.__get_origin_torrents(torrents, downloader)
|
||||
logger.info(f"下载器 {downloader} 去除辅种,获取到源种子数:{len(torrents)}")
|
||||
|
||||
for torrent in torrents:
|
||||
# 返回false,标识后续种子已被同步
|
||||
sync_flag = self.__compare_time(torrent, downloader, last_sync_time)
|
||||
|
||||
if not sync_flag:
|
||||
logger.info(f"最后同步时间{last_sync_time}, 之前种子已被同步,结束当前下载器 {downloader} 任务")
|
||||
break
|
||||
|
||||
# 获取种子hash
|
||||
hash_str = self.__get_hash(torrent, downloader)
|
||||
|
||||
# 判断是否是mp下载,判断download_hash是否在downloadhistory表中,是则不处理
|
||||
downloadhis = self.downloadhis.get_by_hash(hash_str)
|
||||
if downloadhis:
|
||||
downlod_files = self.downloadhis.get_files_by_hash(hash_str)
|
||||
if downlod_files:
|
||||
logger.info(f"种子 {hash_str} 通过MoviePilot下载,跳过处理")
|
||||
continue
|
||||
|
||||
# 获取种子download_dir
|
||||
download_dir = self.__get_download_dir(torrent, downloader)
|
||||
|
||||
# 处理路径映射
|
||||
if self._dirs:
|
||||
paths = self._dirs.split("\n")
|
||||
for path in paths:
|
||||
sub_paths = path.split(":")
|
||||
download_dir = download_dir.replace(sub_paths[0], sub_paths[1]).replace('\\', '/')
|
||||
|
||||
# 获取种子name
|
||||
torrent_name = self.__get_torrent_name(torrent, downloader)
|
||||
# 种子保存目录
|
||||
save_path = Path(download_dir).joinpath(torrent_name)
|
||||
# 获取种子文件
|
||||
torrent_files = self.__get_torrent_files(torrent, downloader, downloader_obj)
|
||||
logger.info(f"开始同步种子 {hash_str}, 文件数 {len(torrent_files)}")
|
||||
|
||||
download_files = []
|
||||
for file in torrent_files:
|
||||
# 过滤掉没下载的文件
|
||||
if not self.__is_download(file, downloader):
|
||||
continue
|
||||
# 种子文件路径
|
||||
file_path_str = self.__get_file_path(file, downloader)
|
||||
file_path = Path(file_path_str)
|
||||
# 只处理视频格式
|
||||
if not file_path.suffix \
|
||||
or file_path.suffix not in settings.RMT_MEDIAEXT:
|
||||
continue
|
||||
# 种子文件根路程
|
||||
root_path = file_path.parts[0]
|
||||
# 不含种子名称的种子文件相对路径
|
||||
if root_path == torrent_name:
|
||||
rel_path = str(file_path.relative_to(root_path))
|
||||
else:
|
||||
rel_path = str(file_path)
|
||||
# 完整路径
|
||||
full_path = save_path.joinpath(rel_path)
|
||||
if self._history:
|
||||
transferhis = self.transferhis.get_by_src(str(full_path))
|
||||
if transferhis and not transferhis.download_hash:
|
||||
logger.info(f"开始补充转移记录:{transferhis.id} download_hash {hash_str}")
|
||||
self.transferhis.update_download_hash(historyid=transferhis.id,
|
||||
download_hash=hash_str)
|
||||
|
||||
# 种子文件记录
|
||||
download_files.append(
|
||||
{
|
||||
"download_hash": hash_str,
|
||||
"downloader": downloader,
|
||||
"fullpath": str(full_path),
|
||||
"savepath": str(save_path),
|
||||
"filepath": rel_path,
|
||||
"torrentname": torrent_name,
|
||||
}
|
||||
)
|
||||
|
||||
if download_files:
|
||||
# 登记下载文件
|
||||
self.downloadhis.add_files(download_files)
|
||||
logger.info(f"种子 {hash_str} 同步完成")
|
||||
|
||||
logger.info(f"下载器种子文件同步完成!")
|
||||
self.save_data(f"last_sync_time_{downloader}",
|
||||
time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
|
||||
|
||||
# 计算耗时
|
||||
end_time = datetime.now()
|
||||
|
||||
logger.info(f"下载器任务文件记录已同步完成。总耗时 {(end_time - start_time).seconds} 秒")
|
||||
|
||||
def __update_config(self):
|
||||
self.update_config({
|
||||
"enabled": self._enabled,
|
||||
"time": self._time,
|
||||
"history": self._history,
|
||||
"clear": self._clear,
|
||||
"onlyonce": self._onlyonce,
|
||||
"downloaders": self._downloaders,
|
||||
"dirs": self._dirs
|
||||
})
|
||||
|
||||
@staticmethod
|
||||
def __get_origin_torrents(torrents: Any, dl_tpe: str):
|
||||
# 把种子按照名称和种子大小分组,获取添加时间最早的一个,认定为是源种子,其余为辅种
|
||||
grouped_data = {}
|
||||
|
||||
# 排序种子,根据种子添加时间倒序
|
||||
if dl_tpe == "qbittorrent":
|
||||
torrents = sorted(torrents, key=lambda x: x.get("added_on"), reverse=True)
|
||||
# 遍历原始数组,按照size和name进行分组
|
||||
for torrent in torrents:
|
||||
size = torrent.get('size')
|
||||
name = torrent.get('name')
|
||||
key = (size, name) # 使用元组作为字典的键
|
||||
|
||||
# 如果分组键不存在,则将当前元素作为最小元素添加到字典中
|
||||
if key not in grouped_data:
|
||||
grouped_data[key] = torrent
|
||||
else:
|
||||
# 如果分组键已存在,则比较当前元素的time是否更小,如果更小则更新字典中的元素
|
||||
if torrent.get('added_on') < grouped_data[key].get('added_on'):
|
||||
grouped_data[key] = torrent
|
||||
else:
|
||||
torrents = sorted(torrents, key=lambda x: x.added_date, reverse=True)
|
||||
# 遍历原始数组,按照size和name进行分组
|
||||
for torrent in torrents:
|
||||
size = torrent.total_size
|
||||
name = torrent.name
|
||||
key = (size, name) # 使用元组作为字典的键
|
||||
|
||||
# 如果分组键不存在,则将当前元素作为最小元素添加到字典中
|
||||
if key not in grouped_data:
|
||||
grouped_data[key] = torrent
|
||||
else:
|
||||
# 如果分组键已存在,则比较当前元素的time是否更小,如果更小则更新字典中的元素
|
||||
if torrent.added_date < grouped_data[key].added_date:
|
||||
grouped_data[key] = torrent
|
||||
|
||||
# 新的数组
|
||||
return list(grouped_data.values())
|
||||
|
||||
@staticmethod
|
||||
def __compare_time(torrent: Any, dl_tpe: str, last_sync_time: str = None):
|
||||
if last_sync_time:
|
||||
# 获取种子时间
|
||||
if dl_tpe == "qbittorrent":
|
||||
torrent_date = time.gmtime(torrent.get("added_on")) # 将时间戳转换为时间元组
|
||||
torrent_date = time.strftime("%Y-%m-%d %H:%M:%S", torrent_date) # 格式化时间
|
||||
else:
|
||||
torrent_date = torrent.added_date
|
||||
|
||||
# 之后的种子已经同步了
|
||||
if last_sync_time > str(torrent_date):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def __is_download(file: Any, dl_type: str):
|
||||
"""
|
||||
判断文件是否被下载
|
||||
"""
|
||||
try:
|
||||
if dl_type == "qbittorrent":
|
||||
return True
|
||||
else:
|
||||
return file.completed and file.completed > 0
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def __get_file_path(file: Any, dl_type: str):
|
||||
"""
|
||||
获取文件路径
|
||||
"""
|
||||
try:
|
||||
return file.get("name") if dl_type == "qbittorrent" else file.name
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def __get_torrent_files(torrent: Any, dl_type: str, downloader_obj):
|
||||
"""
|
||||
获取种子文件
|
||||
"""
|
||||
try:
|
||||
return torrent.files if dl_type == "qbittorrent" else downloader_obj.get_files(tid=torrent.id)
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def __get_torrent_name(torrent: Any, dl_type: str):
|
||||
"""
|
||||
获取种子name
|
||||
"""
|
||||
try:
|
||||
return torrent.get("name") if dl_type == "qbittorrent" else torrent.name
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def __get_download_dir(torrent: Any, dl_type: str):
|
||||
"""
|
||||
获取种子download_dir
|
||||
"""
|
||||
try:
|
||||
return torrent.get("save_path") if dl_type == "qbittorrent" else torrent.download_dir
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def __get_hash(torrent: Any, dl_type: str):
|
||||
"""
|
||||
获取种子hash
|
||||
"""
|
||||
try:
|
||||
return torrent.get("hash") if dl_type == "qbittorrent" else torrent.hashString
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return ""
|
||||
|
||||
def __get_downloader(self, dtype: str):
|
||||
"""
|
||||
根据类型返回下载器实例
|
||||
"""
|
||||
if dtype == "qbittorrent":
|
||||
return self.qb
|
||||
elif dtype == "transmission":
|
||||
return self.tr
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '开启插件',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'history',
|
||||
'label': '补充整理历史记录',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'clear',
|
||||
'label': '清理数据',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'time',
|
||||
'label': '同步时间间隔'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'model': 'downloaders',
|
||||
'label': '同步下载器',
|
||||
'items': [
|
||||
{'title': 'Qbittorrent', 'value': 'qbittorrent'},
|
||||
{'title': 'Transmission', 'value': 'transmission'}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextarea',
|
||||
'props': {
|
||||
'model': 'dirs',
|
||||
'label': '目录映射',
|
||||
'rows': 5,
|
||||
'placeholder': '每一行一个目录,下载器保存目录:MoviePilot映射目录'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '适用于非MoviePilot下载的任务;下载器种子数据较多时,同步时间将会较长,请耐心等候,可查看实时日志了解同步进度;时间间隔建议最少每6小时执行一次,防止上次任务没处理完。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
], {
|
||||
"enabled": False,
|
||||
"onlyonce": False,
|
||||
"history": False,
|
||||
"clear": False,
|
||||
"time": 6,
|
||||
"dirs": "",
|
||||
"downloaders": []
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
try:
|
||||
if self._scheduler:
|
||||
self._scheduler.remove_all_jobs()
|
||||
if self._scheduler.running:
|
||||
self._scheduler.shutdown()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
@@ -1,766 +0,0 @@
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Tuple, Dict, Any, Optional
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.core.config import settings
|
||||
from app.log import logger
|
||||
from app.modules.qbittorrent import Qbittorrent
|
||||
from app.modules.transmission import Transmission
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import NotificationType
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
|
||||
class TorrentRemover(_PluginBase):
    """Automatically pause or delete finished tasks in the download clients."""
    # Plugin display name
    plugin_name = "自动删种"
    # Plugin description
    plugin_desc = "自动删除下载器中的下载任务。"
    # Plugin icon
    plugin_icon = "torrent.png"
    # Theme color
    plugin_color = "#02853F"
    # Plugin version
    plugin_version = "1.0"
    # Plugin author
    plugin_author = "jxxghp"
    # Author homepage
    author_url = "https://github.com/jxxghp"
    # Prefix for the plugin's config item IDs
    plugin_config_prefix = "torrentremover_"
    # Load order
    plugin_order = 8
    # Minimum user auth level allowed to use the plugin
    auth_level = 2

    # Private state
    qb = None                 # Qbittorrent client instance
    tr = None                 # Transmission client instance
    _event = threading.Event()  # set to abort a running removal pass
    _scheduler = None
    _enabled = False
    _onlyonce = False
    _notify = False
    # action is one of: pause / delete (keep files) / deletefile
    _downloaders = []
    _action = "pause"
    _cron = None
    _samedata = False         # also process cross-seeded copies
    _mponly = False           # only torrents tagged by MoviePilot
    _size = None              # "min-max" size window in GB
    _ratio = None             # minimum share ratio
    _time = None              # minimum seeding time (hours)
    _upspeed = None           # maximum average upload speed (KB/s)
    _labels = None
    _pathkeywords = None
    _trackerkeywords = None
    _errorkeywords = None
    _torrentstates = None
    _torrentcategorys = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
    """
    Initialize the plugin from its saved configuration and (re)start the
    scheduler: a cron job when fully configured, plus an immediate one-shot
    run when "run once" was requested.
    """
    if config:
        self._enabled = config.get("enabled")
        self._onlyonce = config.get("onlyonce")
        self._notify = config.get("notify")
        self._downloaders = config.get("downloaders") or []
        self._action = config.get("action")
        self._cron = config.get("cron")
        self._samedata = config.get("samedata")
        self._mponly = config.get("mponly")
        self._size = config.get("size") or ""
        self._ratio = config.get("ratio")
        self._time = config.get("time")
        self._upspeed = config.get("upspeed")
        self._labels = config.get("labels") or ""
        self._pathkeywords = config.get("pathkeywords") or ""
        self._trackerkeywords = config.get("trackerkeywords") or ""
        self._errorkeywords = config.get("errorkeywords") or ""
        self._torrentstates = config.get("torrentstates") or ""
        self._torrentcategorys = config.get("torrentcategorys") or ""

    # Stop any scheduler left over from a previous init before re-wiring.
    self.stop_service()

    if self.get_state() or self._onlyonce:
        self._scheduler = BackgroundScheduler(timezone=settings.TZ)
        self.qb = Qbittorrent()
        self.tr = Transmission()
        if self._cron:
            try:
                self._scheduler.add_job(func=self.delete_torrents,
                                        trigger=CronTrigger.from_crontab(self._cron),
                                        name="自动删种服务")
                logger.info(f"自动删种服务启动,周期:{self._cron}")
            except Exception as err:
                # Typically an invalid crontab expression; surface it to the UI.
                logger.error(f"自动删种服务启动失败:{str(err)}")
                self.systemmessage.put(f"自动删种服务启动失败:{str(err)}")
        if self._onlyonce:
            logger.info(f"自动删种服务启动,立即运行一次")
            self._scheduler.add_job(func=self.delete_torrents, trigger='date',
                                    run_date=datetime.now(
                                        tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3)
                                    )
            # Reset the one-shot switch so the next init does not re-trigger it.
            self._onlyonce = False
            # Persist the settings (with onlyonce cleared).
            self.update_config({
                "enabled": self._enabled,
                "notify": self._notify,
                "onlyonce": self._onlyonce,
                "action": self._action,
                "cron": self._cron,
                "downloaders": self._downloaders,
                "samedata": self._samedata,
                "mponly": self._mponly,
                "size": self._size,
                "ratio": self._ratio,
                "time": self._time,
                "upspeed": self._upspeed,
                "labels": self._labels,
                "pathkeywords": self._pathkeywords,
                "trackerkeywords": self._trackerkeywords,
                "errorkeywords": self._errorkeywords,
                "torrentstates": self._torrentstates,
                "torrentcategorys": self._torrentcategorys

            })
        if self._scheduler.get_jobs():
            # Only start the scheduler when at least one job was registered.
            self._scheduler.print_jobs()
            self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
    """Plugin is active only when enabled with a cron and at least one downloader."""
    return bool(self._enabled and self._cron and self._downloaders)
|
||||
|
||||
@staticmethod
def get_command() -> List[Dict[str, Any]]:
    """
    Register remote commands for the plugin. None are provided.
    """
    pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
    """
    Register API endpoints for the plugin. None are provided.
    """
    pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
    """
    Build the configuration form: returns (page layout, default values).
    The component tree is assembled with small local builders instead of
    one large literal; the resulting structure is identical.
    """

    def _switch(model: str, label: str) -> dict:
        # VSwitch bound to a boolean config key.
        return {'component': 'VSwitch', 'props': {'model': model, 'label': label}}

    def _text(model: str, label: str, placeholder: str) -> dict:
        # VTextField bound to a string config key.
        return {'component': 'VTextField',
                'props': {'model': model, 'label': label, 'placeholder': placeholder}}

    def _col(props: dict, inner: dict) -> dict:
        # VCol wrapping a single child component.
        return {'component': 'VCol', 'props': props, 'content': [inner]}

    def _row(cols: list) -> dict:
        return {'component': 'VRow', 'content': cols}

    form = {
        'component': 'VForm',
        'content': [
            _row([
                _col({'cols': 12, 'md': 6}, _switch('enabled', '启用插件')),
                _col({'cols': 12, 'md': 6}, _switch('notify', '发送通知')),
            ]),
            _row([
                _col({'cols': 12, 'md': 6}, _text('cron', '执行周期', '0 */12 * * *')),
                _col({'cols': 12, 'md': 6}, {
                    'component': 'VSelect',
                    'props': {
                        'model': 'action',
                        'label': '动作',
                        'items': [
                            {'title': '暂停', 'value': 'pause'},
                            {'title': '删除种子', 'value': 'delete'},
                            {'title': '删除种子和文件', 'value': 'deletefile'}
                        ]
                    }
                }),
            ]),
            _row([
                _col({'cols': 12}, {
                    'component': 'VSelect',
                    'props': {
                        'chips': True,
                        'multiple': True,
                        'model': 'downloaders',
                        'label': '下载器',
                        'items': [
                            {'title': 'Qbittorrent', 'value': 'qbittorrent'},
                            {'title': 'Transmission', 'value': 'transmission'}
                        ]
                    }
                }),
            ]),
            # Filter criteria: a half-width text field per condition.
            _row([
                _col({'cols': 6}, _text(model, label, placeholder))
                for model, label, placeholder in [
                    ('size', '种子大小(GB)', '例如1-10'),
                    ('ratio', '分享率', ''),
                    ('time', '做种时间(小时)', ''),
                    ('upspeed', '平均上传速度', ''),
                    ('labels', '标签', '用,分隔多个标签'),
                    ('pathkeywords', '保存路径关键词', '支持正式表达式'),
                    ('trackerkeywords', 'Tracker关键词', '支持正式表达式'),
                    ('errorkeywords', '错误信息关键词(TR)', '支持正式表达式,仅适用于TR'),
                    ('torrentstates', '任务状态(QB)', '用,分隔多个状态,仅适用于QB'),
                    ('torrentcategorys', '任务分类', '用,分隔多个分类'),
                ]
            ]),
            _row([
                _col({'cols': 12, 'md': 4}, _switch('samedata', '处理辅种')),
                _col({'cols': 12, 'md': 4}, _switch('mponly', '仅MoviePilot任务')),
                _col({'cols': 12, 'md': 4}, _switch('onlyonce', '立即运行一次')),
            ]),
            _row([
                _col({'cols': 12}, {
                    'component': 'VAlert',
                    'props': {
                        'type': 'info',
                        'variant': 'tonal',
                        'text': '自动删种存在风险,如设置不当可能导致数据丢失!建议动作先选择暂停,确定条件正确后再改成删除。'
                    }
                }),
            ]),
        ]
    }
    defaults = {
        "enabled": False,
        "notify": False,
        "onlyonce": False,
        "action": 'pause',
        'downloaders': [],
        "cron": '0 */12 * * *',
        "samedata": False,
        "mponly": False,
        "size": "",
        "ratio": "",
        "time": "",
        "upspeed": "",
        "labels": "",
        "pathkeywords": "",
        "trackerkeywords": "",
        "errorkeywords": "",
        "torrentstates": "",
        "torrentcategorys": ""
    }
    return [form], defaults
|
||||
|
||||
def get_page(self) -> List[dict]:
    """
    Build the plugin's detail page. This plugin has no detail page.
    """
    pass
|
||||
|
||||
def stop_service(self):
    """
    Shut down the plugin: signal any in-flight removal pass to abort
    (via the shared event), then stop the scheduler.

    Fix: failures were written with print() and thus lost in service logs;
    now reported through the module logger, matching the sibling plugins.
    """
    try:
        if self._scheduler:
            self._scheduler.remove_all_jobs()
            if self._scheduler.running:
                # Let delete_torrents() notice the stop request mid-loop.
                self._event.set()
                self._scheduler.shutdown()
                self._event.clear()
            self._scheduler = None
    except Exception as e:
        logger.error("退出插件失败:%s" % str(e))
|
||||
|
||||
def __get_downloader(self, dtype: str):
    """
    Map a downloader type name to its client instance; None when unknown.
    """
    return {"qbittorrent": self.qb, "transmission": self.tr}.get(dtype)
|
||||
|
||||
def delete_torrents(self):
    """
    Scheduled job: collect torrents matching the configured conditions in
    each selected downloader and pause or delete them per the configured
    action, then optionally send one summary notification per downloader.

    Refactor: the three per-action branches duplicated the whole loop body;
    they now share one loop. Also fixes the `downlader_obj` variable typo.
    """
    # action -> summary verb used in the notification header
    summaries = {
        "pause": "共暂停{n}个种子",
        "delete": "共删除{n}个种子",
        "deletefile": "共删除{n}个种子及文件",
    }
    for downloader in self._downloaders:
        try:
            with lock:
                # Torrents that match every configured removal condition
                torrents = self.get_remove_torrents(downloader)
                logger.info(f"自动删种任务 获取符合处理条件种子数 {len(torrents)}")
                downloader_obj = self.__get_downloader(downloader)
                if self._action not in summaries:
                    # Unknown action: nothing to do for this downloader
                    continue
                message_text = f"{downloader.title()} " + summaries[self._action].format(n=len(torrents))
                for torrent in torrents:
                    if self._event.is_set():
                        # stop_service() requested an abort
                        logger.info(f"自动删种服务停止")
                        return
                    text_item = f"{torrent.get('name')} " \
                                f"来自站点:{torrent.get('site')} " \
                                f"大小:{StringUtils.str_filesize(torrent.get('size'))}"
                    if self._action == "pause":
                        downloader_obj.stop_torrents(ids=[torrent.get("id")])
                        logger.info(f"自动删种任务 暂停种子:{text_item}")
                    elif self._action == "delete":
                        # Remove the task but keep the downloaded files
                        downloader_obj.delete_torrents(delete_file=False,
                                                       ids=[torrent.get("id")])
                        logger.info(f"自动删种任务 删除种子:{text_item}")
                    else:
                        # "deletefile": remove the task and its data
                        downloader_obj.delete_torrents(delete_file=True,
                                                       ids=[torrent.get("id")])
                        logger.info(f"自动删种任务 删除种子及文件:{text_item}")
                    message_text = f"{message_text}\n{text_item}"
                if torrents and message_text and self._notify:
                    self.post_message(
                        mtype=NotificationType.SiteMessage,
                        title=f"【自动删种任务完成】",
                        text=message_text
                    )
        except Exception as e:
            logger.error(f"自动删种任务异常:{str(e)}")
|
||||
|
||||
def __get_qb_torrent(self, torrent: Any) -> Optional[dict]:
    """
    Check whether a qBittorrent task matches every configured removal
    condition; return a summary dict (id/name/site/size) or None to keep it.

    Bug fix: the size window was computed as `sizes[0] * 1024 * 1024 * 1024`
    where `sizes[0]` is a *string* from `split('-')` — string repetition, not
    multiplication — so the size filter rejected every torrent. The bounds
    are now converted with float() before scaling to bytes.
    """
    # Completion time; fall back to the added time when never completed
    date_done = torrent.completion_on if torrent.completion_on > 0 else torrent.added_on
    date_now = int(time.mktime(datetime.now().timetuple()))
    # Seeding time in seconds
    torrent_seeding_time = date_now - date_done if date_done else 0
    # Average upload speed (bytes/second)
    torrent_upload_avs = torrent.uploaded / torrent_seeding_time if torrent_seeding_time else 0
    # Size window in GB ("min-max"; a single value means min == max)
    sizes = self._size.split('-') if self._size else []
    minsize = float(sizes[0]) * 1024 * 1024 * 1024 if sizes else 0
    maxsize = float(sizes[-1]) * 1024 * 1024 * 1024 if sizes else 0
    # Share ratio must exceed the threshold
    if self._ratio and torrent.ratio <= float(self._ratio):
        return None
    # Seeding time threshold is configured in hours
    if self._time and torrent_seeding_time <= float(self._time) * 3600:
        return None
    # Size must fall strictly inside the configured window
    if self._size and (torrent.size >= maxsize or torrent.size <= minsize):
        return None
    # Only remove torrents whose average upload speed is below the limit (KB/s)
    if self._upspeed and torrent_upload_avs >= float(self._upspeed) * 1024:
        return None
    if self._pathkeywords and not re.findall(self._pathkeywords, torrent.save_path, re.I):
        return None
    if self._trackerkeywords and not re.findall(self._trackerkeywords, torrent.tracker, re.I):
        return None
    if self._torrentstates and torrent.state not in self._torrentstates:
        return None
    if self._torrentcategorys and (not torrent.category or torrent.category not in self._torrentcategorys):
        return None
    return {
        "id": torrent.hash,
        "name": torrent.name,
        "site": StringUtils.get_url_sld(torrent.tracker),
        "size": torrent.size
    }
|
||||
|
||||
def __get_tr_torrent(self, torrent: Any) -> Optional[dict]:
    """
    Check whether a Transmission task matches every configured removal
    condition; return a summary dict (id/name/site/size) or None to keep it.

    Bug fix: same as the qBittorrent variant — the GB size bounds were built
    by multiplying the *strings* from `split('-')` (string repetition), so
    the size filter rejected every torrent; now converted with float().
    """
    # Completion time; fall back to the added time when never completed
    date_done = torrent.date_done or torrent.date_added
    date_now = int(time.mktime(datetime.now().timetuple()))
    # Seeding time in seconds (date_done is a datetime here)
    torrent_seeding_time = date_now - int(time.mktime(date_done.timetuple())) if date_done else 0
    # Transmission does not expose uploaded bytes directly; derive from ratio
    torrent_uploaded = torrent.ratio * torrent.total_size
    torrent_upload_avs = torrent_uploaded / torrent_seeding_time if torrent_seeding_time else 0
    # Size window in GB ("min-max"; a single value means min == max)
    sizes = self._size.split('-') if self._size else []
    minsize = float(sizes[0]) * 1024 * 1024 * 1024 if sizes else 0
    maxsize = float(sizes[-1]) * 1024 * 1024 * 1024 if sizes else 0
    # Share ratio must exceed the threshold
    if self._ratio and torrent.ratio <= float(self._ratio):
        return None
    if self._time and torrent_seeding_time <= float(self._time) * 3600:
        return None
    if self._size and (torrent.total_size >= maxsize or torrent.total_size <= minsize):
        return None
    if self._upspeed and torrent_upload_avs >= float(self._upspeed) * 1024:
        return None
    if self._pathkeywords and not re.findall(self._pathkeywords, torrent.download_dir, re.I):
        return None
    if self._trackerkeywords:
        if not torrent.trackers:
            return None
        else:
            # Keep only when at least one tracker announce URL matches
            tacker_key_flag = False
            for tracker in torrent.trackers:
                if re.findall(self._trackerkeywords, tracker.get("announce", ""), re.I):
                    tacker_key_flag = True
                    break
            if not tacker_key_flag:
                return None
    if self._errorkeywords and not re.findall(self._errorkeywords, torrent.error_string, re.I):
        return None
    return {
        "id": torrent.hashString,
        "name": torrent.name,
        "site": torrent.trackers[0].get("sitename") if torrent.trackers else "",
        "size": torrent.total_size
    }
|
||||
|
||||
def get_remove_torrents(self, downloader: str):
    """
    Fetch torrents from the given downloader and return the list of those
    matching the removal conditions; when `samedata` is on, cross-seeded
    copies (same name and size, different hash) are appended as well.
    """
    remove_torrents = []
    # Downloader client instance
    downloader_obj = self.__get_downloader(downloader)
    # Build the tag filter from the configured labels
    if self._labels:
        tags = self._labels.split(',')
    else:
        tags = []
    if self._mponly:
        # NOTE(review): extend() iterates its argument — if settings.TORRENT_TAG
        # is a plain string this appends it character by character; confirm it
        # is a list/tuple of tags.
        tags.extend(settings.TORRENT_TAG)
    # Query torrents, optionally filtered by tags
    torrents, error_flag = downloader_obj.get_torrents(tags=tags or None)
    if error_flag:
        return []
    # Evaluate each torrent against the removal conditions
    for torrent in torrents:
        if downloader == "qbittorrent":
            item = self.__get_qb_torrent(torrent)
        else:
            item = self.__get_tr_torrent(torrent)
        if not item:
            continue
        remove_torrents.append(item)
    # Cross-seed handling: also remove torrents sharing name+size
    if self._samedata and remove_torrents:
        remove_ids = [t.get("id") for t in remove_torrents]
        remove_torrents_plus = []
        for remove_torrent in remove_torrents:
            name = remove_torrent.get("name")
            size = remove_torrent.get("size")
            for torrent in torrents:
                if downloader == "qbittorrent":
                    plus_id = torrent.hash
                    plus_name = torrent.name
                    plus_size = torrent.size
                    plus_site = StringUtils.get_url_sld(torrent.tracker)
                else:
                    plus_id = torrent.hashString
                    plus_name = torrent.name
                    plus_size = torrent.total_size
                    plus_site = torrent.trackers[0].get("sitename") if torrent.trackers else ""
                # Match by name and size; skip ones already selected
                # NOTE(review): a cross-seed matched by several selected torrents
                # can be appended more than once (no dedup against
                # remove_torrents_plus) — confirm whether duplicates are harmless
                # downstream.
                if plus_name == name \
                        and plus_size == size \
                        and plus_id not in remove_ids:
                    remove_torrents_plus.append(
                        {
                            "id": plus_id,
                            "name": plus_name,
                            "site": plus_site,
                            "size": plus_size
                        }
                    )
        if remove_torrents_plus:
            remove_torrents.extend(remove_torrents_plus)
    return remove_torrents
|
||||
@@ -1,822 +0,0 @@
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from threading import Event
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from bencode import bdecode, bencode
|
||||
|
||||
from app.core.config import settings
|
||||
from app.helper.torrent import TorrentHelper
|
||||
from app.log import logger
|
||||
from app.modules.qbittorrent import Qbittorrent
|
||||
from app.modules.transmission import Transmission
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import NotificationType
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class TorrentTransfer(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "自动转移做种"
|
||||
# 插件描述
|
||||
plugin_desc = "定期转移下载器中的做种任务到另一个下载器。"
|
||||
# 插件图标
|
||||
plugin_icon = "torrenttransfer.jpg"
|
||||
# 主题色
|
||||
plugin_color = "#272636"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/jxxghp"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "torrenttransfer_"
|
||||
# 加载顺序
|
||||
plugin_order = 18
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
_scheduler = None
|
||||
qb = None
|
||||
tr = None
|
||||
torrent = None
|
||||
# 开关
|
||||
_enabled = False
|
||||
_cron = None
|
||||
_onlyonce = False
|
||||
_fromdownloader = None
|
||||
_todownloader = None
|
||||
_frompath = None
|
||||
_topath = None
|
||||
_notify = False
|
||||
_nolabels = None
|
||||
_nopaths = None
|
||||
_deletesource = False
|
||||
_fromtorrentpath = None
|
||||
_autostart = False
|
||||
# 退出事件
|
||||
_event = Event()
|
||||
# 待检查种子清单
|
||||
_recheck_torrents = {}
|
||||
_is_recheck_running = False
|
||||
# 任务标签
|
||||
_torrent_tags = ["已整理", "转移做种"]
|
||||
|
||||
def init_plugin(self, config: dict = None):
    """
    Initialize the transfer plugin from saved configuration, validate the
    source settings, and (re)start the scheduler: a cron job when fully
    configured, an immediate one-shot run when requested, and a periodic
    recheck job when auto-start is enabled.
    """
    self.torrent = TorrentHelper()
    # Load configuration
    if config:
        self._enabled = config.get("enabled")
        self._onlyonce = config.get("onlyonce")
        self._cron = config.get("cron")
        self._notify = config.get("notify")
        self._nolabels = config.get("nolabels")
        self._frompath = config.get("frompath")
        self._topath = config.get("topath")
        self._fromdownloader = config.get("fromdownloader")
        self._todownloader = config.get("todownloader")
        self._deletesource = config.get("deletesource")
        self._fromtorrentpath = config.get("fromtorrentpath")
        self._nopaths = config.get("nopaths")
        self._autostart = config.get("autostart")

    # Stop any scheduler left over from a previous init
    self.stop_service()

    # Start the cron job and/or the immediate one-shot run
    if self.get_state() or self._onlyonce:
        self.qb = Qbittorrent()
        self.tr = Transmission()
        # Validate configuration before scheduling anything
        if self._fromtorrentpath and not Path(self._fromtorrentpath).exists():
            logger.error(f"源下载器种子文件保存路径不存在:{self._fromtorrentpath}")
            self.systemmessage.put(f"源下载器种子文件保存路径不存在:{self._fromtorrentpath}")
            return
        if self._fromdownloader == self._todownloader:
            logger.error(f"源下载器和目的下载器不能相同")
            self.systemmessage.put(f"源下载器和目的下载器不能相同")
            return
        self._scheduler = BackgroundScheduler(timezone=settings.TZ)
        if self._cron:
            logger.info(f"转移做种服务启动,周期:{self._cron}")
            try:
                self._scheduler.add_job(self.transfer,
                                        CronTrigger.from_crontab(self._cron))
            except Exception as e:
                # Typically an invalid crontab expression; surface it to the UI
                logger.error(f"转移做种服务启动失败:{str(e)}")
                self.systemmessage.put(f"转移做种服务启动失败:{str(e)}")
                return
        if self._onlyonce:
            logger.info(f"转移做种服务启动,立即运行一次")
            self._scheduler.add_job(self.transfer, 'date',
                                    run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(
                                        seconds=3))
            # Reset the one-shot switch so the next init does not re-trigger it
            self._onlyonce = False
            self.update_config({
                "enabled": self._enabled,
                "onlyonce": self._onlyonce,
                "cron": self._cron,
                "notify": self._notify,
                "nolabels": self._nolabels,
                "frompath": self._frompath,
                "topath": self._topath,
                "fromdownloader": self._fromdownloader,
                "todownloader": self._todownloader,
                "deletesource": self._deletesource,
                "fromtorrentpath": self._fromtorrentpath,
                "nopaths": self._nopaths,
                "autostart": self._autostart
            })
        if self._scheduler.get_jobs():
            if self._autostart:
                # Periodically re-check transferred torrents and start them
                # once verification completes
                self._scheduler.add_job(self.check_recheck, 'interval', minutes=3)
            # Only start the scheduler when at least one job was registered
            self._scheduler.print_jobs()
            self._scheduler.start()
|
||||
|
||||
def get_state(self):
    """
    Plugin is active only when enabled and the cron, both downloaders and
    the source torrent-file path are all configured.
    """
    required = (self._enabled,
                self._cron,
                self._fromdownloader,
                self._todownloader,
                self._fromtorrentpath)
    return all(required)
|
||||
|
||||
@staticmethod
def get_command() -> List[Dict[str, Any]]:
    """
    Register remote commands for the plugin. None are provided.
    """
    pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
    """
    Register API endpoints for the plugin. None are provided.
    """
    pass
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
    """
    Build the configuration form: returns (page layout, default values).
    The component tree is assembled with small local builders instead of
    one large literal; the resulting structure is identical.
    """

    def _switch(model: str, label: str) -> dict:
        return {'component': 'VSwitch', 'props': {'model': model, 'label': label}}

    def _text(model: str, label: str, placeholder: str = None) -> dict:
        props = {'model': model, 'label': label}
        if placeholder is not None:
            props['placeholder'] = placeholder
        return {'component': 'VTextField', 'props': props}

    def _select(model: str, label: str) -> dict:
        # Downloader selector (same item list for source and destination).
        return {
            'component': 'VSelect',
            'props': {
                'model': model,
                'label': label,
                'items': [
                    {'title': 'Qbittorrent', 'value': 'qbittorrent'},
                    {'title': 'Transmission', 'value': 'transmission'}
                ]
            }
        }

    def _col(props: dict, inner: dict) -> dict:
        return {'component': 'VCol', 'props': props, 'content': [inner]}

    def _row(cols: list) -> dict:
        return {'component': 'VRow', 'content': cols}

    half = {'cols': 12, 'md': 6}
    third = {'cols': 12, 'md': 4}
    form = {
        'component': 'VForm',
        'content': [
            _row([
                _col(dict(half), _switch('enabled', '启用插件')),
                _col(dict(half), _switch('notify', '发送通知')),
            ]),
            _row([
                _col(dict(half), _text('cron', '执行周期', '0 0 0 ? *')),
                _col(dict(half), _text('nolabels', '不转移种子标签')),
            ]),
            _row([
                _col({'cols': 12}, _select('fromdownloader', '源下载器')),
                _col(dict(half), _text('fromtorrentpath', '源下载器种子文件路径',
                                       'BT_backup、torrents')),
                _col(dict(half), _text('frompath', '源数据文件根路径',
                                       '根路径,留空不进行路径转换')),
            ]),
            _row([
                _col({'cols': 12}, _select('todownloader', '目的下载器')),
                _col({'cols': 12}, _text('topath', '目的数据文件根路径',
                                         '根路径,留空不进行路径转换')),
            ]),
            _row([
                _col({'cols': 12}, {
                    'component': 'VTextarea',
                    'props': {
                        'model': 'nopaths',
                        'label': '不转移数据文件目录',
                        'rows': 3,
                        'placeholder': '每一行一个目录'
                    }
                }),
            ]),
            _row([
                _col(dict(third), _switch('autostart', '校验完成后自动开始')),
                _col(dict(third), _switch('deletesource', '删除源种子')),
                _col(dict(third), _switch('onlyonce', '立即运行一次')),
            ]),
        ]
    }
    defaults = {
        "enabled": False,
        "notify": False,
        "onlyonce": False,
        "cron": "",
        "nolabels": "",
        "frompath": "",
        "topath": "",
        "fromdownloader": "",
        "todownloader": "",
        "deletesource": False,
        "fromtorrentpath": "",
        "nopaths": "",
        "autostart": True
    }
    return [form], defaults
|
||||
|
||||
def get_page(self) -> List[dict]:
    """
    Build the plugin's detail page. This plugin has no detail page.
    """
    pass
|
||||
|
||||
def __get_downloader(self, dtype: str):
    """
    Map a downloader type name to its client instance; None when unknown.
    """
    return {"qbittorrent": self.qb, "transmission": self.tr}.get(dtype)
|
||||
|
||||
def __download(self, downloader: str, content: bytes,
               save_path: str) -> Optional[str]:
    """
    Add a (paused) torrent to the destination downloader and return its
    hash, or None on failure.

    Consistency fix: the tag/label lists were hard-coded literals duplicating
    the class constant `_torrent_tags` (same values); they now reference it.
    """
    if downloader == "qbittorrent":
        # Extra random tag so the newly added torrent's hash can be located,
        # since qBittorrent's add API does not return it.
        tag = StringUtils.generate_random_str(10)
        state = self.qb.add_torrent(content=content,
                                    download_dir=save_path,
                                    is_paused=True,
                                    tag=[*self._torrent_tags, tag])
        if not state:
            return None
        torrent_hash = self.qb.get_torrent_id_by_tag(tags=tag)
        if not torrent_hash:
            logger.error(f"{downloader} 获取种子Hash失败")
            return None
        return torrent_hash
    elif downloader == "transmission":
        # Transmission returns the torrent object directly on add.
        torrent = self.tr.add_torrent(content=content,
                                      download_dir=save_path,
                                      is_paused=True,
                                      labels=list(self._torrent_tags))
        return torrent.hashString if torrent else None

    logger.error(f"不支持的下载器:{downloader}")
    return None
|
||||
|
||||
def transfer(self):
    """
    Run one seeding-transfer pass.

    Reads completed torrents from the source downloader, filters them by
    excluded labels / excluded save paths, re-adds each .torrent file to
    the target downloader (paused) with the save path remapped from the
    source root to the target root, optionally deletes the source task,
    queues the new task for recheck, and finally posts a summary
    notification.
    """
    logger.info("开始转移做种任务 ...")

    # Source downloader name (from plugin config)
    downloader = self._fromdownloader
    # Target downloader name (from plugin config)
    todownloader = self._todownloader

    # Fetch completed torrents from the source downloader
    downloader_obj = self.__get_downloader(downloader)
    torrents = downloader_obj.get_completed_torrents()
    if torrents:
        logger.info(f"下载器 {downloader} 已完成种子数:{len(torrents)}")
    else:
        logger.info(f"下载器 {downloader} 没有已完成种子")
        return

    # Filter torrents and remember each one's save directory
    trans_torrents = []
    for torrent in torrents:
        # A set event signals plugin shutdown; abort the pass
        if self._event.is_set():
            logger.info(f"转移服务停止")
            return

        # Torrent hash (extraction is downloader-type dependent)
        hash_str = self.__get_hash(torrent, downloader)
        # Save path in the source downloader
        save_path = self.__get_save_path(torrent, downloader)

        if self._nopaths and save_path:
            # Skip torrents stored under any excluded path (one per line)
            nopath_skip = False
            for nopath in self._nopaths.split('\n'):
                if os.path.normpath(save_path).startswith(os.path.normpath(nopath)):
                    logger.info(f"种子 {hash_str} 保存路径 {save_path} 不需要转移,跳过 ...")
                    nopath_skip = True
                    break
            if nopath_skip:
                continue

        # Skip torrents carrying any excluded label (comma separated)
        torrent_labels = self.__get_label(torrent, downloader)
        if torrent_labels and self._nolabels:
            is_skip = False
            for label in self._nolabels.split(','):
                if label in torrent_labels:
                    logger.info(f"种子 {hash_str} 含有不转移标签 {label},跳过 ...")
                    is_skip = True
                    break
            if is_skip:
                continue

        # Record the torrent as a transfer candidate
        trans_torrents.append({
            "hash": hash_str,
            "save_path": save_path,
            "torrent": torrent
        })

    # Process the transfer candidates
    if trans_torrents:
        logger.info(f"需要转移的种子数:{len(trans_torrents)}")
        # Counters for the summary notification
        total = len(trans_torrents)
        # Successful transfers
        success = 0
        # Failed transfers
        fail = 0
        # Skipped (already present in the target downloader)
        skip = 0

        for torrent_item in trans_torrents:
            # The .torrent file must exist in the source downloader's
            # torrent directory
            torrent_file = Path(self._fromtorrentpath) / f"{torrent_item.get('hash')}.torrent"
            if not torrent_file.exists():
                logger.error(f"种子文件不存在:{torrent_file}")
                # Count as failure
                fail += 1
                continue

            # Skip when the hash is already present in the target downloader
            todownloader_obj = self.__get_downloader(todownloader)
            torrent_info, _ = todownloader_obj.get_torrents(ids=[torrent_item.get('hash')])
            if torrent_info:
                logger.info(f"{torrent_item.get('hash')} 已在目的下载器中,跳过 ...")
                # Count as skipped
                skip += 1
                continue

            # Remap the save path from the source root to the target root
            download_dir = self.__convert_save_path(torrent_item.get('save_path'),
                                                    self._frompath,
                                                    self._topath)
            if not download_dir:
                logger.error(f"转换保存路径失败:{torrent_item.get('save_path')}")
                # Count as failure
                fail += 1
                continue

            # qBittorrent-exported .torrent files may lack tracker info;
            # try to recover it from the matching .fastresume file
            if downloader == "qbittorrent":
                # Read and parse the torrent file
                content = torrent_file.read_bytes()
                if not content:
                    logger.warn(f"读取种子文件失败:{torrent_file}")
                    fail += 1
                    continue
                # Extract the announce URL
                try:
                    torrent_main = bdecode(content)
                    main_announce = torrent_main.get('announce')
                except Exception as err:
                    logger.warn(f"解析种子文件 {torrent_file} 失败:{str(err)}")
                    fail += 1
                    continue

                if not main_announce:
                    logger.info(f"{torrent_item.get('hash')} 未发现tracker信息,尝试补充tracker信息...")
                    # The fastresume file holds the trackers qBittorrent used
                    fastresume_file = Path(self._fromtorrentpath) / f"{torrent_item.get('hash')}.fastresume"
                    if not fastresume_file.exists():
                        logger.warn(f"fastresume文件不存在:{fastresume_file}")
                        fail += 1
                        continue
                    # Try to splice the trackers back into the torrent data
                    try:
                        # Parse the fastresume file
                        fastresume = fastresume_file.read_bytes()
                        torrent_fastresume = bdecode(fastresume)
                        # Trackers are stored as a list of tiers; take the
                        # first tracker of the first non-empty tier
                        fastresume_trackers = torrent_fastresume.get('trackers')
                        if isinstance(fastresume_trackers, list) \
                                and len(fastresume_trackers) > 0 \
                                and fastresume_trackers[0]:
                            # Re-assign the announce URL
                            torrent_main['announce'] = fastresume_trackers[0][0]
                            # Switch to a patched copy of the torrent file
                            torrent_file = settings.TEMP_PATH / f"{torrent_item.get('hash')}.torrent"
                            # Re-encode and save to the temp location
                            torrent_file.write_bytes(bencode(torrent_main))
                    except Exception as err:
                        logger.error(f"解析fastresume文件 {fastresume_file} 出错:{str(err)}")
                        fail += 1
                        continue

            # Add to the target downloader: paused, with the remapped path
            logger.info(f"添加转移做种任务到下载器 {todownloader}:{torrent_file}")
            download_id = self.__download(downloader=todownloader,
                                          content=torrent_file.read_bytes(),
                                          save_path=download_dir)
            if not download_id:
                # Adding the task failed
                fail += 1
                logger.error(f"添加下载任务失败:{torrent_file}")
                continue
            else:
                # Task added successfully
                logger.info(f"成功添加转移做种任务,种子文件:{torrent_file}")

                # Transmission rechecks automatically; qBittorrent must be
                # told to recheck explicitly
                if todownloader == "qbittorrent":
                    logger.info(f"qbittorrent 开始校验 {download_id} ...")
                    todownloader_obj.recheck_torrents(ids=[download_id])

                # Queue the task so check_recheck() can start seeding it
                # once the recheck completes
                logger.info(f"添加校验检查任务:{download_id} ...")
                if not self._recheck_torrents.get(todownloader):
                    self._recheck_torrents[todownloader] = []
                self._recheck_torrents[todownloader].append(download_id)

                # Remove the task from the source downloader — never the
                # data files, which the new task seeds from
                if self._deletesource:
                    logger.info(f"删除源下载器任务(不含文件):{torrent_item.get('hash')} ...")
                    downloader_obj.delete_torrents(delete_file=False, ids=[torrent_item.get('hash')])

                # Count as success
                success += 1
                # Persist a transfer-history record
                history_key = "%s-%s" % (self._fromdownloader, torrent_item.get('hash'))
                self.save_data(key=history_key,
                               value={
                                   "to_download": self._todownloader,
                                   "to_download_id": download_id,
                                   "delete_source": self._deletesource,
                               })
        # Kick off the recheck watcher immediately when auto-start is on
        if success > 0 and self._autostart:
            self.check_recheck()

        # Post the summary notification
        if self._notify:
            self.post_message(
                mtype=NotificationType.SiteMessage,
                title="【转移做种任务执行完成】",
                text=f"总数:{total},成功:{success},失败:{fail},跳过:{skip}"
            )
    else:
        logger.info(f"没有需要转移的种子")
    logger.info("转移做种任务执行完成")
|
||||
|
||||
def check_recheck(self):
    """
    Periodically check whether queued torrents in the target downloader
    have finished rechecking; start seeding the ones that are complete.
    """
    # Nothing queued, no target downloader configured, or a previous
    # check pass still in flight — nothing to do.
    if not self._recheck_torrents or not self._todownloader or self._is_recheck_running:
        return

    # Downloader whose recheck queue we inspect
    dl_name = self._todownloader

    # Hashes still awaiting verification
    pending = self._recheck_torrents.get(dl_name, [])
    if not pending:
        return

    logger.info(f"开始检查下载器 {dl_name} 的校验任务 ...")

    # Mark this pass as running so overlapping timer fires bail out early
    self._is_recheck_running = True

    # Query the queued tasks from the downloader
    dl_obj = self.__get_downloader(dl_name)
    torrents, _ = dl_obj.get_torrents(ids=pending)
    if torrents:
        # Hashes whose recheck finished and that are ready to seed
        seedable = [
            self.__get_hash(t, dl_name)
            for t in torrents
            if self.__can_seeding(t, dl_name)
        ]

        if seedable:
            logger.info(f"共 {len(seedable)} 个任务校验完成,开始做种")
            # Resume the verified tasks
            dl_obj.start_torrents(ids=seedable)
            # Drop the handled hashes from the pending queue
            self._recheck_torrents[dl_name] = list(
                set(pending).difference(set(seedable)))
        else:
            logger.info(f"没有新的任务校验完成,将在下次个周期继续检查 ...")

    elif torrents is None:
        logger.info(f"下载器 {dl_name} 查询校验任务失败,将在下次继续查询 ...")
    else:
        logger.info(f"下载器 {dl_name} 中没有需要检查的校验任务,清空待处理列表")
        self._recheck_torrents[dl_name] = []

    self._is_recheck_running = False
|
||||
|
||||
@staticmethod
def __get_hash(torrent: Any, dl_type: str):
    """
    Extract a torrent's hash.

    qBittorrent items are dict-like ("hash" key); Transmission items
    expose a ``hashString`` attribute. Returns "" on any failure.
    """
    try:
        if dl_type == "qbittorrent":
            return torrent.get("hash")
        return torrent.hashString
    except Exception as e:
        print(str(e))
        return ""
|
||||
|
||||
@staticmethod
def __get_label(torrent: Any, dl_type: str):
    """
    Extract a torrent's labels as a list of stripped strings.

    qBittorrent stores tags as one comma-separated string; Transmission
    exposes a ``labels`` list. Returns [] on any failure.
    """
    try:
        if dl_type == "qbittorrent":
            raw_tags = torrent.get("tags")
            return [str(tag).strip() for tag in raw_tags.split(',')]
        return torrent.labels or []
    except Exception as e:
        print(str(e))
        return []
|
||||
|
||||
@staticmethod
def __get_save_path(torrent: Any, dl_type: str):
    """
    Extract a torrent's save directory.

    qBittorrent items carry a "save_path" key; Transmission items expose
    ``download_dir``. Returns "" on any failure.
    """
    try:
        if dl_type == "qbittorrent":
            return torrent.get("save_path")
        return torrent.download_dir
    except Exception as e:
        print(str(e))
        return ""
|
||||
|
||||
@staticmethod
def __can_seeding(torrent: Any, dl_type: str):
    """
    Return True when the torrent is fully verified, complete and paused,
    i.e. ready to start seeding.

    qBittorrent: state "pausedUP"; Transmission: stopped with 100%
    progress. Returns False on any failure.
    """
    try:
        if dl_type == "qbittorrent":
            return torrent.get("state") == "pausedUP"
        return torrent.status.stopped and torrent.percent_done == 1
    except Exception as e:
        print(str(e))
        return False
|
||||
|
||||
@staticmethod
def __convert_save_path(save_path: str, from_root: str, to_root: str):
    """
    Remap a save path from the source root directory to the target root.

    :param save_path: original save path (may be empty)
    :param from_root: source-side root directory
    :param to_root: target-side root directory
    :return: the remapped path; ``to_root`` when save_path is empty;
             ``save_path`` unchanged when either root is unset; None when
             save_path is outside from_root or on error
    """
    try:
        # No save directory: fall back to the target root
        if not save_path:
            return to_root
        # Either root missing: keep the path as-is
        if not (from_root and to_root):
            return save_path

        def _norm(p: str) -> str:
            # Normalize and force forward slashes so prefix comparison
            # behaves the same across platforms.
            return os.path.normpath(p).replace("\\", "/")

        src = _norm(save_path)
        old_root = _norm(from_root)
        new_root = _norm(to_root)
        # Swap only the leading root segment
        if src.startswith(old_root):
            return src.replace(old_root, new_root, 1)
    except Exception as e:
        print(str(e))
    return None
|
||||
|
||||
def stop_service(self):
    """
    Shut the plugin down: cancel all scheduled jobs, signal any running
    transfer to stop, and stop the scheduler.
    """
    try:
        scheduler = self._scheduler
        if not scheduler:
            return
        scheduler.remove_all_jobs()
        if scheduler.running:
            # Raise the stop flag so an in-flight transfer() loop exits,
            # then shut the scheduler down and clear the flag.
            self._event.set()
            scheduler.shutdown()
            self._event.clear()
        self._scheduler = None
    except Exception as e:
        print(str(e))
|
||||
@@ -1,184 +0,0 @@
|
||||
from app.plugins import _PluginBase
|
||||
from app.core.event import eventmanager
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.http import RequestUtils
|
||||
from typing import Any, List, Dict, Tuple
|
||||
from app.log import logger
|
||||
|
||||
|
||||
class WebHook(_PluginBase):
    """
    Plugin that forwards MoviePilot events to a third-party URL via
    HTTP GET or POST ("webhook" style integration).
    """

    # Plugin display name
    plugin_name = "Webhook"
    # Plugin description
    plugin_desc = "事件发生时向第三方地址发送请求。"
    # Plugin icon
    plugin_icon = "webhook.png"
    # Theme color
    plugin_color = "#C73A63"
    # Plugin version
    plugin_version = "1.0"
    # Plugin author
    plugin_author = "jxxghp"
    # Author homepage
    author_url = "https://github.com/jxxghp"
    # Prefix for this plugin's config keys
    plugin_config_prefix = "webhook_"
    # Load order
    plugin_order = 14
    # Minimum user level allowed to use the plugin
    auth_level = 1

    # Private state (populated from config in init_plugin)
    # Target webhook URL
    _webhook_url = None
    # HTTP method, "POST" or "GET"
    _method = None
    # Whether the plugin is enabled
    _enabled = False

    def init_plugin(self, config: dict = None):
        """
        Load the saved configuration.

        :param config: dict with keys ``enabled``, ``webhook_url`` and
                       ``request_method``; may be None on first run.
        """
        if config:
            self._enabled = config.get("enabled")
            self._webhook_url = config.get("webhook_url")
            self._method = config.get('request_method')

    def get_state(self) -> bool:
        """Return whether the plugin is enabled."""
        return self._enabled

    @staticmethod
    def get_command() -> List[Dict[str, Any]]:
        """Remote commands exposed by this plugin (none)."""
        pass

    def get_api(self) -> List[Dict[str, Any]]:
        """API endpoints exposed by this plugin (none)."""
        pass

    def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
        """
        拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构

        Build the plugin's settings form. Returns two pieces of data:
        1) the Vuetify component layout; 2) the default config values.
        """
        request_options = ["POST", "GET"]
        return [
            {
                'component': 'VForm',
                'content': [
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 6
                                },
                                'content': [
                                    {
                                        'component': 'VSwitch',
                                        'props': {
                                            'model': 'enabled',
                                            'label': '启用插件',
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                    {
                        'component': 'VRow',
                        'content': [
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 4
                                },
                                'content': [
                                    {
                                        'component': 'VSelect',
                                        'props': {
                                            'model': 'request_method',
                                            'label': '请求方式',
                                            'items': request_options
                                        }
                                    }
                                ]
                            },
                            {
                                'component': 'VCol',
                                'props': {
                                    'cols': 12,
                                    'md': 8
                                },
                                'content': [
                                    {
                                        'component': 'VTextField',
                                        'props': {
                                            'model': 'webhook_url',
                                            'label': 'webhook地址'
                                        }
                                    }
                                ]
                            }
                        ]
                    },
                ]
            }
        ], {
            "enabled": False,
            "request_method": "POST",
            "webhook_url": ""
        }

    def get_page(self) -> List[dict]:
        """Detail-page layout (this plugin has none)."""
        pass

    # NOTE(review): registering with the EventType class itself appears to
    # subscribe this handler to every event type — confirm against
    # eventmanager.register semantics.
    @eventmanager.register(EventType)
    def send(self, event):
        """
        Forward the event to the configured third-party webhook.

        Serializes the event payload to plain JSON-compatible data and
        sends it via the configured HTTP method.
        """
        if not self._enabled or not self._webhook_url:
            return

        def __to_dict(_event):
            """
            Recursively convert an arbitrary object graph to plain
            dicts/lists/scalars; anything unrecognized becomes str().
            """
            if isinstance(_event, dict):
                for k, v in _event.items():
                    _event[k] = __to_dict(v)
                return _event
            elif isinstance(_event, list):
                for i in range(len(_event)):
                    _event[i] = __to_dict(_event[i])
                return _event
            elif isinstance(_event, tuple):
                return tuple(__to_dict(list(_event)))
            elif isinstance(_event, set):
                return set(__to_dict(list(_event)))
            elif hasattr(_event, 'to_dict'):
                return __to_dict(_event.to_dict())
            elif hasattr(_event, '__dict__'):
                return __to_dict(_event.__dict__)
            elif isinstance(_event, (int, float, str, bool, type(None))):
                return _event
            else:
                return str(_event)

        event_info = {
            "type": event.event_type,
            "data": __to_dict(event.event_data)
        }

        if self._method == 'POST':
            ret = RequestUtils(content_type="application/json").post_res(self._webhook_url, json=event_info)
        else:
            # NOTE(review): event_info["data"] may be a nested dict; as GET
            # query params the nesting is not preserved by standard URL
            # encoding — confirm receivers expect this shape.
            ret = RequestUtils().get_res(self._webhook_url, params=event_info)
        if ret:
            logger.info("发送成功:%s" % self._webhook_url)
        elif ret is not None:
            logger.error(f"发送失败,状态码:{ret.status_code},返回信息:{ret.text} {ret.reason}")
        else:
            logger.error("发送失败,未获取到返回信息")

    def stop_service(self):
        """
        Shut the plugin down (no resources to release).
        """
        pass
|
||||
4
update
4
update
@@ -20,6 +20,9 @@ if [ "${MOVIEPILOT_AUTO_UPDATE_DEV}" = "true" ]; then
|
||||
pip install ${PIP_OPTIONS} -r /tmp/app/requirements.txt
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "安装依赖成功"
|
||||
# 下载插件
|
||||
echo "正在下载插件..."
|
||||
curl ${CURL_OPTIONS} "https://github.com/jxxghp/MoviePilot-Plugins/archive/refs/heads/main.zip" | busybox unzip -d /tmp -
|
||||
# 检查前端最新版本
|
||||
frontend_version=$(curl ${CURL_OPTIONS} "https://api.github.com/repos/jxxghp/MoviePilot-Frontend/releases/latest" | jq -r .tag_name)
|
||||
if [[ "${frontend_version}" == *v* ]]; then
|
||||
@@ -31,6 +34,7 @@ if [ "${MOVIEPILOT_AUTO_UPDATE_DEV}" = "true" ]; then
|
||||
mv /tmp/app /app
|
||||
rm -rf /public
|
||||
mv /tmp/dist /public
|
||||
mv /tmp/MoviePilot-Plugins-main/plugins/* /app/app/plugins/
|
||||
echo "程序更新成功,前端版本:${frontend_version}"
|
||||
else
|
||||
echo "前端程序下载失败,继续使用旧的程序来启动..."
|
||||
|
||||
Reference in New Issue
Block a user