Merge pull request #454 from EstrellaXD/3.1-dev

3.1.0
This commit is contained in:
Estrella Pan
2023-09-24 10:46:59 +08:00
committed by GitHub
308 changed files with 28724 additions and 2119 deletions

14
.gitattributes vendored Normal file
View File

@@ -0,0 +1,14 @@
# Don't allow people to merge changes to these generated files, because the result
# may be invalid. You need to run "rush update" again.
pnpm-lock.yaml merge=binary
shrinkwrap.yaml merge=binary
npm-shrinkwrap.json merge=binary
yarn.lock merge=binary
# Rush's JSON config files use JavaScript-style code comments. The rule below prevents pedantic
# syntax highlighters such as GitHub's from highlighting these comments as errors. Your text editor
# may also require a special configuration to allow comments in JSON.
#
# For more information, see this issue: https://github.com/microsoft/rushstack/issues/1088
#
*.json linguist-language=JSON-with-Comments

View File

@@ -1,32 +0,0 @@
name-template: 'v$RESOLVED_VERSION 🌈'
tag-template: 'v$RESOLVED_VERSION'
categories:
- title: '🚀 Features'
labels:
- 'feature'
- 'enhancement'
- 'new'
- title: '🐛 Bug Fixes'
labels:
- 'fix'
- 'bugfix'
- 'bug'
- title: '🧰 Maintenance'
label: 'chore'
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
version-resolver:
major:
labels:
- 'major'
minor:
labels:
- 'minor'
patch:
labels:
- 'patch'
default: patch
template: |
## Changes
$CHANGES

266
.github/workflows/build.yml vendored Normal file
View File

@@ -0,0 +1,266 @@
name: Build Docker

on:
  pull_request:
  push:

jobs:
  # Run the backend test suite on every push / PR.
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.11
        uses: actions/setup-python@v3
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          if [ -f backend/requirements.txt ]; then pip install -r backend/requirements.txt; fi
          pip install pytest
      - name: Test
        working-directory: ./backend/src
        run: |
          mkdir -p config
          pytest

  # Decide whether this run is a release and/or a dev build, and which
  # version string to use. Exposed to later jobs via job outputs.
  version-info:
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): checkout added — the "Check version" step below runs
      # `git tag` / `git push`, which fail outside a repository checkout.
      - uses: actions/checkout@v3
      - name: If release
        id: release
        # Release when a PR is merged into the repo, or a pushed ref name
        # contains 'alpha'/'beta'. Fixed: the original mixed malformed
        # single/double brackets ("[[..]== ..] && [..]]"), a bash syntax error.
        run: |
          if [[ "${{ github.event_name }}" == 'pull_request' && "${{ github.event.pull_request.merged }}" == 'true' ]] || \
             [[ "${{ github.event_name }}" == 'push' && ( "${{ github.ref }}" == *'alpha'* || "${{ github.ref }}" == *'beta'* ) ]]; then
            echo "release=1" >> $GITHUB_OUTPUT
          else
            echo "release=0" >> $GITHUB_OUTPUT
          fi
      - name: If dev
        id: dev
        run: |
          if [[ "${{ github.event_name }}" == 'push' && ( "${{ github.ref }}" == *'alpha'* || "${{ github.ref }}" == *'beta'* ) ]]; then
            echo "dev=1" >> $GITHUB_OUTPUT
          else
            echo "dev=0" >> $GITHUB_OUTPUT
          fi
      - name: Check version
        id: version
        # Fixed: the original used single [ ] with && inside (invalid bash),
        # `git config --local user.email` was missing its value argument, and
        # the tag/message arguments were unquoted (titles contain spaces).
        run: |
          if [[ "${{ github.event_name }}" == 'pull_request' && "${{ github.event.pull_request.merged }}" == 'true' ]]; then
            echo "version=${{ github.event.pull_request.title }}" >> $GITHUB_OUTPUT
            git config --local user.email "github-actions@github.com"
            git config --local user.name "github-actions"
            git tag -a "${{ github.event.pull_request.title }}" -m "${{ github.event.pull_request.body }}"
            git push origin "${{ github.event.pull_request.title }}"
          elif [[ "${{ github.event_name }}" == 'push' && ( "${{ github.ref }}" == *'alpha'* || "${{ github.ref }}" == *'beta'* ) ]]; then
            echo "version=${{ github.ref_name }}" >> $GITHUB_OUTPUT
          else
            echo "version=Test" >> $GITHUB_OUTPUT
          fi
      - name: Check result
        run: |
          echo "release: ${{ steps.release.outputs.release }}"
          echo "dev: ${{ steps.dev.outputs.dev }}"
          echo "version: ${{ steps.version.outputs.version }}"
    outputs:
      release: ${{ steps.release.outputs.release }}
      dev: ${{ steps.dev.outputs.dev }}
      version: ${{ steps.version.outputs.version }}

  # Build the webui dist and hand it to build-docker as an artifact.
  build-webui:
    runs-on: ubuntu-latest
    needs: [test, version-info]
    if: ${{ needs.version-info.outputs.release == 1 || needs.version-info.outputs.dev == 1 }}
    strategy:
      matrix:
        node-version: [18]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'pnpm'
          cache-dependency-path: webui/pnpm-lock.yaml
      - name: Install dependencies
        run: cd webui && pnpm install
      - name: Build
        run: |
          cd webui && pnpm build
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: dist
          path: webui/dist

  # Build (and on release, push) the multi-arch docker image.
  build-docker:
    runs-on: ubuntu-latest
    needs: [build-webui, version-info]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Create Version info via tag
        working-directory: ./backend/src
        run: |
          echo ${{ needs.version-info.outputs.version }}
          echo "VERSION='${{ needs.version-info.outputs.version }}'" >> module/__version__.py
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
      - name: Docker metadata main
        if: ${{ needs.version-info.outputs.release == 1 && needs.version-info.outputs.dev != 1 }}
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            estrellaxd/auto_bangumi
            ghcr.io/${{ github.repository }}
          tags: |
            type=semver,pattern=${{ needs.version-info.outputs.version }}
            type=raw,value=latest
      - name: Docker metadata dev
        if: ${{ needs.version-info.outputs.dev == 1 }}
        id: meta-dev
        uses: docker/metadata-action@v4
        with:
          images: |
            estrellaxd/auto_bangumi
            ghcr.io/${{ github.repository }}
          tags: |
            type=raw,value=${{ needs.version-info.outputs.version }}
            type=raw,value=dev-latest
      - name: Login to DockerHub
        if: ${{ needs.version-info.outputs.release == 1 }}
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Login to ghcr.io
        if: ${{ needs.version-info.outputs.release == 1 }}
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.ACCESS_TOKEN }}
      - name: Download artifact
        uses: actions/download-artifact@v3
        with:
          name: dist
          path: backend/src/dist
      - name: Build and push
        if: ${{ needs.version-info.outputs.release == 1 && needs.version-info.outputs.dev != 1 }}
        uses: docker/build-push-action@v4
        with:
          context: .
          # Fixed: `steps.buildx.output.name` -> `steps.buildx.outputs.name`
          # (the wrong key silently resolved to an empty string).
          builder: ${{ steps.buildx.outputs.name }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha, scope=${{ github.workflow }}
          cache-to: type=gha, scope=${{ github.workflow }}
      - name: Build and push dev
        if: ${{ needs.version-info.outputs.dev == 1 }}
        uses: docker/build-push-action@v4
        with:
          context: .
          builder: ${{ steps.buildx.outputs.name }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta-dev.outputs.tags }}
          labels: ${{ steps.meta-dev.outputs.labels }}
          cache-from: type=gha, scope=${{ github.workflow }}
          cache-to: type=gha, scope=${{ github.workflow }}
      - name: Build test
        if: ${{ needs.version-info.outputs.release == 0 }}
        uses: docker/build-push-action@v4
        with:
          context: .
          builder: ${{ steps.buildx.outputs.name }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7
          push: false
          tags: estrellaxd/auto_bangumi:test
          cache-from: type=gha, scope=${{ github.workflow }}
          cache-to: type=gha, scope=${{ github.workflow }}

  # Publish the GitHub release with the zipped webui attached.
  release:
    runs-on: ubuntu-latest
    needs: [build-docker, version-info]
    if: ${{ needs.version-info.outputs.release == 1 }}
    outputs:
      url: ${{ steps.release.outputs.url }}
      version: ${{ needs.version-info.outputs.version }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Download artifact
        uses: actions/download-artifact@v3
        with:
          name: dist
          path: webui/dist
      - name: Zip webui
        # NOTE(review): `tree` may not be preinstalled on the runner —
        # confirm, or drop it from the chain (the && chain aborts if it fails).
        run: |
          cd webui && ls -al && tree && zip -r dist.zip dist
      - name: Generate Release info
        id: release-info
        # `if ${{ expr }}` expands to the literal `true`/`false` command,
        # which bash evaluates as intended.
        run: |
          if ${{ needs.version-info.outputs.dev == 1 }}; then
            echo "version=🌙${{ needs.version-info.outputs.version }}" >> $GITHUB_OUTPUT
            echo "pre_release=true" >> $GITHUB_OUTPUT
          else
            echo "version=🌟${{ needs.version-info.outputs.version }}" >> $GITHUB_OUTPUT
            echo "pre_release=false" >> $GITHUB_OUTPUT
          fi
      - name: Release
        id: release
        uses: softprops/action-gh-release@v1
        with:
          tag_name: ${{ needs.version-info.outputs.version }}
          name: ${{ steps.release-info.outputs.version }}
          body: ${{ github.event.pull_request.body }}
          draft: false
          prerelease: ${{ steps.release-info.outputs.pre_release == 'true' }}
          files: |
            webui/dist.zip
        env:
          GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }}

  # Announce the new release on Telegram.
  telegram:
    runs-on: ubuntu-latest
    needs: [release]
    steps:
      - name: send telegram message on push
        uses: appleboy/telegram-action@master
        with:
          to: ${{ secrets.TELEGRAM_TO }}
          token: ${{ secrets.TELEGRAM_TOKEN }}
          message: |
            New release: ${{ needs.release.outputs.version }}
            Link: ${{ needs.release.outputs.url }}

View File

@@ -1,94 +0,0 @@
name: Build Docker(dev)
on:
push:
tags:
- '\d+\.\d+\.\d+-beta\d+'
- '\d+\.\d+-beta\d+'
- '\d+\.\d+-alpha\d+'
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.11
uses: actions/setup-python@v3
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
pip install pytest
- name: Test
working-directory: ./src
run: |
mkdir -p config
pytest
dev-latest:
runs-on: ubuntu-latest
needs: [test]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Create Version info
working-directory: ./src
run: |
echo "VERSION = '$GITHUB_REF_NAME'" > module/__version__.py
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Docker metadata
id: meta
uses: docker/metadata-action@v4
with:
images: |
estrellaxd/auto_bangumi
ghcr.io/${{ github.repository }}
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=dev-latest
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.ACCESS_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
file: Dockerfile
generate_release:
runs-on: ubuntu-latest
needs: [test]
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Generate Release
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ github.ref_name }}
name: 🌙${{ github.ref_name }}
draft: true
prerelease: true
env:
GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }}

View File

@@ -1,86 +0,0 @@
name: Build Docker
on:
pull_request:
push:
tags:
- '\d+\.\d+\.\d+'
- '\d+\.\d+'
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.11
uses: actions/setup-python@v3
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
pip install pytest
- name: Test
working-directory: ./backend/src
run: |
mkdir -p config
pytest
build:
runs-on: ubuntu-latest
needs: [test]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Create Version info
working-directory: ./backend/src
run: |
echo "VERSION = '$GITHUB_REF_NAME'" > module/__version__.py
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
- name: Docker metadata
id: meta
uses: docker/metadata-action@v4
with:
images: |
ghcr.io/estrellaxd/auto_bangumi
estrellaxd/auto_bangumi
ghcr.io/${{ github.repository }}
tags: |
type=semver,pattern={{version}}
type=raw,value=latest
- name: Login to DockerHub
if: ${{ github.event_name == 'push' }}
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Login to ghcr.io
if: ${{ github.event_name == 'push' }}
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.ACCESS_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v4
with:
context: .
builder: ${{ steps.buildx.output.name }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: ${{ github.event_name == 'push' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha, scope=${{ github.workflow }}
cache-to: type=gha, scope=${{ github.workflow }}

View File

@@ -1,32 +0,0 @@
name: Create Pull Request
on:
push:
tags:
- '\d+\.\d+\.\d+'
- '\d+\.\d+'
jobs:
create-pull-request:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Generate pull request body
id: pr
run: |
echo "docs/changelog/${{ github.ref }}.md"
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.ACCESS_TOKEN }}
commit-message: 'chore: release ${{ github.ref }}'
title: 'chore: release ${{ github.ref }}'
body: |
${{ steps.pr.outputs.body }}
branch: release/${{ github.ref }}
base: main
labels: release
draft: false
branch-suffix: timestamp
delete-branch: false

View File

@@ -1,45 +0,0 @@
name: Generate Release Draft
on:
pull_request:
types:
- closed
branches:
- main
jobs:
check_pr_title:
if: >
github.event.pull_request.merged == true &&
github.event.pull_request.base.ref == 'main'
runs-on: ubuntu-latest
outputs:
version_pr: ${{ steps.check.outputs.version_pr }}
steps:
- name: Check if PR title is version
id: check
run: |
if [[ "${{ github.event.pull_request.title }}" =~ ^([0-9]+\.[0-9]+\.[0-9]+)$ ]]; then
echo "::set-output name=version_pr::true"
else
echo "::set-output name=version_pr::false"
fi
generate_release_draft:
needs: check_pr_title
if:
needs.check_pr_title.outputs.version_pr == 'true'
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Generate Release
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ github.event.pull_request.title }}
name: 🌟${{ github.event.pull_request.title }}
body: ${{ github.event.pull_request.body }}
draft: false
prerelease: false
env:
GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }}

View File

@@ -1,18 +0,0 @@
name: Telegram Notification
on:
release:
types: [published]
jobs:
telegram:
runs-on: ubuntu-latest
steps:
- name: send telegram message on push
uses: appleboy/telegram-action@master
with:
to: ${{ secrets.TELEGRAM_TO }}
token: ${{ secrets.TELEGRAM_TOKEN }}
message: |
New release: ${{ github.event.release.title }}
Link: ${{ github.event.release.html_url }}

42
.gitignore vendored
View File

@@ -121,7 +121,6 @@ celerybeat.pid
# Environments
.idea
.vscode
.env
.venv
env/
@@ -162,6 +161,8 @@ cython_debug/
#.idea/
# Custom
#
# backend
/backend/src/test.py
/backend/src/module/run_debug.sh
@@ -169,11 +170,46 @@ cython_debug/
/backend/src/module/__version__.py
/backend/src/data/
/backend/src/module/conf/config_dev.ini
/src/module/conf/config_dev.ini
test.*
.run
/backend/src/templates/
/backend/src/config/
/backend/src/debuger.py
/src/debuger.py
/backend/src/dist.zip
/pyrightconfig.json
# webui
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist.zip
dist-ssr
*.local
dev-dist
# Editor directories and files
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
# vitepress
/docs/.vitepress/cache/

3
.gitmodules vendored
View File

@@ -1,3 +0,0 @@
[submodule "docs/wiki"]
path = docs/wiki
url = https://github.com/EstrellaXD/Auto_Bangumi.wiki.git

18
.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,18 @@
{
"recommendations": [
// https://marketplace.visualstudio.com/items?itemName=antfu.unocss
"antfu.unocss",
// https://marketplace.visualstudio.com/items?itemName=formulahendry.auto-rename-tag
"formulahendry.auto-rename-tag",
// https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker
"streetsidesoftware.code-spell-checker",
// https://marketplace.visualstudio.com/items?itemName=naumovs.color-highlight
"naumovs.color-highlight",
// https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance
"ms-python.vscode-pylance",
// https://marketplace.visualstudio.com/items?itemName=ms-python.python
"ms-python.python",
// https://marketplace.visualstudio.com/items?itemName=vue.volar
"vue.volar"
]
}

20
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,20 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Dev Backend",
"type": "python",
"request": "launch",
"cwd": "${workspaceFolder}/backend/src",
"program": "main.py",
"env": {
"HOST": "127.0.0.1",
},
"console": "integratedTerminal",
"justMyCode": true
}
]
}

19
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,19 @@
{
"files.associations": {
"settings.json": "json5",
"launch.json": "json5",
"extensions.json": "json5",
"tsconfig.json": "json5",
"tsconfig.*.json": "json5",
},
"[markdown]": {
"editor.wordWrap": "off",
},
"python.venvPath": "./backend/venv",
"cSpell.words": [
"Bangumi",
"fastapi",
"mikan",
"starlette"
],
}

47
CHANGELOG.md Normal file
View File

@@ -0,0 +1,47 @@
# [3.1] - 2023-08
- 合并了后端和前端仓库,优化了项目目录
- 优化了版本发布流程。
- Wiki 迁移至 Vitepress,地址:https://autobangumi.org
## Backend
### Features
- 新增 `RSS Engine` 模块从现在起AB 可以自主对 RSS 进行更新支持 `RSS` 订阅并且发送种子给下载器。
- 现在支持多个聚合 RSS 订阅源,可以通过 `RSS Engine` 模块进行管理。
- 支持下载去重功能,重复的订阅的种子不会被下载。
- 增加手动刷新 API可以手动刷新 RSS 订阅。
- 新增 RSS 订阅管理 API。
- 新增 `Search Engine`模块,可以通过关键词搜索种子并解析成收集或者订阅任务。
- 插件化的搜索引擎,可以通过插件的方式添加新的搜索目标,目前支持 `mikan`、`dmhy`、`nyaa`。
- 新增对字幕组的特异性规则,可以针对不同的字幕组进行单独设置。
- 新增 IPv6 监听支持,需要在环境变量中设置 `IPV6=1`
- API 新增批量操作,可以批量管理规则和 RSS 订阅。
### Changes
- 数据库结构变更,更换为 `sqlmodel` 管理数据库。
- 新增版本管理,可以无缝更新软件数据。
- 调整 API 格式,更加统一。
- 增加 API 返回语言选项。
- 增加数据库 mock test。
- 优化代码。
### Bugfixes
- 修复了一些小问题。
- 增加了一些大问题。
## Frontend
### Features
- 增加 `i18n` 支持,目前支持 `zh-CN`、`en-US`。
- 增加 pwa 支持。
- 增加 RSS 管理页面。
- 增加搜索顶栏。
### Changes
- 调整一些 UI 细节。

View File

@@ -1,14 +1,8 @@
# syntax=docker/dockerfile:1
FROM alpine:3.18 AS APP
FROM alpine:3.18
ENV S6_SERVICES_GRACETIME=30000 \
S6_KILL_GRACETIME=60000 \
S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0 \
S6_SYNC_DISKS=1 \
TERM="xterm" \
HOME="/ab" \
LANG="C.UTF-8" \
ENV LANG="C.UTF-8" \
TZ=Asia/Shanghai \
PUID=1000 \
PGID=1000 \
@@ -16,39 +10,33 @@ ENV S6_SERVICES_GRACETIME=30000 \
WORKDIR /app
COPY requirements.txt .
RUN apk add --no-cache \
COPY backend/requirements.txt .
RUN set -ex && \
apk add --no-cache \
bash \
ca-certificates \
coreutils \
curl \
jq \
netcat-openbsd \
procps-ng \
python3 \
py3-bcrypt \
py3-pip \
s6-overlay \
su-exec \
shadow \
tini \
tzdata && \
python3 -m pip install --upgrade pip && \
python3 -m pip install --no-cache-dir --upgrade pip && \
sed -i '/bcrypt/d' requirements.txt && \
pip install --no-cache-dir -r requirements.txt && \
# Download WebUI
curl -sL "https://github.com/Rewrite0/Auto_Bangumi_WebUI/releases/latest/download/dist.zip" | busybox unzip -q -d /app - && \
mv /app/dist /app/templates && \
# Add user
mkdir -p /home/ab && \
addgroup -S ab -g 911 && \
adduser -S ab -G ab -h /ab -s /bin/bash -u 911 && \
adduser -S ab -G ab -h /home/ab -s /sbin/nologin -u 911 && \
# Clear
rm -rf \
/root/.cache \
/tmp/*
COPY --chmod=755 backend/src/. .
COPY --chmod=755 backend/src/docker /
COPY --chmod=755 entrypoint.sh /entrypoint.sh
ENTRYPOINT [ "/init" ]
ENTRYPOINT ["tini", "-g", "--", "/entrypoint.sh"]
EXPOSE 7892
VOLUME [ "/app/config" , "/app/data" ]

View File

@@ -1,6 +1,6 @@
<p align="center">
<img src="docs/image/light-icon.png#gh-light-mode-only" width=50%/ alt="">
<img src="docs/image/dark-icon.png#gh-dark-mode-only" width=50%/ alt="">
<img src="docs/image/icons/light-icon.svg#gh-light-mode-only" width=50%/ alt="">
<img src="docs/image/icons/dark-icon.svg#gh-dark-mode-only" width=50%/ alt="">
</p>
<p align="center">
<img title="docker build version" src="https://img.shields.io/docker/v/estrellaxd/auto_bangumi" alt="">
@@ -16,7 +16,7 @@
# 项目说明
<p align="center">
<img title="AutoBangumi" src="docs/image/window.png" alt="" width=75%>
<img title="AutoBangumi" src="docs/image/preview/window.png" alt="" width=75%>
</p>
本项目是基于 [Mikan Project](https://mikanani.me)、[qBittorrent](https://qbittorrent.org) 的全自动追番整理下载工具。只需要在 [Mikan Project](https://mikanani.me) 上订阅番剧,就可以全自动追番。并且整理完成的名称和目录可以直接被 [Plex]()、[Jellyfin]() 等媒体库软件识别,无需二次刮削。

View File

@@ -5,4 +5,4 @@ repos:
- id: black
language: python
# TODO: add ruff lint check before committing
# TODO: add ruff lint check before committing.

8
backend/.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,8 @@
{
"python.formatting.provider": "none",
"python.formatting.blackPath": "black",
"editor.formatOnSave": true,
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
}
}

28
backend/dev.sh Executable file
View File

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
# This script is used to run the development environment.
#
# It creates a local virtualenv, installs dev requirements (via the
# Tsinghua PyPI mirror), installs the pre-commit git hook, ensures the
# runtime config directory and version stub exist, then starts uvicorn
# with auto-reload on port 7892.
python3 -m venv venv
./venv/bin/python3 -m pip install -i https://pypi.tuna.tsinghua.edu.cn/simple -r requirements-dev.txt
# install git-hooks for pre-commit before committing.
./venv/bin/pre-commit install
cd src || exit
# The app is run from src/, which needs a writable config/ directory.
CONFIG_DIR="config"
if [ ! -d "$CONFIG_DIR" ]; then
echo "The directory '$CONFIG_DIR' is missing."
mkdir config
fi
# module/__version__.py is generated by CI at build time; stub it for
# dev runs so imports of the version constant succeed.
VERSION_FILE="module/__version__.py"
if [ ! -f "$VERSION_FILE" ]; then
echo "The file '$VERSION_FILE' is missing."
echo "VERSION='DEV_VERSION'" >>"$VERSION_FILE"
fi
# Launch the FastAPI app with the venv one directory up (we cd'd into src/).
../venv/bin/uvicorn main:app --reload --port 7892

View File

@@ -1,15 +1,15 @@
[tool.ruff]
select = [
# pycodestyle(E): https://beta.ruff.rs/docs/rules/#pycodestyle-e-w
"E",
"E",
# Pyflakes(F): https://beta.ruff.rs/docs/rules/#pyflakes-f
"F",
"F",
# isort(I): https://beta.ruff.rs/docs/rules/#isort-i
"I"
]
ignore = [
# E501: https://beta.ruff.rs/docs/rules/line-too-long/
'E501',
'E501',
# F401: https://beta.ruff.rs/docs/rules/unused-import/
# avoid unused imports lint in `__init__.py`
'F401',

View File

@@ -0,0 +1,4 @@
-r requirements.txt
ruff
black
pre-commit

28
backend/requirements.txt Normal file
View File

@@ -0,0 +1,28 @@
anyio==3.7.0
bs4==0.0.1
certifi==2023.5.7
charset-normalizer==3.1.0
click==8.1.3
fastapi==0.97.0
h11==0.14.0
idna==3.4
pydantic~=1.10
PySocks==1.7.1
qbittorrent-api==2023.6.49
requests==2.31.0
six==1.16.0
sniffio==1.3.0
soupsieve==2.4.1
typing_extensions==4.6.3
urllib3==2.0.3
uvicorn==0.22.0
attrdict==2.0.1
Jinja2==3.1.2
python-dotenv==1.0.0
python-jose==3.3.0
passlib==1.7.4
bcrypt==4.0.1
python-multipart==0.0.6
sqlmodel==0.0.8
sse-starlette==1.6.5
semver==3.0.1

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
#
# Usage:
#   `bash scripts/pip-lock-version.sh`
#
# Lock the library versions in `requirements.txt` to the versions
# currently installed (as reported by `pip freeze`), without changing
# the order of lines in `requirements.txt`.
#
# Snapshot the installed versions into a temporary file.
pip freeze > pip_freeze.log
# Rewrite requirements.txt line by line, preserving the original order.
while IFS= read -r line
do
    # Library name without the version specifier. Lines pinned with
    # non-`==` operators (e.g. `pydantic~=1.10`) won't match the freeze
    # output below and are kept as-is.
    lib_name=$(echo "$line" | cut -d'=' -f1)
    # Find the corresponding pinned line in the freeze snapshot.
    lib_line=$(grep "^$lib_name==" pip_freeze.log)
    # Emit the frozen pin when found, otherwise keep the original line.
    # Fixed: expansions are now quoted — the original's unquoted `echo
    # $line` was subject to word splitting and glob expansion, which
    # could corrupt comment lines or lines containing `*`.
    if [[ $lib_line ]]
    then
        echo "$lib_line"
    else
        echo "$line"
    fi
# Redirect the loop's output to a new requirements file.
done < requirements.txt > new_requirements.log
# Remove the temporary freeze snapshot.
rm pip_freeze.log
# Replace the old requirements file with the new one.
mv new_requirements.log requirements.txt

View File

@@ -1,14 +0,0 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
function __old_compatible {
umask ${UMASK}
if [ -f /config/bangumi.json ]; then
mv /config/bangumi.json /app/data/bangumi.json
fi
}
__old_compatible 2>&1 | sed "s#^#cont-init: info: $(realpath $0): &#g"

View File

@@ -1,13 +0,0 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
function __fixuser {
groupmod -o -g "${PGID}" ab
usermod -o -u "${PUID}" ab
chown ab:ab -R /app /ab
}
__fixuser 2>&1 | sed "s#^#cont-init: info: $(realpath $0): &#g"

View File

@@ -1,4 +0,0 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
pkill -f 'python3 main.py'

View File

@@ -1,16 +0,0 @@
#!/usr/bin/with-contenv bash
# shellcheck shell=bash
umask ${UMASK}
if [ -f /app/config/config.json ]; then
AB_PORT=$(jq '.program.webui_port' /app/config/config.json)
elif [ -f /app/config/config_dev.json ]; then
AB_PORT=$(jq '.program.webui_port' /app/config/config_dev.json)
else
AB_PORT=7892
fi
exec \
s6-notifyoncheck -d -n 300 -w 1000 -c "nc -z localhost ${AB_PORT}" \
cd /app s6-setuidgid ab python3 main.py

View File

@@ -1,8 +0,0 @@
_ ____ _
/\ | | | _ \ (_)
/ \ _ _| |_ ___ | |_) | __ _ _ __ __ _ _ _ _ __ ___ _
/ /\ \| | | | __/ _ \| _ < / _` | '_ \ / _` | | | | '_ ` _ \| |
/ ____ \ |_| | || (_) | |_) | (_| | | | | (_| | |_| | | | | | | |
/_/ \_\__,_|\__\___/|____/ \__,_|_| |_|\__, |\__,_|_| |_| |_|_|
__/ |
|___/

View File

@@ -1,10 +1,15 @@
import os
import logging
import uvicorn
from module.api import router
from module.conf import settings, setup_logger
import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from module.api import v1
from module.api.proxy import router as proxy_router
from module.conf import settings, setup_logger, VERSION
from starlette.types import ASGIApp
setup_logger(reset=True)
logger = logging.getLogger(__name__)
@@ -22,10 +27,82 @@ uvicorn_logging_config = {
},
}
def create_app() -> FastAPI:
app = FastAPI()
# mount routers
app.include_router(v1, prefix="/api")
app.include_router(proxy_router)
return app
app = create_app()
if VERSION != "DEV_VERSION":
app.mount("/assets", StaticFiles(directory="dist/assets"), name="assets")
app.mount("/images", StaticFiles(directory="dist/images"), name="images")
# app.mount("/icons", StaticFiles(directory="dist/icons"), name="icons")
templates = Jinja2Templates(directory="dist")
# Resource
# @app.get("/favicon.svg", tags=["html"])
# def favicon():
# return FileResponse("dist/favicon.svg")
#
# @app.get("/AutoBangumi.svg", tags=["html"])
# def logo():
# return FileResponse("dist/AutoBangumi.svg")
#
# @app.get("/favicon-light.svg", tags=["html"])
# def favicon_light():
# return FileResponse("dist/favicon-light.svg")
#
# @app.get("/robots.txt", tags=["html"])
# def robots():
# return FileResponse("dist/robots.txt")
#
# @app.get("/manifest.webmanifest", tags=["html"])
# def manifest():
# return FileResponse("dist/manifest.webmanifest")
#
# @app.get("/sw.js", tags=["html"])
# def sw():
# return FileResponse("dist/sw.js")
@app.get("/{path:path}")
def html(request: Request, path: str):
files = os.listdir("dist")
if path in files:
return FileResponse(f"dist/{path}")
else:
context = {"request": request}
return templates.TemplateResponse("index.html", context)
# HTML Response
# @app.get("/{path:path}", response_class=HTMLResponse, tags=["html"])
# def index(request: Request, path: str):
# print(request)
# print(path)
# context = {"request": request}
# return templates.TemplateResponse("index.html", context)
else:
@app.get("/", status_code=302, tags=["html"])
def index():
return RedirectResponse("/docs")
if __name__ == "__main__":
host = "::" if os.getenv("IPV6") else "0.0.0.0"
if os.getenv("IPV6"):
host = "::"
else:
host = os.getenv("HOST", "0.0.0.0")
uvicorn.run(
router,
app,
host=host,
port=settings.program.webui_port,
log_config=uvicorn_logging_config,

View File

@@ -1,6 +1,8 @@
import logging
import time
import threading
import time
from .timeout import timeout
logger = logging.getLogger(__name__)
lock = threading.Lock()

View File

@@ -0,0 +1,23 @@
import functools
import signal


def timeout(seconds):
    """Decorator factory: abort the wrapped call after *seconds* seconds.

    Uses SIGALRM, so it only works on Unix and only in the main thread;
    `seconds` must be a whole number of seconds (signal.alarm semantics).

    Args:
        seconds: whole seconds to allow the wrapped function to run.

    Raises:
        TimeoutError: when the wrapped call exceeds the time limit.
    """

    def decorator(func):
        def handler(signum, frame):
            # Fired by the alarm when the call has run too long.
            raise TimeoutError("Function timed out.")

        @functools.wraps(func)  # fixed: preserve the wrapped function's metadata
        def wrapper(*args, **kwargs):
            # Install the handler and arm the timer; remember the previous
            # handler so it can be restored (the original implementation
            # left our handler installed permanently after the call).
            old_handler = signal.signal(signal.SIGALRM, handler)
            signal.alarm(seconds)
            try:
                result = func(*args, **kwargs)
            finally:
                signal.alarm(0)  # cancel the pending alarm
                signal.signal(signal.SIGALRM, old_handler)
            return result

        return wrapper

    return decorator

View File

@@ -1 +1,21 @@
from .web import router
from fastapi import APIRouter
from .auth import router as auth_router
from .bangumi import router as bangumi_router
from .config import router as config_router
from .log import router as log_router
from .program import router as program_router
from .rss import router as rss_router
from .search import router as search_router
__all__ = "v1"
# API 1.0
v1 = APIRouter(prefix="/v1")
v1.include_router(auth_router)
v1.include_router(log_router)
v1.include_router(program_router)
v1.include_router(bangumi_router)
v1.include_router(config_router)
v1.include_router(rss_router)
v1.include_router(search_router)

View File

@@ -1,58 +1,66 @@
from datetime import timedelta
from fastapi import Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from fastapi.responses import JSONResponse, Response
from module.security import (
create_access_token,
from .response import u_response
from module.models.user import User, UserUpdate
from module.models import APIResponse
from module.security.api import (
auth_user,
get_current_user,
update_user_info,
auth_user,
active_user
)
from module.models.user import User
from module.security.jwt import create_access_token
from .program import router
router = APIRouter(prefix="/auth", tags=["auth"])
@router.post("/api/v1/auth/login", response_model=dict, tags=["auth"])
async def login(form_data: OAuth2PasswordRequestForm = Depends()):
username = form_data.username
password = form_data.password
auth_user(username, password)
token = create_access_token(data={"sub": username}, expires_delta=timedelta(days=1))
return {"access_token": token, "token_type": "bearer", "expire": 86400}
@router.get("/api/v1/auth/refresh_token", response_model=dict, tags=["auth"])
async def refresh(current_user: User = Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
@router.post("/login", response_model=dict)
async def login(response: Response, form_data=Depends(OAuth2PasswordRequestForm)):
user = User(username=form_data.username, password=form_data.password)
resp = auth_user(user)
if resp.status:
token = create_access_token(
data={"sub": user.username}, expires_delta=timedelta(days=1)
)
token = create_access_token(data={"sub": current_user.username})
return {"access_token": token, "token_type": "bearer", "expire": 86400}
response.set_cookie(key="token", value=token, httponly=True, max_age=86400)
return {"access_token": token, "token_type": "bearer"}
return u_response(resp)
@router.get("/refresh_token", response_model=dict, dependencies=[Depends(get_current_user)])
async def refresh(response: Response):
token = create_access_token(
data={"sub": active_user[0]}, expires_delta=timedelta(days=1)
)
response.set_cookie(key="token", value=token, httponly=True, max_age=86400)
return {"access_token": token, "token_type": "bearer"}
@router.get("/api/v1/auth/logout", response_model=dict, tags=["auth"])
async def logout(current_user: User = Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
@router.get("/logout", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def logout(response: Response):
active_user.clear()
response.delete_cookie(key="token")
return JSONResponse(
status_code=200,
content={"msg_en": "Logout successfully.", "msg_zh": "登出成功。"},
)
@router.post("/update", response_model=dict, dependencies=[Depends(get_current_user)])
async def update_user(
user_data: UserUpdate, response: Response
):
old_user = active_user[0]
if update_user_info(user_data, old_user):
token = create_access_token(data={"sub": old_user}, expires_delta=timedelta(days=1))
response.set_cookie(
key="token",
value=token,
httponly=True,
max_age=86400,
)
return {"message": "logout success"}
@router.post("/api/v1/auth/update", response_model=dict, tags=["auth"])
async def update_user(user_data: User, current_user: User = Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
if update_user_info(user_data, current_user):
return {
"message": "update success",
"access_token": create_access_token({"sub": user_data.username}),
"token_type": "bearer",
"expire": 86400,
}
return {"access_token": token, "token_type": "bearer", "message": "update success"}

View File

@@ -1,95 +1,85 @@
import logging
from fastapi import Depends, HTTPException, status
from fastapi import APIRouter, Depends
from fastapi.responses import JSONResponse
from .log import router
from .response import u_response
from module.models import BangumiData
from module.manager import TorrentManager
from module.security import get_current_user
from module.models import Bangumi, BangumiUpdate, APIResponse
from module.security.api import get_current_user, UNAUTHORIZED
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/bangumi", tags=["bangumi"])
@router.get(
"/api/v1/bangumi/getAll", tags=["bangumi"], response_model=list[BangumiData]
)
async def get_all_data(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
with TorrentManager() as torrent:
return torrent.search_all()
def str_to_list(data: Bangumi):
    """Split the comma-joined ``filter`` and ``rss_link`` fields of *data*
    into lists, mutating and returning the same object."""
    for attr in ("filter", "rss_link"):
        setattr(data, attr, getattr(data, attr).split(","))
    return data
@router.get(
"/api/v1/bangumi/getData/{bangumi_id}", tags=["bangumi"], response_model=BangumiData
)
async def get_data(bangumi_id: str, current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
with TorrentManager() as torrent:
return torrent.search_one(bangumi_id)
@router.get("/get/all", response_model=list[Bangumi], dependencies=[Depends(get_current_user)])
async def get_all_data():
    """Return every bangumi rule stored in the database."""
    with TorrentManager() as torrent_manager:
        return torrent_manager.bangumi.search_all()
@router.post("/api/v1/bangumi/updateRule", tags=["bangumi"])
async def update_rule(data: BangumiData, current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
with TorrentManager() as torrent:
try:
return torrent.update_rule(data)
except Exception as e:
logger.error(f"Failed to update rule: {e}")
return JSONResponse(status_code=500, content={"message": "Failed"})
@router.get("/get/{bangumi_id}", response_model=Bangumi, dependencies=[Depends(get_current_user)])
async def get_data(bangumi_id: str):
    """Look up a single bangumi rule by its id."""
    with TorrentManager() as torrent_manager:
        return torrent_manager.search_one(bangumi_id)
@router.delete("/api/v1/bangumi/deleteRule/{bangumi_id}", tags=["bangumi"])
async def delete_rule(
bangumi_id: str, file: bool = False, current_user=Depends(get_current_user)
@router.patch("/update/{bangumi_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def update_rule(
bangumi_id: int, data: BangumiUpdate,
):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
with TorrentManager() as manager:
resp = manager.update_rule(bangumi_id, data)
return u_response(resp)
@router.delete(path="/delete/{bangumi_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def delete_rule(bangumi_id: str, file: bool = False):
    """Delete one bangumi rule; when *file* is true, remove downloaded files too."""
    with TorrentManager() as torrent_manager:
        result = torrent_manager.delete_rule(bangumi_id, file)
    return u_response(result)
@router.delete(path="/delete/many/", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def delete_many_rule(bangumi_id: list, file: bool = False):
    """Delete several bangumi rules in one call.

    Returns the response for the last processed id.  Bug fix: an empty id
    list previously raised UnboundLocalError (HTTP 500); now returns 406.
    """
    if not bangumi_id:
        return JSONResponse(
            status_code=406,
            content={"msg_en": "No bangumi id provided.", "msg_zh": "未提供 bangumi id。"},
        )
    with TorrentManager() as manager:
        for single_id in bangumi_id:
            resp = manager.delete_rule(single_id, file)
    return u_response(resp)
@router.delete(path="/disable/{bangumi_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def disable_rule(bangumi_id: str, file: bool = False):
    """Disable one bangumi rule; when *file* is true, remove downloaded files too."""
    with TorrentManager() as torrent_manager:
        result = torrent_manager.disable_rule(bangumi_id, file)
    return u_response(result)
@router.delete(path="/disable/many/", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def disable_many_rule(bangumi_id: list, file: bool = False):
    """Disable several bangumi rules in one call.

    Returns the response for the last processed id.  Bug fix: an empty id
    list previously raised UnboundLocalError (HTTP 500); now returns 406.
    """
    if not bangumi_id:
        return JSONResponse(
            status_code=406,
            content={"msg_en": "No bangumi id provided.", "msg_zh": "未提供 bangumi id。"},
        )
    with TorrentManager() as manager:
        for single_id in bangumi_id:
            resp = manager.disable_rule(single_id, file)
    return u_response(resp)
@router.get(path="/enable/{bangumi_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def enable_rule(bangumi_id: str):
    """Re-enable a previously disabled bangumi rule."""
    with TorrentManager() as torrent_manager:
        result = torrent_manager.enable_rule(bangumi_id)
    return u_response(result)
@router.get("/reset/all", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def reset_all():
    """Drop every bangumi rule from the database."""
    with TorrentManager() as torrent_manager:
        torrent_manager.bangumi.delete_all()
    payload = {"msg_en": "Reset all rules successfully.", "msg_zh": "重置所有规则成功。"}
    return JSONResponse(status_code=200, content=payload)
with TorrentManager() as torrent:
return torrent.delete_rule(bangumi_id, file)
@router.delete("/api/v1/bangumi/disableRule/{bangumi_id}", tags=["bangumi"])
async def disable_rule(
bangumi_id: str, file: bool = False, current_user=Depends(get_current_user)
):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
with TorrentManager() as torrent:
return torrent.disable_rule(bangumi_id, file)
@router.get("/api/v1/bangumi/enableRule/{bangumi_id}", tags=["bangumi"])
async def enable_rule(bangumi_id: str, current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
with TorrentManager() as torrent:
return torrent.enable_rule(bangumi_id)
@router.get("/api/v1/bangumi/resetAll", tags=["bangumi"])
async def reset_all(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
with TorrentManager() as torrent:
torrent.delete_all()
return JSONResponse(status_code=200, content={"message": "OK"})

View File

@@ -1,36 +1,35 @@
import logging
from fastapi import Depends, HTTPException, status
from .bangumi import router
from fastapi import APIRouter, Depends
from fastapi.responses import JSONResponse
from module.conf import settings
from module.models import Config
from module.security import get_current_user
from module.models import Config, APIResponse
from module.security.api import get_current_user, UNAUTHORIZED
router = APIRouter(prefix="/config", tags=["config"])
logger = logging.getLogger(__name__)
@router.get("/api/v1/getConfig", tags=["config"], response_model=Config)
async def get_config(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
@router.get("/get", response_model=Config, dependencies=[Depends(get_current_user)])
async def get_config():
    # Return the live in-memory settings object; FastAPI serializes it via the
    # ``Config`` response model.  Auth is enforced by the route dependency.
    return settings
@router.post("/api/v1/updateConfig", tags=["config"])
async def update_config(config: Config, current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
@router.patch("/update", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def update_config(config: Config):
    """Persist a new configuration to disk and reload it into memory.

    Returns 200 on success, 406 when saving or reloading fails.
    """
    try:
        settings.save(config_dict=config.dict())
        settings.load()
        logger.info("Config updated")
        return JSONResponse(
            status_code=200,
            content={"msg_en": "Update config successfully.", "msg_zh": "更新配置成功。"}
        )
    except Exception as e:
        # Bug fix: logger.warning(e) discarded the traceback; keep it for debugging.
        logger.exception("Failed to update config: %s", e)
        return JSONResponse(
            status_code=406,
            content={"msg_en": "Update config failed.", "msg_zh": "更新配置失败。"}
        )

View File

@@ -1,54 +0,0 @@
from fastapi import Depends, HTTPException, status
from .config import router
from module.models.api import *
from module.models import BangumiData
from module.manager import SeasonCollector
from module.rss import analyser
from module.security import get_current_user
@router.post("/api/v1/download/analysis", tags=["download"])
async def analysis(link: RssLink, current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
data = analyser.link_to_data(link.rss_link)
if data:
return data
else:
return {"status": "Failed to parse link"}
@router.post("/api/v1/download/collection", tags=["download"])
async def download_collection(
data: BangumiData, current_user=Depends(get_current_user)
):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
if data:
with SeasonCollector() as collector:
if collector.collect_season(data, data.rss_link[0], proxy=True):
return {"status": "Success"}
else:
return {"status": "Failed to add torrent"}
else:
return {"status": "Failed to parse link"}
@router.post("/api/v1/download/subscribe", tags=["download"])
async def subscribe(data: BangumiData, current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
if data:
with SeasonCollector() as collector:
collector.subscribe_season(data)
return {"status": "Success"}
else:
return {"status": "Failed to parse link"}

View File

@@ -1,34 +1,32 @@
import os
from fastapi import Response, HTTPException, Depends, status
from .auth import router
from fastapi import APIRouter, Depends, HTTPException, Response, status
from fastapi.responses import JSONResponse
from module.conf import LOG_PATH
from module.security import get_current_user
from module.security.api import get_current_user, UNAUTHORIZED
from module.models import APIResponse
router = APIRouter(prefix="/log", tags=["log"])
@router.get("/api/v1/log", tags=["log"])
async def get_log(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
if os.path.isfile(LOG_PATH):
@router.get("", response_model=str, dependencies=[Depends(get_current_user)])
async def get_log():
    """Return the raw log file as ``text/plain``; 404 when it does not exist."""
    if not LOG_PATH.exists():
        return Response("Log file not found", status_code=404)
    return Response(LOG_PATH.read_bytes(), media_type="text/plain")
@router.get("/api/v1/log/clear", tags=["log"])
async def clear_log(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
@router.get("/clear", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def clear_log():
if LOG_PATH.exists():
LOG_PATH.write_text("")
return JSONResponse(
status_code=200,
content={"msg_en": "Log cleared successfully.", "msg_zh": "日志清除成功。"},
)
if os.path.isfile(LOG_PATH):
with open(LOG_PATH, "w") as f:
f.write("")
return {"status": "ok"}
else:
return Response("Log file not found", status_code=404)
return JSONResponse(
status_code=406,
content={"msg_en": "Log file not found.", "msg_zh": "日志文件未找到。"},
)

View File

@@ -1,16 +1,20 @@
import signal
import logging
import os
import signal
from fastapi import HTTPException, status, Depends
from fastapi import FastAPI
from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import JSONResponse
from .response import u_response
from module.core import Program
from module.security import get_current_user
from module.models import APIResponse
from module.conf import VERSION
from module.security.api import get_current_user, UNAUTHORIZED
logger = logging.getLogger(__name__)
program = Program()
router = FastAPI()
router = APIRouter(tags=["program"])
@router.on_event("startup")
@@ -23,82 +27,73 @@ async def shutdown():
program.stop()
@router.get("/api/v1/restart", tags=["program"])
async def restart(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
@router.get("/restart", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def restart():
try:
program.restart()
return {"status": "ok"}
resp = program.restart()
return u_response(resp)
except Exception as e:
logger.debug(e)
logger.warning("Failed to restart program")
raise HTTPException(status_code=500, detail="Failed to restart program")
@router.get("/api/v1/start", tags=["program"])
async def start(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
status_code=500,
detail={
"msg_en": "Failed to restart program.",
"msg_zh": "重启程序失败。",
}
)
@router.get("/start", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def start():
try:
return program.start()
resp = program.start()
return u_response(resp)
except Exception as e:
logger.debug(e)
logger.warning("Failed to start program")
raise HTTPException(status_code=500, detail="Failed to start program")
@router.get("/api/v1/stop", tags=["program"])
async def stop(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
status_code=500,
detail={
"msg_en": "Failed to start program.",
"msg_zh": "启动程序失败。",
}
)
return program.stop()
@router.get("/api/v1/status", tags=["program"])
async def status(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
@router.get("/stop", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def stop():
return u_response(program.stop())
@router.get("/status", response_model=dict, dependencies=[Depends(get_current_user)])
async def program_status():
if not program.is_running:
return {"status": "stop"}
return {
"status": False,
"version": VERSION,
"first_run": program.first_run,
}
else:
return {"status": "running"}
return {
"status": True,
"version": VERSION,
"first_run": program.first_run,
}
@router.get("/api/v1/shutdown", tags=["program"])
async def shutdown_program(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
@router.get("/shutdown", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def shutdown_program():
program.stop()
logger.info("Shutting down program...")
os.kill(os.getpid(), signal.SIGINT)
return {"status": "ok"}
return JSONResponse(
status_code=200,
content={"msg_en": "Shutdown program successfully.", "msg_zh": "关闭程序成功。"},
)
# Check status
@router.get("/api/v1/check/downloader", tags=["check"])
async def check_downloader_status(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
@router.get("/check/downloader", tags=["check"], response_model=bool, dependencies=[Depends(get_current_user)])
async def check_downloader_status():
return program.check_downloader()
@router.get("/api/v1/check/rss", tags=["check"])
async def check_rss_status(current_user=Depends(get_current_user)):
if not current_user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid token"
)
return program.check_analyser()

View File

@@ -1,14 +1,14 @@
import re
import logging
import re
from fastapi.responses import Response
from fastapi import APIRouter
from fastapi.exceptions import HTTPException
from .download import router
from fastapi.responses import Response
from module.conf import settings
from module.network import RequestContent
router = APIRouter()
logger = logging.getLogger(__name__)

View File

@@ -0,0 +1,14 @@
from fastapi.responses import JSONResponse
from fastapi.exceptions import HTTPException
from module.models.response import ResponseModel
def u_response(response_model: ResponseModel):
    """Convert an internal ResponseModel into a JSONResponse carrying the
    bilingual (English/Chinese) messages and the model's status code."""
    payload = {
        "msg_en": response_model.msg_en,
        "msg_zh": response_model.msg_zh,
    }
    return JSONResponse(status_code=response_model.status_code, content=payload)

View File

@@ -0,0 +1,150 @@
from fastapi import APIRouter, Depends
from fastapi.responses import JSONResponse
from .response import u_response
from module.models import RSSItem, RSSUpdate, Torrent, APIResponse, Bangumi
from module.rss import RSSEngine, RSSAnalyser
from module.security.api import get_current_user, UNAUTHORIZED
from module.downloader import DownloadClient
from module.manager import SeasonCollector
router = APIRouter(prefix="/rss", tags=["rss"])
@router.get(path="", response_model=list[RSSItem], dependencies=[Depends(get_current_user)])
async def get_rss():
    """List every stored RSS subscription."""
    with RSSEngine() as rss_engine:
        return rss_engine.rss.search_all()
@router.post(path="/add", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def add_rss(rss: RSSItem):
    """Register a new RSS subscription."""
    with RSSEngine() as rss_engine:
        result = rss_engine.add_rss(rss.url, rss.name, rss.aggregate, rss.parser)
    return u_response(result)
@router.post(path="/enable/many", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def enable_many_rss(rss_ids: list[int]):
    """Enable a batch of RSS subscriptions by id."""
    with RSSEngine() as rss_engine:
        result = rss_engine.enable_list(rss_ids)
    return u_response(result)
@router.delete(path="/delete/{rss_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def delete_rss(rss_id: int):
    """Delete one RSS subscription; 406 when the id is unknown."""
    with RSSEngine() as rss_engine:
        deleted = rss_engine.rss.delete(rss_id)
    if deleted:
        return JSONResponse(
            status_code=200,
            content={"msg_en": "Delete RSS successfully.", "msg_zh": "删除 RSS 成功。"},
        )
    return JSONResponse(
        status_code=406,
        content={"msg_en": "Delete RSS failed.", "msg_zh": "删除 RSS 失败。"},
    )
@router.post(path="/delete/many", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def delete_many_rss(rss_ids: list[int]):
    """Delete a batch of RSS subscriptions by id."""
    with RSSEngine() as rss_engine:
        result = rss_engine.delete_list(rss_ids)
    return u_response(result)
@router.patch(path="/disable/{rss_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def disable_rss(rss_id: int):
    """Disable one RSS subscription; 406 when the operation fails."""
    with RSSEngine() as rss_engine:
        disabled = rss_engine.rss.disable(rss_id)
    if disabled:
        return JSONResponse(
            status_code=200,
            content={"msg_en": "Disable RSS successfully.", "msg_zh": "禁用 RSS 成功。"},
        )
    return JSONResponse(
        status_code=406,
        content={"msg_en": "Disable RSS failed.", "msg_zh": "禁用 RSS 失败。"},
    )
@router.post(path="/disable/many", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def disable_many_rss(rss_ids: list[int]):
    """Disable a batch of RSS subscriptions by id."""
    with RSSEngine() as rss_engine:
        result = rss_engine.disable_list(rss_ids)
    return u_response(result)
@router.patch(path="/update/{rss_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def update_rss(
    rss_id: int, data: RSSUpdate, current_user=Depends(get_current_user)
):
    """Update an RSS subscription; 406 when the update fails.

    NOTE(review): auth is enforced twice here — by the route dependency and
    by the explicit ``current_user`` check; one is likely redundant.
    """
    if not current_user:
        raise UNAUTHORIZED
    with RSSEngine() as rss_engine:
        updated = rss_engine.rss.update(rss_id, data)
    if updated:
        return JSONResponse(
            status_code=200,
            content={"msg_en": "Update RSS successfully.", "msg_zh": "更新 RSS 成功。"},
        )
    return JSONResponse(
        status_code=406,
        content={"msg_en": "Update RSS failed.", "msg_zh": "更新 RSS 失败。"},
    )
@router.get(path="/refresh/all", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def refresh_all():
    """Fetch every RSS feed and hand new torrents to the download client."""
    with RSSEngine() as rss_engine, DownloadClient() as download_client:
        rss_engine.refresh_rss(download_client)
    return JSONResponse(
        status_code=200,
        content={"msg_en": "Refresh all RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
    )
@router.get(path="/refresh/{rss_id}", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def refresh_rss(rss_id: int):
    """Fetch one RSS feed by id and hand new torrents to the download client."""
    with RSSEngine() as rss_engine, DownloadClient() as download_client:
        rss_engine.refresh_rss(download_client, rss_id)
    return JSONResponse(
        status_code=200,
        content={"msg_en": "Refresh RSS successfully.", "msg_zh": "刷新 RSS 成功。"},
    )
@router.get(path="/torrent/{rss_id}", response_model=list[Torrent], dependencies=[Depends(get_current_user)])
async def get_torrent(rss_id: int):
    """List the torrents collected from one RSS subscription."""
    with RSSEngine() as rss_engine:
        return rss_engine.get_rss_torrents(rss_id)
# Old API
analyser = RSSAnalyser()
@router.post("/analysis", response_model=Bangumi, dependencies=[Depends(get_current_user)])
async def analysis(rss: RSSItem):
    """Parse an RSS link into bangumi data; on failure forward the error response."""
    data = analyser.link_to_data(rss)
    return data if isinstance(data, Bangumi) else u_response(data)
@router.post("/collect", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def download_collection(data: Bangumi):
    """Download a whole season for the given bangumi."""
    with SeasonCollector() as season_collector:
        result = season_collector.collect_season(data, data.rss_link)
    return u_response(result)
@router.post("/subscribe", response_model=APIResponse, dependencies=[Depends(get_current_user)])
async def subscribe(data: Bangumi):
    """Subscribe to a bangumi season so future episodes are collected."""
    with SeasonCollector() as season_collector:
        result = season_collector.subscribe_season(data)
    return u_response(result)

View File

@@ -0,0 +1,31 @@
from fastapi import APIRouter, Query, Depends
from sse_starlette.sse import EventSourceResponse
from module.searcher import SearchTorrent, SEARCH_CONFIG
from module.security.api import get_current_user, UNAUTHORIZED
from module.models import Bangumi
router = APIRouter(prefix="/search", tags=["search"])
@router.get("/bangumi", response_model=list[Bangumi], dependencies=[Depends(get_current_user)])
async def search_torrents(
    site: str = "mikan",
    keywords: str = Query(None)
):
    """Stream search results as Server-Sent Events, one event per bangumi item."""
    if not keywords:
        return []
    keyword_list = keywords.split(" ")
    with SearchTorrent() as searcher:
        return EventSourceResponse(
            content=searcher.analyse_keyword(keywords=keyword_list, site=site),
        )
@router.get("/provider", response_model=list[str], dependencies=[Depends(get_current_user)])
async def search_provider():
    """List the names of the configured search providers."""
    return [provider_name for provider_name in SEARCH_CONFIG]

View File

@@ -1,38 +0,0 @@
from fastapi import Request
from fastapi.responses import HTMLResponse, RedirectResponse, FileResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from .proxy import router
from module.conf import VERSION
if VERSION != "DEV_VERSION":
router.mount("/assets", StaticFiles(directory="templates/assets"), name="assets")
templates = Jinja2Templates(directory="templates")
# Resource
@router.get("/favicon.svg", tags=["html"])
def favicon():
return FileResponse("templates/favicon.svg")
@router.get("/AutoBangumi.svg", tags=["html"])
def logo():
return FileResponse("templates/AutoBangumi.svg")
@router.get("/favicon-light.svg", tags=["html"])
def favicon_light():
return FileResponse("templates/favicon-light.svg")
# HTML Response
@router.get("/{full_path:path}", response_class=HTMLResponse, tags=["html"])
def index(request: Request):
context = {"request": request}
return templates.TemplateResponse("index.html", context)
else:
@router.get("/", status_code=302, tags=["html"])
def index():
return RedirectResponse("/docs")

View File

@@ -1,9 +1,13 @@
import os.path
import logging
import requests
from pathlib import Path
from module.conf import settings, VERSION
from module.downloader import DownloadClient
from module.network import RequestContent
from module.conf import settings
from module.models import Config
from module.update import version_check
logger = logging.getLogger(__name__)
class Checker:
@@ -24,30 +28,50 @@ class Checker:
else:
return False
@staticmethod
def check_downloader() -> bool:
with DownloadClient() as client:
if client.authed:
return True
else:
return False
@staticmethod
def check_torrents() -> bool:
with RequestContent() as req:
try:
torrents = req.get_torrents(settings.rss_link, retry=2)
if torrents:
return True
except AttributeError:
link = f"https://mikanani.me/RSS/MyBangumi?token={settings.rss_parser.token}"
if req.get_torrents(link):
return True
return False
@staticmethod
def check_first_run() -> bool:
    """True when the settings still equal the defaults, i.e. never configured."""
    return settings.dict() == Config().dict()
@staticmethod
def check_version() -> bool:
    # Delegates to module.update.version_check; presumably True when the
    # recorded version matches the running one — confirm against that helper.
    return version_check()
@staticmethod
def check_database() -> bool:
    """True when the SQLite database file already exists on disk."""
    return Path("data/data.db").exists()
@staticmethod
def check_downloader() -> bool:
try:
url = f"http://{settings.downloader.host}" if "://" not in settings.downloader.host else f"{settings.downloader.host}"
response = requests.get(url, timeout=2)
if settings.downloader.type in response.text.lower():
with DownloadClient() as client:
if client.authed:
return True
else:
return False
else:
return False
except requests.exceptions.ReadTimeout:
logger.error("[Checker] Downloader connect timeout.")
return False
except requests.exceptions.ConnectionError:
logger.error("[Checker] Downloader connect failed.")
return False
except Exception as e:
logger.error(f"[Checker] Downloader connect failed: {e}")
return False
if __name__ == "__main__":
# print(Checker().check_downloader())
requests.get("http://162.200.20.1", timeout=2)

View File

@@ -1,8 +1,12 @@
from .log import setup_logger, LOG_PATH
from .config import VERSION, settings
from pathlib import Path
from .config import VERSION, settings
from .log import LOG_PATH, setup_logger
from .search_provider import SEARCH_CONFIG
TMDB_API = "32b19d6a05b512190a056fa4e747cbbc"
DATA_PATH = "data/data.db"
DATA_PATH = "sqlite:///data/data.db"
LEGACY_DATA_PATH = Path("data/data.json")
VERSION_PATH = Path("config/version.info")
PLATFORM = "Windows" if "\\" in settings.downloader.path else "Unix"

View File

@@ -1,31 +1,35 @@
import json
import os
import logging
import os
from pathlib import Path
from dotenv import load_dotenv
from .const import ENV_TO_ATTR
from module.models.config import Config
from .const import ENV_TO_ATTR
logger = logging.getLogger(__name__)
CONFIG_ROOT = Path("config")
try:
from module.__version__ import VERSION
if VERSION == "DEV_VERSION":
logger.info("Can't find version info, use DEV_VERSION instead")
CONFIG_PATH = "config/config_dev.json"
else:
CONFIG_PATH = f"config/config.json"
except ImportError:
logger.info("Can't find version info, use DEV_VERSION instead")
VERSION = "DEV_VERSION"
CONFIG_PATH = "config/config_dev.json"
CONFIG_PATH = (
CONFIG_ROOT / "config_dev.json"
if VERSION == "DEV_VERSION"
else CONFIG_ROOT / "config.json"
).resolve()
class Settings(Config):
def __init__(self):
super().__init__()
if os.path.exists(CONFIG_PATH):
if CONFIG_PATH.exists():
self.load()
self.save()
else:
@@ -36,7 +40,7 @@ class Settings(Config):
config = json.load(f)
config_obj = Config.parse_obj(config)
self.__dict__.update(config_obj.__dict__)
logger.info(f"Config loaded")
logger.info("Config loaded")
def save(self, config_dict: dict | None = None):
if not config_dict:
@@ -49,14 +53,6 @@ class Settings(Config):
self.__load_from_env()
self.save()
@property
def rss_link(self) -> str:
if "://" not in self.rss_parser.custom_url:
return f"https://{self.rss_parser.custom_url}/RSS/MyBangumi?token={self.rss_parser.token}"
return (
f"{self.rss_parser.custom_url}/RSS/MyBangumi?token={self.rss_parser.token}"
)
def __load_from_env(self):
config_dict = self.dict()
for key, section in ENV_TO_ATTR.items():
@@ -73,7 +69,7 @@ class Settings(Config):
config_dict[key][attr_name] = self.__val_from_env(env, attr)
config_obj = Config.parse_obj(config_dict)
self.__dict__.update(config_obj.__dict__)
logger.info(f"Config loaded from env")
logger.info("Config loaded from env")
@staticmethod
def __val_from_env(env: str, attr: tuple):
@@ -83,5 +79,9 @@ class Settings(Config):
else:
return os.environ[env]
@property
def group_rules(self):
return self.__dict__["group_rules"]
settings = Settings()

View File

@@ -1,6 +1,5 @@
# -*- encoding: utf-8 -*-
from urllib.parse import urlparse, parse_qs
from urllib.parse import parse_qs, urlparse
DEFAULT_SETTINGS = {
"program": {

View File

@@ -1,17 +1,19 @@
import os
import logging
from pathlib import Path
from .config import settings
LOG_PATH = "data/log.txt"
LOG_ROOT = Path("data")
LOG_PATH = LOG_ROOT / "log.txt"
def setup_logger(level: int = logging.INFO, reset: bool = False):
level = logging.DEBUG if settings.log.debug_enable else level
if not os.path.isdir("data"):
os.mkdir("data")
if reset and os.path.isfile(LOG_PATH):
os.remove(LOG_PATH)
LOG_ROOT.mkdir(exist_ok=True)
if reset and LOG_PATH.exists():
LOG_PATH.unlink(missing_ok=True)
logging.addLevelName(logging.DEBUG, "DEBUG:")
logging.addLevelName(logging.INFO, "INFO:")
logging.addLevelName(logging.WARNING, "WARNING:")

View File

@@ -0,0 +1,23 @@
from pathlib import Path
from module.utils import json_config
DEFAULT_PROVIDER = {
"mikan": "https://mikanani.me/RSS/Search?searchstr=%s",
"nyaa": "https://nyaa.si/?page=rss&q=%s&c=0_0&f=0",
"dmhy": "http://dmhy.org/topics/rss/rss.xml?keyword=%s"
}
PROVIDER_PATH = Path("config/search_provider.json")
def load_provider():
    """Load the search-provider mapping from disk, writing the default
    configuration file on first run."""
    if not PROVIDER_PATH.exists():
        json_config.save(PROVIDER_PATH, DEFAULT_PROVIDER)
        return DEFAULT_PROVIDER
    return json_config.load(PROVIDER_PATH)
SEARCH_CONFIG = load_provider()

View File

@@ -1,20 +1,30 @@
import logging
from .sub_thread import RenameThread, RSSThread
from .rss_feed import add_rss_feed
from module.conf import VERSION, settings
from module.update import data_migration, from_30_to_31, start_up, first_run
from module.models import ResponseModel
from module.conf import settings, VERSION
from module.update import data_migration
from .sub_thread import RenameThread, RSSThread
logger = logging.getLogger(__name__)
figlet = r"""
_ ____ _
/\ | | | _ \ (_)
/ \ _ _| |_ ___ | |_) | __ _ _ __ __ _ _ _ _ __ ___ _
/ /\ \| | | | __/ _ \| _ < / _` | '_ \ / _` | | | | '_ ` _ \| |
/ ____ \ |_| | || (_) | |_) | (_| | | | | (_| | |_| | | | | | | |
/_/ \_\__,_|\__\___/|____/ \__,_|_| |_|\__, |\__,_|_| |_| |_|_|
__/ |
|___/
"""
class Program(RenameThread, RSSThread):
@staticmethod
def __start_info():
with open("icon", "r") as f:
for line in f.readlines():
logger.info(line.strip("\n"))
for line in figlet.splitlines():
logger.info(line.strip("\n"))
logger.info(
f"Version {VERSION} Author: EstrellaXD Twitter: https://twitter.com/Estrella_Pan"
)
@@ -23,42 +33,78 @@ class Program(RenameThread, RSSThread):
def startup(self):
self.__start_info()
if self.first_run:
logger.info("First run detected, please configure the program in webui.")
if not self.database:
first_run()
logger.info("[Core] No db file exists, create database file.")
return {"status": "First run detected."}
if self.legacy_data:
logger.info(
"Legacy data detected, starting data migration, please wait patiently."
"[Core] Legacy data detected, starting data migration, please wait patiently."
)
data_migration()
elif self.version_update:
# Update database
from_30_to_31()
logger.info("[Core] Database updated.")
self.start()
def start(self):
if self.first_run:
return {"status": "Not ready to start."}
self.stop_event.clear()
settings.load()
if self.downloader_status:
if self.enable_renamer:
self.rename_start()
if self.enable_rss:
add_rss_feed()
self.rss_start()
logger.info("Program running.")
return {"status": "Program started."}
return ResponseModel(
status=True,
status_code=200,
msg_en="Program started.",
msg_zh="程序启动成功。",
)
else:
return {"status": "Can't connect to downloader. Program not paused."}
self.stop_event.set()
logger.warning("Program failed to start.")
return ResponseModel(
status=False,
status_code=406,
msg_en="Program failed to start.",
msg_zh="程序启动失败。",
)
def stop(self):
if self.is_running:
self.stop_event.set()
self.rename_stop()
self.rss_stop()
return {"status": "Program stopped."}
return ResponseModel(
status=True,
status_code=200,
msg_en="Program stopped.",
msg_zh="程序停止成功。",
)
else:
return {"status": "Program is not running."}
return ResponseModel(
status=False,
status_code=406,
msg_en="Program is not running.",
msg_zh="程序未运行。",
)
def restart(self):
self.stop()
self.start()
return {"status": "Program restarted."}
return ResponseModel(
status=True,
status_code=200,
msg_en="Program restarted.",
msg_zh="程序重启成功。",
)
def update_database(self):
if not self.version_update:
return {"status": "No update found."}
else:
start_up()
return {"status": "Database updated."}

View File

@@ -1,34 +0,0 @@
import logging
from module.downloader import DownloadClient
from module.conf import settings
logger = logging.getLogger(__name__)
def add_rss_feed():
    """Ensure the configured RSS link is registered in the download client.

    Scans the client's existing feeds: if the configured ``settings.rss_link``
    is already present, nothing is added; otherwise the legacy "Mikan_RSS"
    feed (if seen) is removed and the configured link is added.
    """
    with DownloadClient() as client:
        feeds = client.get_rss_feed()
        needs_add = True
        stale_mikan = False
        for feed_path, feed in feeds.items():
            if feed.url == settings.rss_link:
                # Already registered — stop scanning, nothing to add.
                needs_add = False
                break
            if feed_path == "Mikan_RSS":
                stale_mikan = True
        if stale_mikan:
            client.remove_rss_feed("Mikan_RSS")
            logger.info("Remove Old RSS Feed: Mikan_RSS")
        if needs_add:
            client.add_rss_feed(settings.rss_link)
            logger.info(f"Add RSS Feed: {settings.rss_link}")
if __name__ == "__main__":
    # Manual entry point: set up logging, then sync the RSS feed once.
    from module.conf import setup_logger

    setup_logger()
    add_rss_feed()

View File

@@ -1,8 +1,8 @@
import os.path
import threading
import asyncio
import threading
from module.checker import Checker
from module.conf import LEGACY_DATA_PATH
class ProgramStatus(Checker):
@@ -31,12 +31,6 @@ class ProgramStatus(Checker):
self._downloader_status = self.check_downloader()
return self._downloader_status
@property
def torrents_status(self):
if not self._torrents_status:
self._torrents_status = self.check_torrents()
return self._torrents_status
@property
def enable_rss(self):
return self.check_analyser()
@@ -51,4 +45,12 @@ class ProgramStatus(Checker):
@property
def legacy_data(self):
return os.path.exists("data/data.json")
return LEGACY_DATA_PATH.exists()
@property
def version_update(self):
return not self.check_version()
@property
def database(self):
return self.check_database()

View File

@@ -1,14 +1,13 @@
import threading
import time
from .status import ProgramStatus
from module.rss import analyser
from module.conf import settings
from module.downloader import DownloadClient
from module.manager import Renamer, eps_complete
from module.notification import PostNotification
from module.database import BangumiDatabase
from module.conf import settings
from module.rss import RSSAnalyser, RSSEngine
from .status import ProgramStatus
class RSSThread(ProgramStatus):
@@ -17,21 +16,17 @@ class RSSThread(ProgramStatus):
self._rss_thread = threading.Thread(
target=self.rss_loop,
)
self.analyser = RSSAnalyser()
def rss_loop(self):
with DownloadClient() as client:
client.init_downloader()
while not self.stop_event.is_set():
# Analyse RSS
with BangumiDatabase() as db:
new_data = analyser.rss_to_data(rss_link=settings.rss_link, database=db)
if new_data:
db.insert_list(new_data)
bangumi_list = db.not_added()
if bangumi_list:
with DownloadClient() as client:
client.set_rules(bangumi_list)
db.update_list(bangumi_list)
with DownloadClient() as client, RSSEngine() as engine:
# Analyse RSS
rss_list = engine.rss.search_aggregate()
for rss in rss_list:
self.analyser.rss_to_data(rss, engine)
# Run RSS Engine
engine.refresh_rss(client)
if settings.bangumi_manage.eps_complete:
eps_complete()
self.stop_event.wait(settings.program.rss_time)

View File

@@ -1 +1,2 @@
from .bangumi import BangumiDatabase
from .combine import Database
from .engine import engine

View File

@@ -1,155 +1,110 @@
import logging
from module.database.connector import DataConnector
from module.models import BangumiData
from module.ab_decorator import locked
from sqlmodel import Session, select, delete, or_, and_
from sqlalchemy.sql import func
from typing import Optional
from module.models import Bangumi, BangumiUpdate
logger = logging.getLogger(__name__)
class BangumiDatabase(DataConnector):
def __init__(self):
super().__init__()
self.__table_name = "bangumi"
class BangumiDatabase:
def __init__(self, session: Session):
self.session = session
def update_table(self):
db_data = self.__data_to_db(BangumiData())
self._update_table(self.__table_name, db_data)
def add(self, data: Bangumi):
self.session.add(data)
self.session.commit()
logger.debug(f"[Database] Insert {data.official_title} into database.")
@staticmethod
def __data_to_db(data: BangumiData) -> dict:
db_data = data.dict()
for key, value in db_data.items():
if isinstance(value, bool):
db_data[key] = int(value)
elif isinstance(value, list):
db_data[key] = ",".join(value)
return db_data
def add_all(self, datas: list[Bangumi]):
self.session.add_all(datas)
self.session.commit()
logger.debug(f"[Database] Insert {len(datas)} bangumi into database.")
@staticmethod
def __db_to_data(db_data: dict) -> BangumiData:
for key, item in db_data.items():
if isinstance(item, int):
if key not in ["id", "offset", "season", "year"]:
db_data[key] = bool(item)
elif key in ["filter", "rss_link"]:
db_data[key] = item.split(",")
return BangumiData(**db_data)
def __fetch_data(self) -> list[BangumiData]:
values = self._cursor.fetchall()
if values is None:
return []
keys = [x[0] for x in self._cursor.description]
dict_data = [dict(zip(keys, value)) for value in values]
return [self.__db_to_data(x) for x in dict_data]
def insert(self, data: BangumiData):
if self.__check_exist(data):
self.update_one(data)
def update(self, data: Bangumi | BangumiUpdate, _id: int = None) -> bool:
if _id and isinstance(data, BangumiUpdate):
db_data = self.session.get(Bangumi, _id)
elif isinstance(data, Bangumi):
db_data = self.session.get(Bangumi, data.id)
else:
db_data = self.__data_to_db(data)
db_data["id"] = self.gen_id()
self._insert(db_data=db_data, table_name=self.__table_name)
logger.debug(f"[Database] Insert {data.official_title} into database.")
return False
if not db_data:
return False
bangumi_data = data.dict(exclude_unset=True)
for key, value in bangumi_data.items():
setattr(db_data, key, value)
self.session.add(db_data)
self.session.commit()
self.session.refresh(db_data)
logger.debug(f"[Database] Update {data.official_title}")
return True
def insert_list(self, data: list[BangumiData]):
_id = self.gen_id()
for i, item in enumerate(data):
item.id = _id + i
data_list = [self.__data_to_db(x) for x in data]
self._insert_list(data_list=data_list, table_name=self.__table_name)
logger.debug(f"[Database] Insert {len(data)} bangumi into database.")
def update_all(self, datas: list[Bangumi]):
self.session.add_all(datas)
self.session.commit()
logger.debug(f"[Database] Update {len(datas)} bangumi.")
def update_one(self, data: BangumiData) -> bool:
db_data = self.__data_to_db(data)
return self._update(db_data=db_data, table_name=self.__table_name)
def update_list(self, data: list[BangumiData]):
data_list = [self.__data_to_db(x) for x in data]
self._update_list(data_list=data_list, table_name=self.__table_name)
@locked
def update_rss(self, title_raw, rss_set: str):
# Update rss and added
self._cursor.execute(
"""
UPDATE bangumi
SET rss_link = :rss_link, added = 0
WHERE title_raw = :title_raw
""",
{"rss_link": rss_set, "title_raw": title_raw},
)
self._conn.commit()
statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
bangumi = self.session.exec(statement).first()
bangumi.rss_link = rss_set
bangumi.added = False
self.session.add(bangumi)
self.session.commit()
self.session.refresh(bangumi)
logger.debug(f"[Database] Update {title_raw} rss_link to {rss_set}.")
def update_poster(self, title_raw, poster_link: str):
self._cursor.execute(
"""
UPDATE bangumi
SET poster_link = :poster_link
WHERE title_raw = :title_raw
""",
{"poster_link": poster_link, "title_raw": title_raw},
)
self._conn.commit()
statement = select(Bangumi).where(Bangumi.title_raw == title_raw)
bangumi = self.session.exec(statement).first()
bangumi.poster_link = poster_link
self.session.add(bangumi)
self.session.commit()
self.session.refresh(bangumi)
logger.debug(f"[Database] Update {title_raw} poster_link to {poster_link}.")
def delete_one(self, _id: int) -> bool:
self._cursor.execute(
"""
DELETE FROM bangumi WHERE id = :id
""",
{"id": _id},
)
self._conn.commit()
def delete_one(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id)
bangumi = self.session.exec(statement).first()
self.session.delete(bangumi)
self.session.commit()
logger.debug(f"[Database] Delete bangumi id: {_id}.")
return self._cursor.rowcount == 1
def delete_all(self):
self._delete_all(self.__table_name)
statement = delete(Bangumi)
self.session.exec(statement)
self.session.commit()
def search_all(self) -> list[BangumiData]:
dict_data = self._search_datas(self.__table_name)
return [self.__db_to_data(x) for x in dict_data]
def search_all(self) -> list[Bangumi]:
statement = select(Bangumi)
return self.session.exec(statement).all()
def search_id(self, _id: int) -> BangumiData | None:
condition = {"id": _id}
dict_data = self._search_data(table_name=self.__table_name, condition=condition)
if dict_data is None:
def search_id(self, _id: int) -> Optional[Bangumi]:
statement = select(Bangumi).where(Bangumi.id == _id)
bangumi = self.session.exec(statement).first()
if bangumi is None:
logger.warning(f"[Database] Cannot find bangumi id: {_id}.")
return None
return self.__db_to_data(dict_data)
def search_official_title(self, official_title: str) -> BangumiData | None:
dict_data = self._search_data(
table_name=self.__table_name, condition={"official_title": official_title}
)
if dict_data is None:
return None
return self.__db_to_data(dict_data)
else:
logger.debug(f"[Database] Find bangumi id: {_id}.")
return self.session.exec(statement).first()
def match_poster(self, bangumi_name: str) -> str:
data = self._cursor.execute(
"""
SELECT poster_link FROM bangumi
WHERE INSTR(:official_title, official_title) > 0
""",
{"official_title": bangumi_name},
).fetchone()
if not data:
# Use like to match
statement = select(Bangumi).where(
func.instr(bangumi_name, Bangumi.official_title) > 0
)
data = self.session.exec(statement).first()
if data:
return data.poster_link
else:
return ""
poster_link = data[0]
if not poster_link:
return ""
return poster_link
def match_list(self, torrent_list: list, rss_link: str) -> list:
# Match title_raw in database
keys = ["title_raw", "rss_link", "poster_link"]
match_datas = self._search_datas(
table_name=self.__table_name,
keys=keys,
)
match_datas = self.search_all()
if not match_datas:
return torrent_list
# Match title
@@ -157,71 +112,51 @@ class BangumiDatabase(DataConnector):
while i < len(torrent_list):
torrent = torrent_list[i]
for match_data in match_datas:
if match_data.get("title_raw") in torrent.name:
if rss_link not in match_data.get("rss_link"):
match_data["rss_link"] += f",{rss_link}"
self.update_rss(
match_data.get("title_raw"), match_data.get("rss_link")
)
if not match_data.get("poster_link"):
self.update_poster(
match_data.get("title_raw"), torrent.poster_link
)
if match_data.title_raw in torrent.name:
if rss_link not in match_data.rss_link:
match_data.rss_link += f",{rss_link}"
self.update_rss(match_data.title_raw, match_data.rss_link)
# if not match_data.poster_link:
# self.update_poster(match_data.title_raw, torrent.poster_link)
torrent_list.pop(i)
break
else:
i += 1
return torrent_list
def not_complete(self) -> list[BangumiData]:
def match_torrent(self, torrent_name: str) -> Optional[Bangumi]:
statement = select(Bangumi).where(
and_(
func.instr(torrent_name, Bangumi.title_raw) > 0,
Bangumi.deleted == False,
)
)
return self.session.exec(statement).first()
def not_complete(self) -> list[Bangumi]:
# Find eps_complete = False
condition = {"eps_collect": 0}
dict_data = self._search_datas(
table_name=self.__table_name,
condition=condition,
condition = select(Bangumi).where(Bangumi.eps_collect == False)
datas = self.session.exec(condition).all()
return datas
def not_added(self) -> list[Bangumi]:
conditions = select(Bangumi).where(
or_(
Bangumi.added == 0, Bangumi.rule_name is None, Bangumi.save_path is None
)
)
return [self.__db_to_data(x) for x in dict_data]
datas = self.session.exec(conditions).all()
return datas
def not_added(self) -> list[BangumiData]:
self._cursor.execute(
"""
SELECT * FROM bangumi
WHERE added = 0 OR rule_name IS NULL OR save_path IS NULL
"""
)
return self.__fetch_data()
def disable_rule(self, _id: int):
statement = select(Bangumi).where(Bangumi.id == _id)
bangumi = self.session.exec(statement).first()
bangumi.deleted = True
self.session.add(bangumi)
self.session.commit()
self.session.refresh(bangumi)
logger.debug(f"[Database] Disable rule {bangumi.title_raw}.")
def gen_id(self) -> int:
self._cursor.execute(
"""
SELECT id FROM bangumi ORDER BY id DESC LIMIT 1
"""
)
data = self._cursor.fetchone()
if data is None:
return 1
return data[0] + 1
def __check_exist(self, data: BangumiData):
self._cursor.execute(
"""
SELECT * FROM bangumi WHERE official_title = :official_title
""",
{"official_title": data.official_title},
)
values = self._cursor.fetchone()
if values is None:
return False
return True
def __check_list_exist(self, data_list: list[BangumiData]):
for data in data_list:
if self.__check_exist(data):
return True
return False
if __name__ == "__main__":
with BangumiDatabase() as db:
name = "久保同学不放过我(2023)"
print(db.match_poster(name))
def search_rss(self, rss_link: str) -> list[Bangumi]:
statement = select(Bangumi).where(func.instr(rss_link, Bangumi.rss_link) > 0)
return self.session.exec(statement).all()

View File

@@ -0,0 +1,44 @@
from sqlmodel import Session, SQLModel
from .rss import RSSDatabase
from .torrent import TorrentDatabase
from .bangumi import BangumiDatabase
from .user import UserDatabase
from .engine import engine as e
from module.models import User, Bangumi
class Database(Session):
    """Aggregate session exposing per-table helper facades.

    Subclasses ``sqlmodel.Session`` so instances behave as a session /
    context manager directly, while ``rss``, ``torrent``, ``bangumi`` and
    ``user`` expose table-specific operations sharing this one session.
    """

    def __init__(self, engine=e):
        # Keep the engine for create/drop; default is the shared module engine.
        self.engine = engine
        super().__init__(engine)
        self.rss = RSSDatabase(self)
        self.torrent = TorrentDatabase(self)
        self.bangumi = BangumiDatabase(self)
        self.user = UserDatabase(self)

    def create_table(self):
        """Create all SQLModel-declared tables (existing tables untouched)."""
        SQLModel.metadata.create_all(self.engine)

    def drop_table(self):
        """Drop all SQLModel-declared tables."""
        SQLModel.metadata.drop_all(self.engine)

    def migrate(self):
        """Rebuild the schema in place, re-inserting the saved rows.

        Reads current bangumi and user rows, drops and recreates every
        table, then writes them back. Bangumi primary keys are discarded
        so the recreated table assigns fresh ids.
        """
        bangumi_data = self.bangumi.search_all()
        # NOTE(review): sqlmodel's Session.exec expects a statement object;
        # a raw SQL string may require sqlalchemy.text() — TODO confirm.
        user_data = self.exec("SELECT * FROM user").all()
        readd_bangumi = []
        for bangumi in bangumi_data:
            dict_data = bangumi.dict()
            # Drop the primary key so the new table re-assigns it.
            del dict_data["id"]
            readd_bangumi.append(Bangumi(**dict_data))
        self.drop_table()
        self.create_table()
        self.commit()
        # (A redundant re-read of the now-empty bangumi table was removed
        # here; its result was never used.)
        self.bangumi.add_all(readd_bangumi)
        # Assumes at least one user row existed before migration — raises
        # IndexError otherwise. TODO confirm that is the intended signal.
        self.add(User(**user_data[0]))
        self.commit()

View File

@@ -1,175 +0,0 @@
import os
import sqlite3
import logging
from module.conf import DATA_PATH
logger = logging.getLogger(__name__)
class DataConnector:
    """Low-level SQLite helper that builds SQL dynamically from dicts.

    Table and column names are interpolated directly into the SQL text,
    so they must only ever come from trusted internal callers (they are
    derived from model field names in this codebase). Row values are
    passed as bound parameters.
    """

    def __init__(self):
        # Create folder if not exists
        if not os.path.exists(os.path.dirname(DATA_PATH)):
            os.makedirs(os.path.dirname(DATA_PATH))
        self._conn = sqlite3.connect(DATA_PATH)
        self._cursor = self._conn.cursor()

    def _update_table(self, table_name: str, db_data: dict):
        """Create *table_name* if missing and add any columns it lacks.

        ``db_data`` acts as a template row: keys become column names and
        each value's Python type decides the SQLite column type.
        """
        columns = ", ".join(
            [
                f"{key} {self.__python_to_sqlite_type(value)}"
                for key, value in db_data.items()
            ]
        )
        create_table_sql = f"CREATE TABLE IF NOT EXISTS {table_name} ({columns});"
        self._cursor.execute(create_table_sql)
        self._cursor.execute(f"PRAGMA table_info({table_name})")
        existing_columns = {
            column_info[1]: column_info for column_info in self._cursor.fetchall()
        }
        for key, value in db_data.items():
            if key not in existing_columns:
                insert_column = self.__python_to_sqlite_type(value)
                if value is None:
                    value = "NULL"
                # NOTE(review): the DEFAULT value is interpolated as raw text;
                # string defaults would need quoting — appears to rely on the
                # template values being numeric/None. TODO confirm.
                add_column_sql = f"ALTER TABLE {table_name} ADD COLUMN {key} {insert_column} DEFAULT {value};"
                self._cursor.execute(add_column_sql)
        self._conn.commit()
        logger.debug(f"Create / Update table {table_name}.")

    def _insert(self, table_name: str, db_data: dict):
        """Insert one row; keys are column names, values are bound params."""
        columns = ", ".join(db_data.keys())
        values = ", ".join([f":{key}" for key in db_data.keys()])
        self._cursor.execute(
            f"INSERT INTO {table_name} ({columns}) VALUES ({values})", db_data
        )
        self._conn.commit()

    def _insert_list(self, table_name: str, data_list: list[dict]):
        """Bulk-insert rows; all dicts must share the first dict's keys.

        Raises IndexError on an empty list (column names come from
        ``data_list[0]``).
        """
        columns = ", ".join(data_list[0].keys())
        values = ", ".join([f":{key}" for key in data_list[0].keys()])
        self._cursor.executemany(
            f"INSERT INTO {table_name} ({columns}) VALUES ({values})", data_list
        )
        self._conn.commit()

    def _select(self, keys: list[str], table_name: str, condition: str = None) -> dict:
        """Return the first matching row as a dict of *keys* to values.

        NOTE(review): ``condition`` is spliced into the SQL as raw text, and
        ``fetchone()`` returns None when nothing matches, which makes
        ``zip`` raise TypeError — callers appear to rely on a row existing.
        """
        if condition is None:
            self._cursor.execute(f"SELECT {', '.join(keys)} FROM {table_name}")
        else:
            self._cursor.execute(
                f"SELECT {', '.join(keys)} FROM {table_name} WHERE {condition}"
            )
        return dict(zip(keys, self._cursor.fetchone()))

    def _update(self, table_name: str, db_data: dict):
        """Update the row whose ``id`` equals ``db_data["id"]``.

        Returns True when exactly one row changed. Raises ValueError when
        ``db_data`` carries no id.
        """
        _id = db_data.get("id")
        if _id is None:
            raise ValueError("No _id in db_data.")
        set_sql = ", ".join([f"{key} = :{key}" for key in db_data.keys()])
        # _id is interpolated directly; safe only because it is an int here.
        self._cursor.execute(
            f"UPDATE {table_name} SET {set_sql} WHERE id = {_id}", db_data
        )
        self._conn.commit()
        return self._cursor.rowcount == 1

    def _update_list(self, table_name: str, data_list: list[dict]):
        """Bulk-update rows keyed by each dict's ``id``; no-op on empty input."""
        if len(data_list) == 0:
            return
        set_sql = ", ".join(
            [f"{key} = :{key}" for key in data_list[0].keys() if key != "id"]
        )
        self._cursor.executemany(
            f"UPDATE {table_name} SET {set_sql} WHERE id = :id", data_list
        )
        self._conn.commit()

    def _update_section(self, table_name: str, location: dict, update_dict: dict):
        """Update columns in rows matched by ``location`` ({'key','value'}).

        NOTE(review): location value is interpolated as raw text — string
        values would need quoting; assumes numeric values. TODO confirm.
        """
        set_sql = ", ".join([f"{key} = :{key}" for key in update_dict.keys()])
        sql_loc = f"{location['key']} = {location['value']}"
        self._cursor.execute(
            f"UPDATE {table_name} SET {set_sql} WHERE {sql_loc}", update_dict
        )
        self._conn.commit()

    def _delete_all(self, table_name: str):
        """Delete every row in *table_name* (table itself is kept)."""
        self._cursor.execute(f"DELETE FROM {table_name}")
        self._conn.commit()

    def _delete(self, table_name: str, condition: dict):
        """Delete rows matching all key/value pairs in *condition*."""
        condition_sql = " AND ".join([f"{key} = :{key}" for key in condition.keys()])
        self._cursor.execute(
            f"DELETE FROM {table_name} WHERE {condition_sql}", condition
        )
        self._conn.commit()

    def _search(
        self, table_name: str, keys: list[str] | None = None, condition: dict = None
    ):
        """Execute a SELECT; results stay on the cursor for the caller.

        ``condition`` maps columns to equality-bound values; an optional
        ``_custom_condition`` entry is appended verbatim to the WHERE clause.
        """
        if keys is None:
            select_sql = "*"
        else:
            select_sql = ", ".join(keys)
        if condition is None:
            self._cursor.execute(f"SELECT {select_sql} FROM {table_name}")
        else:
            custom_condition = condition.pop("_custom_condition", None)
            condition_sql = " AND ".join(
                [f"{key} = :{key}" for key in condition.keys()]
            ) + (f" AND {custom_condition}" if custom_condition else "")
            self._cursor.execute(
                f"SELECT {select_sql} FROM {table_name} WHERE {condition_sql}",
                condition,
            )

    def _search_data(
        self, table_name: str, keys: list[str] | None = None, condition: dict = None
    ) -> dict:
        """Return the first matching row as a dict (columns default to all).

        NOTE(review): like ``_select``, raises TypeError when no row matches
        (``fetchone()`` returns None).
        """
        if keys is None:
            keys = self.__get_table_columns(table_name)
        self._search(table_name, keys, condition)
        return dict(zip(keys, self._cursor.fetchone()))

    def _search_datas(
        self, table_name: str, keys: list[str] | None = None, condition: dict = None
    ) -> list[dict]:
        """Return all matching rows as dicts (columns default to all)."""
        if keys is None:
            keys = self.__get_table_columns(table_name)
        self._search(table_name, keys, condition)
        return [dict(zip(keys, row)) for row in self._cursor.fetchall()]

    def _table_exists(self, table_name: str) -> bool:
        """True when *table_name* exists in sqlite_master."""
        # The f-prefix below has no placeholders; the name is bound via "?".
        self._cursor.execute(
            f"SELECT name FROM sqlite_master WHERE type='table' AND name=?;",
            (table_name,),
        )
        return len(self._cursor.fetchall()) == 1

    def __get_table_columns(self, table_name: str) -> list[str]:
        """Return the column names of *table_name* via PRAGMA table_info."""
        self._cursor.execute(f"PRAGMA table_info({table_name})")
        return [column_info[1] for column_info in self._cursor.fetchall()]

    @staticmethod
    def __python_to_sqlite_type(value) -> str:
        """Map a Python value to a SQLite column type declaration.

        Note: the ``bool`` branch is unreachable — bool is a subclass of
        int, so True/False are caught by the first ``isinstance(value, int)``
        check (same resulting type, so behavior is unaffected).
        """
        if isinstance(value, int):
            return "INTEGER NOT NULL"
        elif isinstance(value, float):
            return "REAL NOT NULL"
        elif isinstance(value, str):
            return "TEXT NOT NULL"
        elif isinstance(value, bool):
            return "INTEGER NOT NULL"
        elif isinstance(value, list):
            return "TEXT NOT NULL"
        elif value is None:
            return "TEXT"
        else:
            raise ValueError(f"Unsupported data type: {type(value)}")

    def __enter__(self):
        # Context-manager support: `with DataConnector() as db:`.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the connection; uncommitted work is discarded by sqlite3.
        self._conn.close()

View File

@@ -0,0 +1,7 @@
from sqlmodel import create_engine, Session
from module.conf import DATA_PATH
# Application-wide SQLModel engine.
# NOTE(review): create_engine() expects a database URL (e.g. "sqlite:///x.db");
# assumes DATA_PATH is already formatted as one — TODO confirm.
engine = create_engine(DATA_PATH)
# Module-level session bound to the shared engine.
db_session = Session(engine)

View File

@@ -0,0 +1,97 @@
import logging
from sqlmodel import Session, select, delete, and_
from module.models import RSSItem, RSSUpdate
logger = logging.getLogger(__name__)
class RSSDatabase:
    """CRUD helpers for ``RSSItem`` rows, bound to an externally owned session."""

    def __init__(self, session: Session):
        self.session = session

    def add(self, data: RSSItem) -> bool:
        """Insert *data* unless an item with the same URL already exists.

        Returns True when inserted, False when it was a duplicate URL.
        """
        statement = select(RSSItem).where(RSSItem.url == data.url)
        db_data = self.session.exec(statement).first()
        if db_data:
            logger.debug(f"RSS Item {data.url} already exists.")
            return False
        else:
            logger.debug(f"RSS Item {data.url} not exists, adding...")
            self.session.add(data)
            self.session.commit()
            self.session.refresh(data)
            return True

    def add_all(self, data: list[RSSItem]):
        """Insert each item, skipping URL duplicates (see ``add``)."""
        for item in data:
            self.add(item)

    def update(self, _id: int, data: RSSUpdate) -> bool:
        """Apply the explicitly-set fields of *data* to item *_id*.

        Returns False when no such item exists.
        """
        statement = select(RSSItem).where(RSSItem.id == _id)
        db_data = self.session.exec(statement).first()
        if not db_data:
            return False
        # Only fields the caller actually set are copied over.
        dict_data = data.dict(exclude_unset=True)
        for key, value in dict_data.items():
            setattr(db_data, key, value)
        self.session.add(db_data)
        self.session.commit()
        self.session.refresh(db_data)
        return True

    def _set_enabled(self, _id: int, enabled: bool) -> bool:
        # Shared implementation for enable()/disable().
        statement = select(RSSItem).where(RSSItem.id == _id)
        db_data = self.session.exec(statement).first()
        if not db_data:
            return False
        db_data.enabled = enabled
        self.session.add(db_data)
        self.session.commit()
        self.session.refresh(db_data)
        return True

    def enable(self, _id: int):
        """Mark item *_id* enabled; returns False when it does not exist."""
        return self._set_enabled(_id, True)

    def disable(self, _id: int):
        """Mark item *_id* disabled; returns False when it does not exist."""
        return self._set_enabled(_id, False)

    def search_id(self, _id: int) -> RSSItem:
        """Return the item with primary key *_id* (None when missing)."""
        return self.session.get(RSSItem, _id)

    def search_all(self) -> list[RSSItem]:
        """Return every stored RSS item."""
        return self.session.exec(select(RSSItem)).all()

    def search_active(self) -> list[RSSItem]:
        """Return items whose ``enabled`` flag is set."""
        return self.session.exec(select(RSSItem).where(RSSItem.enabled)).all()

    def search_aggregate(self) -> list[RSSItem]:
        """Return items that are both aggregate feeds and enabled."""
        return self.session.exec(
            select(RSSItem).where(and_(RSSItem.aggregate, RSSItem.enabled))
        ).all()

    def delete(self, _id: int) -> bool:
        """Delete item *_id*; True on success, False when the delete fails."""
        condition = delete(RSSItem).where(RSSItem.id == _id)
        try:
            self.session.exec(condition)
            self.session.commit()
            return True
        except Exception:
            # Keep the original best-effort contract, but record the cause
            # (the original captured the exception and never logged it).
            logger.exception("Delete RSS Item failed.")
            return False

    def delete_all(self):
        """Delete every RSS item."""
        condition = delete(RSSItem)
        self.session.exec(condition)
        self.session.commit()

View File

@@ -1,47 +1,57 @@
import logging
from .connector import DataConnector
from sqlmodel import Session, select
from module.models import Torrent
logger = logging.getLogger(__name__)
class TorrentDatabase(DataConnector):
def update_table(self):
table_name = "torrent"
db_data = self.__data_to_db()
self._update_table(table_name, db_data)
class TorrentDatabase:
def __init__(self, session: Session):
self.session = session
def __data_to_db(self, data: SaveTorrent):
db_data = data.dict()
for key, value in db_data.items():
if isinstance(value, bool):
db_data[key] = int(value)
elif isinstance(value, list):
db_data[key] = ",".join(value)
return db_data
def add(self, data: Torrent):
self.session.add(data)
self.session.commit()
self.session.refresh(data)
logger.debug(f"Insert {data.name} in database.")
def __db_to_data(self, db_data: dict):
for key, item in db_data.items():
if isinstance(item, int):
if key not in ["id", "offset", "season", "year"]:
db_data[key] = bool(item)
elif key in ["filter", "rss_link"]:
db_data[key] = item.split(",")
return SaveTorrent(**db_data)
def add_all(self, datas: list[Torrent]):
self.session.add_all(datas)
self.session.commit()
logger.debug(f"Insert {len(datas)} torrents in database.")
def if_downloaded(self, torrent_url: str, torrent_name: str) -> bool:
self._cursor.execute(
"SELECT * FROM torrent WHERE torrent_url = ? OR torrent_name = ?",
(torrent_url, torrent_name),
)
return bool(self._cursor.fetchone())
def update(self, data: Torrent):
self.session.add(data)
self.session.commit()
self.session.refresh(data)
logger.debug(f"Update {data.name} in database.")
def insert(self, data: SaveTorrent):
db_data = self.__data_to_db(data)
columns = ", ".join(db_data.keys())
values = ", ".join([f":{key}" for key in db_data.keys()])
self._cursor.execute(
f"INSERT INTO torrent ({columns}) VALUES ({values})", db_data
)
logger.debug(f"Add {data.torrent_name} into database.")
self._conn.commit()
def update_all(self, datas: list[Torrent]):
self.session.add_all(datas)
self.session.commit()
def update_one_user(self, data: Torrent):
self.session.add(data)
self.session.commit()
self.session.refresh(data)
logger.debug(f"Update {data.name} in database.")
def search(self, _id: int) -> Torrent:
return self.session.exec(select(Torrent).where(Torrent.id == _id)).first()
def search_all(self) -> list[Torrent]:
return self.session.exec(select(Torrent)).all()
def search_rss(self, rss_id: int) -> list[Torrent]:
return self.session.exec(select(Torrent).where(Torrent.rss_id == rss_id)).all()
def check_new(self, torrents_list: list[Torrent]) -> list[Torrent]:
new_torrents = []
old_torrents = self.search_all()
old_urls = [t.url for t in old_torrents]
for torrent in torrents_list:
if torrent.url not in old_urls:
new_torrents.append(torrent)
return new_torrents

View File

@@ -2,73 +2,101 @@ import logging
from fastapi import HTTPException
from module.database.connector import DataConnector
from module.models.user import User, UserUpdate, UserLogin
from module.models import ResponseModel
from module.security.jwt import get_password_hash, verify_password
from module.models.user import User
from sqlmodel import Session, select
logger = logging.getLogger(__name__)
class AuthDB(DataConnector):
def __init__(self):
super().__init__()
self.__table_name = "user"
if not self._table_exists(self.__table_name):
self.__update_table()
def __update_table(self):
db_data = self.__data_to_db(User())
self._update_table(self.__table_name, db_data)
self._insert(self.__table_name, db_data)
@staticmethod
def __data_to_db(data: User) -> dict:
db_data = data.dict()
db_data["password"] = get_password_hash(db_data["password"])
return db_data
@staticmethod
def __db_to_data(db_data: dict) -> User:
return User(**db_data)
class UserDatabase:
def __init__(self, session: Session):
self.session = session
def get_user(self, username):
self._cursor.execute(
f"SELECT * FROM {self.__table_name} WHERE username=?", (username,)
)
result = self._cursor.fetchone()
statement = select(User).where(User.username == username)
result = self.session.exec(statement).first()
if not result:
return None
db_data = dict(zip([x[0] for x in self._cursor.description], result))
return self.__db_to_data(db_data)
raise HTTPException(status_code=404, detail="User not found")
return result
def auth_user(self, username, password) -> bool:
self._cursor.execute(
f"SELECT username, password FROM {self.__table_name} WHERE username=?",
(username,),
)
result = self._cursor.fetchone()
def auth_user(self, user: User):
statement = select(User).where(User.username == user.username)
result = self.session.exec(statement).first()
if not result:
raise HTTPException(status_code=401, detail="User not found")
if not verify_password(password, result[1]):
raise HTTPException(status_code=401, detail="Password error")
return True
return ResponseModel(
status_code=401,
status=False,
msg_en="User not found",
msg_zh="用户不存在"
)
if not verify_password(user.password, result.password):
return ResponseModel(
status_code=401,
status=False,
msg_en="Incorrect password",
msg_zh="密码错误"
)
return ResponseModel(
status_code=200,
status=True,
msg_en="Login successfully",
msg_zh="登录成功"
)
def update_user(self, username, update_user: User):
def update_user(self, username, update_user: UserUpdate):
# Update username and password
new_username = update_user.username
new_password = update_user.password
self._cursor.execute(
f"""
UPDATE {self.__table_name}
SET username = '{new_username}', password = '{get_password_hash(new_password)}'
WHERE username = '{username}'
statement = select(User).where(User.username == username)
result = self.session.exec(statement).first()
if not result:
raise HTTPException(status_code=404, detail="User not found")
if update_user.username:
result.username = update_user.username
if update_user.password:
result.password = get_password_hash(update_user.password)
self.session.add(result)
self.session.commit()
return result
def merge_old_user(self):
# get old data
statement = """
SELECT * FROM user
"""
result = self.session.exec(statement).first()
if not result:
return
# add new data
user = User(username=result.username, password=result.password)
# Drop old table
statement = """
DROP TABLE user
"""
self.session.exec(statement)
# Create new table
statement = """
CREATE TABLE user (
id INTEGER NOT NULL PRIMARY KEY,
username VARCHAR NOT NULL,
password VARCHAR NOT NULL
)
self._conn.commit()
"""
self.session.exec(statement)
self.session.add(user)
self.session.commit()
if __name__ == "__main__":
with AuthDB() as db:
# db.update_user(UserLogin(username="admin", password="adminadmin"), User(username="admin", password="cica1234"))
db.update_user("admin", User(username="estrella", password="cica1234"))
def add_default_user(self):
# Check if user exists
statement = select(User)
try:
result = self.session.exec(statement).all()
except Exception as e:
self.merge_old_user()
result = self.session.exec(statement).all()
if len(result) != 0:
return
# Add default user
user = User(username="admin", password=get_password_hash("adminadmin"))
self.session.add(user)
self.session.commit()

View File

@@ -1,13 +1,10 @@
import logging
import time
from aria2p import Client, ClientException, API
from aria2p import API, Client, ClientException
from module.conf import settings
from module.downloader.exceptions import ConflictError
logger = logging.getLogger(__name__)

View File

@@ -3,9 +3,9 @@ import time
from qbittorrentapi import Client, LoginFailed
from qbittorrentapi.exceptions import (
APIConnectionError,
Conflict409Error,
Forbidden403Error,
APIConnectionError,
)
from module.ab_decorator import qb_connect_failed_wait
@@ -39,12 +39,12 @@ class QbDownloader:
time.sleep(5)
times += 1
except Forbidden403Error:
logger.error(f"Login refused by qBittorrent Server")
logger.info(f"Please release the IP in qBittorrent Server")
logger.error("Login refused by qBittorrent Server")
logger.info("Please release the IP in qBittorrent Server")
break
except APIConnectionError:
logger.error(f"Cannot connect to qBittorrent Server")
logger.info(f"Please check the IP and port in WebUI settings")
logger.error("Cannot connect to qBittorrent Server")
logger.info("Please check the IP and port in WebUI settings")
time.sleep(10)
times += 1
except Exception as e:
@@ -82,10 +82,10 @@ class QbDownloader:
status_filter=status_filter, category=category, tag=tag
)
def torrents_add(self, urls, save_path, category, torrent_files=None):
def add_torrents(self, torrent_urls, torrent_files, save_path, category):
resp = self._client.torrents_add(
is_paused=False,
urls=urls,
urls=torrent_urls,
torrent_files=torrent_files,
save_path=save_path,
category=category,

View File

@@ -1,11 +1,11 @@
import logging
import re
from module.conf import settings
from module.models import Bangumi, Torrent
from module.network import RequestContent
from .path import TorrentPath
from module.models import BangumiData
from module.conf import settings
logger = logging.getLogger(__name__)
@@ -56,25 +56,25 @@ class DownloadClient(TorrentPath):
def init_downloader(self):
prefs = {
"rss_auto_downloading_enabled": True,
"rss_max_articles_per_feed": 5000,
"rss_max_articles_per_feed": 500,
"rss_processing_enabled": True,
"rss_refresh_interval": 30,
}
self.client.prefs_init(prefs=prefs)
try:
self.client.add_category("BangumiCollection")
except Exception as e:
except Exception:
logger.debug("[Downloader] Cannot add new category, maybe already exists.")
if settings.downloader.path == "":
prefs = self.client.get_app_prefs()
settings.downloader.path = self._join_path(prefs["save_path"], "Bangumi")
def set_rule(self, data: BangumiData):
def set_rule(self, data: Bangumi):
data.rule_name = self._rule_name(data)
data.save_path = self._gen_save_path(data)
raw_title = re.escape(data.title_raw)
rule = {
"enable": True,
"mustContain": raw_title,
"mustContain": data.title_raw,
"mustNotContain": "|".join(data.filter),
"useRegex": True,
"episodeFilter": "",
@@ -93,7 +93,7 @@ class DownloadClient(TorrentPath):
f"[Downloader] Add {data.official_title} Season {data.season} to auto download rules."
)
def set_rules(self, bangumi_info: list[BangumiData]):
def set_rules(self, bangumi_info: list[Bangumi]):
logger.debug("[Downloader] Start adding rules.")
for info in bangumi_info:
self.set_rule(info)
@@ -112,19 +112,39 @@ class DownloadClient(TorrentPath):
def delete_torrent(self, hashes):
self.client.torrents_delete(hashes)
logger.info(f"[Downloader] Remove torrents.")
logger.info("[Downloader] Remove torrents.")
def add_torrent(self, torrent: dict):
if self.client.torrents_add(
urls=torrent.get("urls"),
torrent_files=torrent.get("torrent_files"),
save_path=torrent.get("save_path"),
def add_torrent(self, torrent: Torrent | list, bangumi: Bangumi) -> bool:
if not bangumi.save_path:
bangumi.save_path = self._gen_save_path(bangumi)
with RequestContent() as req:
if isinstance(torrent, list):
if len(torrent) == 0:
logger.debug(f"[Downloader] No torrent found: {bangumi.official_title}")
return False
if "magnet" in torrent[0].url:
torrent_url = [t.url for t in torrent]
torrent_file = None
else:
torrent_file = [req.get_content(t.url) for t in torrent]
torrent_url = None
else:
if "magnet" in torrent.url:
torrent_url = torrent.url
torrent_file = None
else:
torrent_file = req.get_content(torrent.url)
torrent_url = None
if self.client.add_torrents(
torrent_urls=torrent_url,
torrent_files=torrent_file,
save_path=bangumi.save_path,
category="Bangumi",
):
logger.debug(f"[Downloader] Add torrent: {torrent.get('save_path')}")
logger.debug(f"[Downloader] Add torrent: {bangumi.official_title}")
return True
else:
logger.error(f"[Downloader] Add torrent failed: {torrent.get('save_path')}")
logger.debug(f"[Downloader] Torrent added before: {bangumi.official_title}")
return False
def move_torrent(self, hashes, location):

View File

@@ -1,13 +1,10 @@
import re
import logging
from os import PathLike
import re
from pathlib import Path
from module.conf import settings
from module.models import BangumiData
if ":\\" in settings.downloader.path:
import ntpath as path
else:
import os.path as path
from module.models import Bangumi, BangumiUpdate
logger = logging.getLogger(__name__)
@@ -22,7 +19,7 @@ class TorrentPath:
subtitle_list = []
for f in info.files:
file_name = f.name
suffix = path.splitext(file_name)[-1]
suffix = Path(file_name).suffix
if suffix.lower() in [".mp4", ".mkv"]:
media_list.append(file_name)
elif suffix.lower() in [".ass", ".srt"]:
@@ -30,10 +27,10 @@ class TorrentPath:
return media_list, subtitle_list
@staticmethod
def _path_to_bangumi(save_path):
def _path_to_bangumi(save_path: PathLike[str] | str):
# Split save path and download path
save_parts = save_path.split(path.sep)
download_parts = settings.downloader.path.split(path.sep)
save_parts = Path(save_path).parts
download_parts = Path(settings.downloader.path).parts
# Get bangumi name and season
bangumi_name = ""
season = 1
@@ -45,22 +42,22 @@ class TorrentPath:
return bangumi_name, season
@staticmethod
def _file_depth(file_path):
return len(file_path.split(path.sep))
def _file_depth(file_path: PathLike[str] | str):
return len(Path(file_path).parts)
def is_ep(self, file_path):
def is_ep(self, file_path: PathLike[str] | str):
return self._file_depth(file_path) <= 2
@staticmethod
def _gen_save_path(data: BangumiData):
def _gen_save_path(data: Bangumi | BangumiUpdate):
folder = (
f"{data.official_title} ({data.year})" if data.year else data.official_title
)
save_path = path.join(settings.downloader.path, folder, f"Season {data.season}")
return save_path
save_path = Path(settings.downloader.path) / folder / f"Season {data.season}"
return str(save_path)
@staticmethod
def _rule_name(data: BangumiData):
def _rule_name(data: Bangumi):
rule_name = (
f"[{data.group_name}] {data.official_title} S{data.season}"
if settings.bangumi_manage.group_tag
@@ -70,4 +67,4 @@ class TorrentPath:
@staticmethod
def _join_path(*args):
return path.join(*args)
return str(Path(*args))

View File

@@ -1,58 +1,58 @@
import logging
from fastapi.responses import JSONResponse
from module.downloader import DownloadClient
from module.models import BangumiData
from module.database import BangumiDatabase
from module.models import Bangumi, ResponseModel
from module.searcher import SearchTorrent
from module.rss import RSSEngine
logger = logging.getLogger(__name__)
class SeasonCollector(DownloadClient):
def add_season_torrents(self, data: BangumiData, torrents, torrent_files=None):
if torrent_files:
download_info = {
"torrent_files": torrent_files,
"save_path": self._gen_save_path(data),
}
return self.add_torrent(download_info)
else:
download_info = {
"urls": [torrent.torrent_link for torrent in torrents],
"save_path": self._gen_save_path(data),
}
return self.add_torrent(download_info)
def collect_season(self, data: BangumiData, link: str = None, proxy: bool = False):
logger.info(f"Start collecting {data.official_title} Season {data.season}...")
def collect_season(self, bangumi: Bangumi, link: str = None):
logger.info(
f"Start collecting {bangumi.official_title} Season {bangumi.season}..."
)
with SearchTorrent() as st:
if not link:
torrents = st.search_season(data)
torrents = st.search_season(bangumi)
else:
torrents = st.get_torrents(link, _filter="|".join(data.filter))
torrent_files = None
if proxy:
torrent_files = [
st.get_content(torrent.torrent_link) for torrent in torrents
]
return self.add_season_torrents(
data=data, torrents=torrents, torrent_files=torrent_files
)
torrents = st.get_torrents(link, bangumi.filter.replace(",", "|"))
if self.add_torrent(torrents, bangumi):
logger.info(f"Collections of {bangumi.official_title} Season {bangumi.season} completed.")
bangumi.eps_collect = True
with RSSEngine() as engine:
engine.bangumi.update(bangumi)
return ResponseModel(
status=True,
status_code=200,
msg_en=f"Collections of {bangumi.official_title} Season {bangumi.season} completed.",
msg_zh=f"收集 {bangumi.official_title}{bangumi.season} 季完成。",
)
else:
logger.warning(f"Collection of {bangumi.official_title} Season {bangumi.season} failed.")
return ResponseModel(
status=False,
status_code=406,
msg_en=f"Collection of {bangumi.official_title} Season {bangumi.season} failed.",
msg_zh=f"收集 {bangumi.official_title}{bangumi.season} 季失败。",
)
def subscribe_season(self, data: BangumiData):
with BangumiDatabase() as db:
@staticmethod
def subscribe_season(data: Bangumi):
with RSSEngine() as engine:
data.added = True
data.eps_collect = True
self.set_rule(data)
db.insert(data)
self.add_rss_feed(data.rss_link[0], item_path=data.official_title)
engine.add_rss(
rss_link=data.rss_link, name=data.official_title, aggregate=False
)
engine.bangumi.add(data)
return engine.download_bangumi(data)
def eps_complete():
with BangumiDatabase() as bd:
datas = bd.not_complete()
with RSSEngine() as engine:
datas = engine.bangumi.not_complete()
if datas:
logger.info("Start collecting full season...")
for data in datas:
@@ -60,4 +60,4 @@ def eps_complete():
with SeasonCollector() as sc:
sc.collect_season(data)
data.eps_collect = True
bd.update_list(datas)
engine.bangumi.update_all(datas)

View File

@@ -1,11 +1,9 @@
import logging
from module.downloader import DownloadClient
from module.parser import TitleParser
from module.models import SubtitleFile, EpisodeFile, Notification
from module.conf import settings
from module.downloader import DownloadClient
from module.models import EpisodeFile, Notification, SubtitleFile
from module.parser import TitleParser
logger = logging.getLogger(__name__)

View File

@@ -1,138 +1,154 @@
import logging
from fastapi.responses import JSONResponse
from module.database import Database
from module.downloader import DownloadClient
from module.models import BangumiData
from module.database import BangumiDatabase
from module.models import Bangumi, BangumiUpdate, ResponseModel
logger = logging.getLogger(__name__)
class TorrentManager(BangumiDatabase):
class TorrentManager(Database):
@staticmethod
def __match_torrents_list(data: BangumiData) -> list:
def __match_torrents_list(data: Bangumi | BangumiUpdate) -> list:
with DownloadClient() as client:
torrents = client.get_torrent_info(status_filter=None)
return [
torrent.hash for torrent in torrents if torrent.save_path == data.save_path
]
def delete_torrents(self, data: BangumiData, client: DownloadClient):
def delete_torrents(self, data: Bangumi, client: DownloadClient):
hash_list = self.__match_torrents_list(data)
if hash_list:
client.delete_torrent(hash_list)
logger.info(f"Delete rule and torrents for {data.official_title}")
return f"Delete {data.official_title} torrents."
return ResponseModel(
status_code=200,
status=True,
msg_en=f"Delete rule and torrents for {data.official_title}",
msg_zh=f"删除 {data.official_title} 规则和种子",
)
else:
return f"Can't find {data.official_title} torrents."
return ResponseModel(
status_code=406,
status=False,
msg_en=f"Can't find torrents for {data.official_title}",
msg_zh=f"无法找到 {data.official_title} 的种子",
)
def delete_rule(self, _id: int | str, file: bool = False):
data = self.search_id(int(_id))
if isinstance(data, BangumiData):
data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi):
with DownloadClient() as client:
client.remove_rule(data.rule_name)
client.remove_rss_feed(data.official_title)
self.delete_one(int(_id))
# client.remove_rule(data.rule_name)
# client.remove_rss_feed(data.official_title)
self.rss.delete(data.official_title)
self.bangumi.delete_one(int(_id))
if file:
torrent_message = self.delete_torrents(data, client)
return JSONResponse(
status_code=200,
content={
"msg": f"Delete {data.official_title} rule. {torrent_message}"
},
)
return torrent_message
logger.info(f"[Manager] Delete rule for {data.official_title}")
return JSONResponse(
return ResponseModel(
status_code=200,
content={"msg": f"Delete rule for {data.official_title}"},
status=True,
msg_en=f"Delete rule for {data.official_title}",
msg_zh=f"删除 {data.official_title} 规则",
)
else:
return JSONResponse(
status_code=406, content={"msg": f"Can't find id {_id}"}
return ResponseModel(
status_code=406,
status=False,
msg_en=f"Can't find id {_id}",
msg_zh=f"无法找到 id {_id}",
)
def disable_rule(self, _id: str | int, file: bool = False):
data = self.search_id(int(_id))
if isinstance(data, BangumiData):
data = self.bangumi.search_id(int(_id))
if isinstance(data, Bangumi):
with DownloadClient() as client:
client.remove_rule(data.rule_name)
# client.remove_rule(data.rule_name)
data.deleted = True
self.update_one(data)
self.bangumi.update(data)
if file:
torrent_message = self.delete_torrents(data, client)
return JSONResponse(
status_code=200,
content={
"msg": f"Disable {data.official_title} rule. {torrent_message}"
},
)
return torrent_message
logger.info(f"[Manager] Disable rule for {data.official_title}")
return JSONResponse(
return ResponseModel(
status_code=200,
content={
"msg": f"Disable {data.official_title} rule.",
},
status=True,
msg_en=f"Disable rule for {data.official_title}",
msg_zh=f"禁用 {data.official_title} 规则",
)
else:
return JSONResponse(
status_code=406, content={"msg": f"Can't find id {_id}"}
return ResponseModel(
status_code=406,
status=False,
msg_en=f"Can't find id {_id}",
msg_zh=f"无法找到 id {_id}",
)
def enable_rule(self, _id: str | int):
data = self.search_id(int(_id))
if isinstance(data, BangumiData):
data = self.bangumi.search_id(int(_id))
if data:
data.deleted = False
self.update_one(data)
with DownloadClient() as client:
client.set_rule(data)
self.bangumi.update(data)
logger.info(f"[Manager] Enable rule for {data.official_title}")
return JSONResponse(
return ResponseModel(
status_code=200,
content={
"msg": f"Enable {data.official_title} rule.",
},
status=True,
msg_en=f"Enable rule for {data.official_title}",
msg_zh=f"启用 {data.official_title} 规则",
)
else:
return JSONResponse(
status_code=406, content={"msg": f"Can't find bangumi id {_id}"}
return ResponseModel(
status_code=406,
status=False,
msg_en=f"Can't find id {_id}",
msg_zh=f"无法找到 id {_id}",
)
def update_rule(self, data: BangumiData):
old_data = self.search_id(data.id)
if not old_data:
logger.error(f"[Manager] Can't find data with {data.id}")
return JSONResponse(
status_code=406, content={"msg": f"Can't find data with {data.id}"}
)
else:
def update_rule(self, bangumi_id, data: BangumiUpdate):
old_data: Bangumi = self.bangumi.search_id(bangumi_id)
if old_data:
# Move torrent
match_list = self.__match_torrents_list(data)
match_list = self.__match_torrents_list(old_data)
with DownloadClient() as client:
path = client._gen_save_path(data)
if match_list:
client.move_torrent(match_list, path)
# Set new download rule
client.remove_rule(data.rule_name)
client.set_rule(data)
self.update_one(data)
return JSONResponse(
self.bangumi.update(data, bangumi_id)
return ResponseModel(
status_code=200,
content={
"msg": f"Set new path for {data.official_title}",
},
status=True,
msg_en=f"Update rule for {data.official_title}",
msg_zh=f"更新 {data.official_title} 规则",
)
else:
logger.error(f"[Manager] Can't find data with {bangumi_id}")
return ResponseModel(
status_code=406,
status=False,
msg_en=f"Can't find data with {bangumi_id}",
msg_zh=f"无法找到 id {bangumi_id} 的数据",
)
def search_all_bangumi(self):
datas = self.search_all()
datas = self.bangumi.search_all()
if not datas:
return []
return [data for data in datas if not data.deleted]
def search_one(self, _id: int | str):
data = self.search_id(int(_id))
data = self.bangumi.search_id(int(_id))
if not data:
logger.error(f"[Manager] Can't find data with {_id}")
return {"status": "error", "msg": f"Can't find data with {_id}"}
return ResponseModel(
status_code=406,
status=False,
msg_en=f"Can't find data with {_id}",
msg_zh=f"无法找到 id {_id} 的数据",
)
else:
return data

View File

@@ -1,5 +1,6 @@
from .bangumi import *
from .bangumi import Bangumi, Episode, BangumiUpdate, Notification
from .config import Config
from .torrent import EpisodeFile, SubtitleFile
from .user import UserLogin
from .torrent import TorrentBase
from .rss import RSSItem, RSSUpdate
from .torrent import EpisodeFile, SubtitleFile, Torrent, TorrentUpdate
from .user import UserLogin, User, UserUpdate
from .response import ResponseModel, APIResponse

View File

@@ -1,26 +1,54 @@
from pydantic import BaseModel, Field
from dataclasses import dataclass
from pydantic import BaseModel
from sqlmodel import SQLModel, Field
from typing import Optional
class BangumiData(BaseModel):
id: int = Field(0, alias="id", title="番剧ID")
official_title: str = Field("official_title", alias="official_title", title="番剧中文名")
year: str | None = Field(None, alias="year", title="番剧年份")
title_raw: str = Field("title_raw", alias="title_raw", title="番剧")
season: int = Field(1, alias="season", title="番剧季度")
season_raw: str | None = Field(None, alias="season_raw", title="番剧季度原名")
group_name: str | None = Field(None, alias="group_name", title="字幕组")
dpi: str | None = Field(None, alias="dpi", title="分辨率")
source: str | None = Field(None, alias="source", title="来源")
subtitle: str | None = Field(None, alias="subtitle", title="字幕")
eps_collect: bool = Field(False, alias="eps_collect", title="是否已收集")
offset: int = Field(0, alias="offset", title="番剧偏移量")
filter: list[str] = Field(["720", "\\d+-\\d+"], alias="filter", title="番剧过滤器")
rss_link: list[str] = Field([], alias="rss_link", title="番剧RSS链接")
poster_link: str | None = Field(None, alias="poster_link", title="番剧海报链接")
added: bool = Field(False, alias="added", title="是否已添加")
rule_name: str | None = Field(None, alias="rule_name", title="番剧规则名")
save_path: str | None = Field(None, alias="save_path", title="番剧保存路径")
class Bangumi(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
official_title: str = Field(
default="official_title", alias="official_title", title="番剧中文"
)
year: Optional[str] = Field(alias="year", title="番剧年份")
title_raw: str = Field(default="title_raw", alias="title_raw", title="番剧原名")
season: int = Field(default=1, alias="season", title="番剧季度")
season_raw: Optional[str] = Field(alias="season_raw", title="番剧季度原名")
group_name: Optional[str] = Field(alias="group_name", title="字幕")
dpi: Optional[str] = Field(alias="dpi", title="分辨率")
source: Optional[str] = Field(alias="source", title="来源")
subtitle: Optional[str] = Field(alias="subtitle", title="字幕")
eps_collect: bool = Field(default=False, alias="eps_collect", title="是否已收集")
offset: int = Field(default=0, alias="offset", title="番剧偏移量")
filter: str = Field(default="720,\\d+-\\d+", alias="filter", title="番剧过滤器")
rss_link: str = Field(default="", alias="rss_link", title="番剧RSS链接")
poster_link: Optional[str] = Field(alias="poster_link", title="番剧海报链接")
added: bool = Field(default=False, alias="added", title="是否已添加")
rule_name: Optional[str] = Field(alias="rule_name", title="番剧规则名")
save_path: Optional[str] = Field(alias="save_path", title="番剧保存路径")
deleted: bool = Field(False, alias="deleted", title="是否已删除")
class BangumiUpdate(SQLModel):
official_title: str = Field(
default="official_title", alias="official_title", title="番剧中文名"
)
year: Optional[str] = Field(alias="year", title="番剧年份")
title_raw: str = Field(default="title_raw", alias="title_raw", title="番剧原名")
season: int = Field(default=1, alias="season", title="番剧季度")
season_raw: Optional[str] = Field(alias="season_raw", title="番剧季度原名")
group_name: Optional[str] = Field(alias="group_name", title="字幕组")
dpi: Optional[str] = Field(alias="dpi", title="分辨率")
source: Optional[str] = Field(alias="source", title="来源")
subtitle: Optional[str] = Field(alias="subtitle", title="字幕")
eps_collect: bool = Field(default=False, alias="eps_collect", title="是否已收集")
offset: int = Field(default=0, alias="offset", title="番剧偏移量")
filter: str = Field(default="720,\\d+-\\d+", alias="filter", title="番剧过滤器")
rss_link: str = Field(default="", alias="rss_link", title="番剧RSS链接")
poster_link: Optional[str] = Field(alias="poster_link", title="番剧海报链接")
added: bool = Field(default=False, alias="added", title="是否已添加")
rule_name: Optional[str] = Field(alias="rule_name", title="番剧规则名")
save_path: Optional[str] = Field(alias="save_path", title="番剧保存路径")
deleted: bool = Field(False, alias="deleted", title="是否已删除")
@@ -28,14 +56,14 @@ class Notification(BaseModel):
official_title: str = Field(..., alias="official_title", title="番剧名")
season: int = Field(..., alias="season", title="番剧季度")
episode: int = Field(..., alias="episode", title="番剧集数")
poster_path: str | None = Field(None, alias="poster_path", title="番剧海报路径")
poster_path: Optional[str] = Field(None, alias="poster_path", title="番剧海报路径")
@dataclass
class Episode:
title_en: str | None
title_zh: str | None
title_jp: str | None
title_en: Optional[str]
title_zh: Optional[str]
title_jp: Optional[str]
season: int
season_raw: str
episode: int

View File

@@ -1,29 +1,38 @@
from os.path import expandvars
from pydantic import BaseModel, Field
# Sub config
class Program(BaseModel):
rss_time: int = Field(7200, description="Sleep time")
rss_time: int = Field(900, description="Sleep time")
rename_time: int = Field(60, description="Rename times in one loop")
webui_port: int = Field(7892, description="WebUI port")
class Downloader(BaseModel):
type: str = Field("qbittorrent", description="Downloader type")
host: str = Field("172.17.0.1:8080", description="Downloader host")
username: str = Field("admin", description="Downloader username")
password: str = Field("adminadmin", description="Downloader password")
host_: str = Field("172.17.0.1:8080", alias="host", description="Downloader host")
username_: str = Field("admin", alias="username", description="Downloader username")
password_: str = Field(
"adminadmin", alias="password", description="Downloader password"
)
path: str = Field("/downloads/Bangumi", description="Downloader path")
ssl: bool = Field(False, description="Downloader ssl")
@property
def host(self):
return expandvars(self.host_)
@property
def username(self):
return expandvars(self.username_)
@property
def password(self):
return expandvars(self.password_)
class RSSParser(BaseModel):
enable: bool = Field(True, description="Enable RSS parser")
type: str = Field("mikan", description="RSS parser type")
token: str = Field("token", description="RSS parser token")
custom_url: str = Field("mikanani.me", description="Custom RSS host url")
parser_type: str = Field("parser", description="Parser type")
filter: list[str] = Field(["720", r"\d+-\d"], description="Filter")
language: str = "zh"
@@ -45,15 +54,31 @@ class Proxy(BaseModel):
type: str = Field("http", description="Proxy type")
host: str = Field("", description="Proxy host")
port: int = Field(0, description="Proxy port")
username: str = Field("", description="Proxy username")
password: str = Field("", description="Proxy password")
username_: str = Field("", alias="username", description="Proxy username")
password_: str = Field("", alias="password", description="Proxy password")
@property
def username(self):
return expandvars(self.username_)
@property
def password(self):
return expandvars(self.password_)
class Notification(BaseModel):
enable: bool = Field(False, description="Enable notification")
type: str = Field("telegram", description="Notification type")
token: str = Field("", description="Notification token")
chat_id: str = Field("", description="Notification chat id")
token_: str = Field("", alias="token", description="Notification token")
chat_id_: str = Field("", alias="chat_id", description="Notification chat id")
@property
def token(self):
return expandvars(self.token_)
@property
def chat_id(self):
return expandvars(self.chat_id_)
class Config(BaseModel):
@@ -64,3 +89,6 @@ class Config(BaseModel):
log: Log = Log()
proxy: Proxy = Proxy()
notification: Notification = Notification()
def dict(self, *args, by_alias=True, **kwargs):
return super().dict(*args, by_alias=by_alias, **kwargs)

View File

@@ -0,0 +1,14 @@
from pydantic import BaseModel, Field
class ResponseModel(BaseModel):
status: bool = Field(..., example=True)
status_code: int = Field(..., example=200)
msg_en: str
msg_zh: str
class APIResponse(BaseModel):
status: bool = Field(..., example=True)
msg_en: str = Field(..., example="Success")
msg_zh: str = Field(..., example="成功")

View File

@@ -0,0 +1,19 @@
from sqlmodel import SQLModel, Field
from typing import Optional
class RSSItem(SQLModel, table=True):
id: int = Field(default=None, primary_key=True, alias="id")
name: Optional[str] = Field(None, alias="name")
url: str = Field("https://mikanani.me", alias="url")
aggregate: bool = Field(False, alias="aggregate")
parser: str = Field("mikan", alias="parser")
enabled: bool = Field(True, alias="enabled")
class RSSUpdate(SQLModel):
name: Optional[str] = Field(None, alias="name")
url: Optional[str] = Field("https://mikanani.me", alias="url")
aggregate: Optional[bool] = Field(True, alias="aggregate")
parser: Optional[str] = Field("mikan", alias="parser")
enabled: Optional[bool] = Field(True, alias="enabled")

View File

@@ -1,16 +1,20 @@
from pydantic import BaseModel, Field
from pydantic import BaseModel
from sqlmodel import SQLModel, Field
from typing import Optional
class TorrentBase(BaseModel):
name: str = Field(...)
torrent_link: str = Field(...)
homepage: str | None = Field(None)
class Torrent(SQLModel, table=True):
id: int = Field(default=None, primary_key=True, alias="id")
bangumi_id: Optional[int] = Field(None, alias="refer_id", foreign_key="bangumi.id")
rss_id: Optional[int] = Field(None, alias="rss_id", foreign_key="rssitem.id")
name: str = Field("", alias="name")
url: str = Field("https://example.com/torrent", alias="url")
homepage: Optional[str] = Field(None, alias="homepage")
downloaded: bool = Field(False, alias="downloaded")
class FileSet(BaseModel):
media_path: str = Field(...)
sc_subtitle: str | None = Field(None)
tc_subtitle: str | None = Field(None)
class TorrentUpdate(SQLModel):
downloaded: bool = Field(False, alias="downloaded")
class EpisodeFile(BaseModel):

View File

@@ -1,14 +1,24 @@
from pydantic import BaseModel, Field
from pydantic import BaseModel
from typing import Optional
from sqlmodel import SQLModel, Field
class User(BaseModel):
class User(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
username: str = Field(
"admin", min_length=4, max_length=20, regex=r"^[a-zA-Z0-9_]+$"
)
password: str = Field("adminadmin", min_length=8)
class UserLogin(BaseModel):
class UserUpdate(SQLModel):
username: Optional[str] = Field(
None, min_length=4, max_length=20, regex=r"^[a-zA-Z0-9_]+$"
)
password: Optional[str] = Field(None, min_length=8)
class UserLogin(SQLModel):
username: str
password: str = Field(..., min_length=8)

View File

@@ -1 +1 @@
from .request_contents import RequestContent, TorrentInfo
from .request_contents import RequestContent

View File

@@ -1,84 +1,52 @@
import re
import logging
import xml.etree.ElementTree
from dataclasses import dataclass
from bs4 import BeautifulSoup
from module.conf import settings
from module.models import Torrent
from .request_url import RequestURL
from .site import mikan_parser
from module.conf import settings
from .site import rss_parser
@dataclass
class TorrentInfo:
name: str
torrent_link: str
homepage: str
_poster_link: str | None = None
_official_title: str | None = None
def __fetch_mikan_info(self):
if self._poster_link is None or self._official_title is None:
with RequestContent() as req:
self._poster_link, self._official_title = req.get_mikan_info(
self.homepage
)
@property
def poster_link(self) -> str:
self.__fetch_mikan_info()
return self._poster_link
@property
def official_title(self) -> str:
self.__fetch_mikan_info()
return self._official_title
logger = logging.getLogger(__name__)
class RequestContent(RequestURL):
# Mikanani RSS
def get_torrents(
self,
_url: str,
_filter: str = "|".join(settings.rss_parser.filter),
limit: int = 100,
retry: int = 3,
) -> list[TorrentInfo]:
try:
soup = self.get_xml(_url, retry)
torrent_titles, torrent_urls, torrent_homepage = mikan_parser(soup)
torrents: list[TorrentInfo] = []
) -> list[Torrent]:
soup = self.get_xml(_url, retry)
if soup:
torrent_titles, torrent_urls, torrent_homepage = rss_parser(soup)
torrents: list[Torrent] = []
for _title, torrent_url, homepage in zip(
torrent_titles, torrent_urls, torrent_homepage
):
if re.search(_filter, _title) is None:
torrents.append(
TorrentInfo(
name=_title, torrent_link=torrent_url, homepage=homepage
)
Torrent(name=_title, url=torrent_url, homepage=homepage)
)
if len(torrents) >= limit:
break
return torrents
except ConnectionError:
else:
logger.warning(f"[Network] Failed to get torrents: {_url}")
return []
def get_mikan_info(self, _url) -> tuple[str, str]:
content = self.get_html(_url)
soup = BeautifulSoup(content, "html.parser")
poster_div = soup.find("div", {"class": "bangumi-poster"})
poster_style = poster_div.get("style")
official_title = soup.select_one(
'p.bangumi-title a[href^="/Home/Bangumi/"]'
).text
if poster_style:
poster_path = poster_style.split("url('")[1].split("')")[0]
return poster_path, official_title
return "", ""
def get_xml(self, _url, retry: int = 3) -> xml.etree.ElementTree.Element:
return xml.etree.ElementTree.fromstring(self.get_url(_url, retry).text)
req = self.get_url(_url, retry)
if req:
return xml.etree.ElementTree.fromstring(req.text)
# API JSON
def get_json(self, _url) -> dict:
return self.get_url(_url).json()
req = self.get_url(_url)
if req:
return req.json()
def post_json(self, _url, data: dict) -> dict:
return self.post_url(_url, data).json()
@@ -90,7 +58,14 @@ class RequestContent(RequestURL):
return self.get_url(_url).text
def get_content(self, _url):
return self.get_url(_url).content
req = self.get_url(_url)
if req:
return req.content
def check_connection(self, _url):
return self.check_url(_url)
def get_rss_title(self, _url):
soup = self.get_xml(_url)
if soup:
return soup.find("./channel/title").text

View File

@@ -1,9 +1,9 @@
import logging
import socket
import time
import requests
import socket
import socks
import logging
from module.conf import settings
@@ -35,7 +35,7 @@ class RequestURL:
break
logger.error(f"[Network] Failed connecting to {url}")
logger.warning("[Network] Please check DNS/Connection settings")
raise ConnectionError(f"Failed connecting to {url}")
return None
def post_url(self, url: str, data: dict, retry=3):
try_time = 0
@@ -59,7 +59,7 @@ class RequestURL:
break
logger.error(f"[Network] Failed connecting to {url}")
logger.warning("[Network] Please check DNS/Connection settings")
raise ConnectionError(f"Failed connecting to {url}")
return None
def check_url(self, url: str):
if "://" not in url:
@@ -68,7 +68,7 @@ class RequestURL:
req = requests.head(url=url, headers=self.header, timeout=5)
req.raise_for_status()
return True
except requests.RequestException as e:
except requests.RequestException:
logger.debug(f"[Network] Cannot connect to {url}.")
return False

View File

@@ -1 +1 @@
from .mikan import mikan_parser
from .mikan import rss_parser

View File

@@ -1,4 +1,4 @@
def mikan_parser(soup):
def rss_parser(soup):
torrent_titles = []
torrent_urls = []
torrent_homepage = []
@@ -7,3 +7,7 @@ def mikan_parser(soup):
torrent_urls.append(item.find("enclosure").attrib["url"])
torrent_homepage.append(item.find("link").text)
return torrent_titles, torrent_urls, torrent_homepage
def mikan_title(soup):
return soup.find("title").text

View File

@@ -1,11 +1,15 @@
import logging
from .plugin import *
from module.models import Notification
from module.conf import settings
from module.database import BangumiDatabase
from module.database import Database
from module.models import Notification
from .plugin import (
BarkNotification,
ServerChanNotification,
TelegramNotification,
WecomNotification,
)
logger = logging.getLogger(__name__)
@@ -32,19 +36,9 @@ class PostNotification:
@staticmethod
def _get_poster(notify: Notification):
with BangumiDatabase() as db:
poster_path = db.match_poster(notify.official_title)
if poster_path:
poster_link = "https://mikanani.me" + poster_path
# text = f"""
# 番剧名称:{notify.official_title}\n季度 第{notify.season}季\n更新集数 第{notify.episode}集\n{poster_link}\n
# """
else:
poster_link = "https://mikanani.me"
# text = f"""
# 番剧名称:{notify.official_title}\n季度 第{notify.season}季\n更新集数 第{notify.episode}集\n
# """
notify.poster_path = poster_link
with Database() as db:
poster_path = db.bangumi.match_poster(notify.official_title)
notify.poster_path = poster_path
def send_msg(self, notify: Notification) -> bool:
self._get_poster(notify)
@@ -61,13 +55,3 @@ class PostNotification:
def __exit__(self, exc_type, exc_val, exc_tb):
self.notifier.__exit__(exc_type, exc_val, exc_tb)
if __name__ == "__main__":
info = Notification(
official_title="久保同学不放过我",
season=2,
episode=1,
)
with PostNotification() as client:
client.send_msg(info)

View File

@@ -1,8 +1,7 @@
import logging
from module.network import RequestContent
from module.models import Notification
from module.network import RequestContent
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
import logging
from module.network import RequestContent
from module.models import Notification
from module.network import RequestContent
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
import logging
from module.network import RequestContent
from module.models import Notification
from module.network import RequestContent
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
import logging
from module.network import RequestContent
from module.models import Notification
from module.network import RequestContent
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
import logging
from module.network import RequestContent
from module.models import Notification
from module.network import RequestContent
logger = logging.getLogger(__name__)

View File

@@ -1,3 +1,4 @@
from .raw_parser import raw_parser
from .torrent_parser import torrent_parser
from .tmdb_parser import tmdb_parser
from .torrent_parser import torrent_parser
from .mikan_parser import mikan_parser

View File

@@ -1,7 +1,8 @@
from module.network import RequestContent
search_url = lambda e: f"https://api.bgm.tv/search/subject/{e}?responseGroup=large"
def search_url(e):
return f"https://api.bgm.tv/search/subject/{e}?responseGroup=large"
def bgm_parser(title):

View File

@@ -0,0 +1,21 @@
from bs4 import BeautifulSoup
from urllib3.util import parse_url
from module.network import RequestContent
def mikan_parser(homepage: str):
root_path = parse_url(homepage).host
with RequestContent() as req:
content = req.get_html(homepage)
soup = BeautifulSoup(content, "html.parser")
poster_div = soup.find("div", {"class": "bangumi-poster"})
poster_style = poster_div.get("style")
official_title = soup.select_one(
'p.bangumi-title a[href^="/Home/Bangumi/"]'
).text
if poster_style:
poster_path = poster_style.split("url('")[1].split("')")[0]
poster_link = f"https://{root_path}{poster_path}"
return poster_link, official_title
return "", ""

View File

@@ -94,23 +94,11 @@ def name_process(name: str):
split = re.split("-", name)
if len(split) == 1:
split_space = split[0].split(" ")
language_pattern = []
for item in split_space:
if re.search(r"[\u4e00-\u9fa5]{2,}", item) is not None:
language_pattern.append(1)
elif re.search(r"[a-zA-Z]{2,}", item) is not None:
language_pattern.append(0)
elif re.search(r"[\u0800-\u4e00]{2,}", item) is not None:
language_pattern.append(2)
split = [split_space[0]]
for i in range(1, len(split_space)):
# 如果当前字符串的语言与上一个字符串的语言相同
if language_pattern[i] == language_pattern[i - 1]:
# 合并这两个字符串
split[-1] += " " + split_space[i]
else:
# 否则,将当前字符串添加到结果列表中
split.append(split_space[i])
for idx, item in enumerate(split_space):
if re.search(r"^[\u4e00-\u9fa5]{2,}", item) is not None:
split_space.remove(item)
split = [item.strip(), " ".join(split_space).strip()]
break
for item in split:
if re.search(r"[\u0800-\u4e00]{2,}", item) and not name_jp:
name_jp = item.strip()
@@ -118,8 +106,6 @@ def name_process(name: str):
name_zh = item.strip()
elif re.search(r"[a-zA-Z]{3,}", item) and not name_en:
name_en = item.strip()
if name_en not in name:
name_en = None
return name_en, name_zh, name_jp
@@ -196,5 +182,5 @@ def raw_parser(raw: str) -> Episode | None:
if __name__ == "__main__":
title = "【极影字幕·毁片党】LoveLive! SunShine!! 幻日的夜羽 -SUNSHINE in the MIRROR- 第01集 TV版 HEVC_opus 1080p"
title = "[动漫国字幕组&LoliHouse] THE MARGINAL SERVICE - 08 [WebRip 1080p HEVC-10bit AAC][简繁内封字幕]"
print(raw_parser(title))

View File

@@ -2,8 +2,11 @@ import re
import time
from dataclasses import dataclass
from module.network import RequestContent
from module.conf import TMDB_API
from module.network import RequestContent
TMDB_URL = "https://api.themoviedb.org"
@dataclass
@@ -14,16 +17,18 @@ class TMDBInfo:
season: list[dict]
last_season: int
year: str
poster_link: str = None
LANGUAGE = {"zh": "zh-CN", "jp": "ja-JP", "en": "en-US"}
search_url = (
lambda e: f"https://api.themoviedb.org/3/search/tv?api_key={TMDB_API}&page=1&query={e}&include_adult=false"
)
info_url = (
lambda e, key: f"https://api.themoviedb.org/3/tv/{e}?api_key={TMDB_API}&language={LANGUAGE[key]}"
)
def search_url(e):
return f"{TMDB_URL}/3/search/tv?api_key={TMDB_API}&page=1&query={e}&include_adult=false"
def info_url(e, key):
return f"{TMDB_URL}/3/tv/{e}?api_key={TMDB_API}&language={LANGUAGE[key]}"
def is_animation(tv_id, language) -> bool:
@@ -36,7 +41,7 @@ def is_animation(tv_id, language) -> bool:
return False
def get_season(seasons: list) -> int:
def get_season(seasons: list) -> tuple[int, str]:
ss = [s for s in seasons if s["air_date"] is not None]
ss = sorted(ss, key=lambda e: e.get("air_date"), reverse=True)
for season in ss:
@@ -45,7 +50,7 @@ def get_season(seasons: list) -> int:
[year, _, _] = date
now_year = time.localtime().tm_year
if int(year) <= now_year:
return int(re.findall(r"\d", season.get("season"))[0])
return int(re.findall(r"\d", season.get("season"))[0]), season.get("poster_path")
def tmdb_parser(title, language) -> TMDBInfo | None:
@@ -71,10 +76,16 @@ def tmdb_parser(title, language) -> TMDBInfo | None:
}
for s in info_content.get("seasons")
]
last_season = get_season(season)
last_season, poster_path = get_season(season)
if poster_path is None:
poster_path = info_content.get("poster_path")
original_title = info_content.get("original_name")
official_title = info_content.get("name")
year_number = info_content.get("first_air_date").split("-")[0]
if poster_path:
poster_link = "https://image.tmdb.org/t/p/w300" + poster_path
else:
poster_link = None
return TMDBInfo(
id,
official_title,
@@ -82,6 +93,11 @@ def tmdb_parser(title, language) -> TMDBInfo | None:
season,
last_season,
str(year_number),
poster_link,
)
else:
return None
if __name__ == '__main__':
print(tmdb_parser("魔法禁书目录", "zh"))

View File

@@ -1,7 +1,6 @@
import re
import logging
import os.path as unix_path
import ntpath as win_path
from pathlib import Path
import re
from module.models import EpisodeFile, SubtitleFile
@@ -23,11 +22,16 @@ SUBTITLE_LANG = {
}
def split_path(torrent_path: str) -> str:
if PLATFORM == "Windows":
return win_path.split(torrent_path)[-1]
else:
return unix_path.split(torrent_path)[-1]
def get_path_basename(torrent_path: str) -> str:
"""
Returns the basename of a path string.
:param torrent_path: A string representing a path to a file.
:type torrent_path: str
:return: A string representing the basename of the given path.
:rtype: str
"""
return Path(torrent_path).name
def get_group(group_and_title) -> tuple[str | None, str]:
@@ -64,7 +68,7 @@ def torrent_parser(
season: int | None = None,
file_type: str = "media",
) -> EpisodeFile | SubtitleFile:
media_path = split_path(torrent_path)
media_path = get_path_basename(torrent_path)
for rule in RULES:
if torrent_name:
match_obj = re.match(rule, torrent_name, re.I)
@@ -77,7 +81,7 @@ def torrent_parser(
else:
title, _ = get_season_and_title(title)
episode = int(match_obj.group(2))
suffix = unix_path.splitext(torrent_path)[-1]
suffix = Path(torrent_path).suffix
if file_type == "media":
return EpisodeFile(
media_path=torrent_path,

View File

@@ -1,9 +1,9 @@
import logging
from .analyser import raw_parser, torrent_parser, tmdb_parser
from module.models import BangumiData
from module.conf import settings
from module.models import Bangumi
from .analyser import raw_parser, tmdb_parser, torrent_parser, mikan_parser
logger = logging.getLogger(__name__)
@@ -26,20 +26,18 @@ class TitleParser:
@staticmethod
def tmdb_parser(title: str, season: int, language: str):
official_title, tmdb_season, year = title, season, None
tmdb_info = tmdb_parser(title, language)
if tmdb_info:
logger.debug(f"TMDB Matched, official title is {tmdb_info.title}")
tmdb_season = tmdb_info.last_season if tmdb_info.last_season else season
official_title = tmdb_info.title
year = tmdb_info.year
return tmdb_info.title, tmdb_season, tmdb_info.year, tmdb_info.poster_link
else:
logger.warning(f"Cannot match {title} in TMDB. Use raw title instead.")
logger.warning("Please change bangumi info manually.")
return official_title, tmdb_season, year
return title, season, None, None
@staticmethod
def raw_parser(raw: str, rss_link: str) -> BangumiData | None:
def raw_parser(raw: str) -> Bangumi | None:
language = settings.rss_parser.language
try:
episode = raw_parser(raw)
@@ -60,7 +58,8 @@ class TitleParser:
else:
official_title = title_raw
_season = episode.season
data = BangumiData(
logger.debug(f"RAW:{raw} >> {title_raw}")
return Bangumi(
official_title=official_title,
title_raw=title_raw,
season=_season,
@@ -71,12 +70,13 @@ class TitleParser:
subtitle=episode.sub,
eps_collect=False if episode.episode > 1 else True,
offset=0,
filter=settings.rss_parser.filter,
rss_link=[rss_link],
filter=",".join(settings.rss_parser.filter),
)
logger.debug(f"RAW:{raw} >> {title_raw}")
return data
except Exception as e:
logger.debug(e)
logger.warning(f"Cannot parse {raw}.")
return None
@staticmethod
def mikan_parser(homepage: str) -> tuple[str, str]:
return mikan_parser(homepage)

View File

@@ -1,4 +1,2 @@
from .analyser import RSSAnalyser
analyser = RSSAnalyser()
from .engine import RSSEngine

View File

@@ -1,37 +1,40 @@
import re
import logging
import re
from .engine import RSSEngine
from module.network import RequestContent, TorrentInfo
from module.parser import TitleParser
from module.models import BangumiData
from module.database import BangumiDatabase
from module.conf import settings
from module.models import Bangumi, Torrent, RSSItem, ResponseModel
from module.network import RequestContent
from module.parser import TitleParser
logger = logging.getLogger(__name__)
class RSSAnalyser:
def __init__(self):
self._title_analyser = TitleParser()
with BangumiDatabase() as db:
db.update_table()
def official_title_parser(self, data: BangumiData, mikan_title: str):
if settings.rss_parser.parser_type == "mikan":
data.official_title = mikan_title if mikan_title else data.official_title
elif settings.rss_parser.parser_type == "tmdb":
tmdb_title, season, year = self._title_analyser.tmdb_parser(
data.official_title, data.season, settings.rss_parser.language
class RSSAnalyser(TitleParser):
def official_title_parser(self, bangumi: Bangumi, rss: RSSItem, torrent: Torrent):
if rss.parser == "mikan":
try:
bangumi.poster_link, bangumi.official_title = self.mikan_parser(
torrent.homepage
)
except AttributeError:
logger.warning("[Parser] Mikan torrent has no homepage info.")
pass
elif rss.parser == "tmdb":
tmdb_title, season, year, poster_link = self.tmdb_parser(
bangumi.official_title, bangumi.season, settings.rss_parser.language
)
data.official_title = tmdb_title
data.year = year
data.season = season
bangumi.official_title = tmdb_title
bangumi.year = year
bangumi.season = season
bangumi.poster_link = poster_link
else:
pass
data.official_title = re.sub(r"[/:.\\]", " ", data.official_title)
bangumi.official_title = re.sub(r"[/:.\\]", " ", bangumi.official_title)
@staticmethod
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list:
def get_rss_torrents(rss_link: str, full_parse: bool = True) -> list[Torrent]:
with RequestContent() as req:
if full_parse:
rss_torrents = req.get_torrents(rss_link)
@@ -40,61 +43,53 @@ class RSSAnalyser:
return rss_torrents
def torrents_to_data(
self, torrents: list, rss_link: str, full_parse: bool = True
self, torrents: list[Torrent], rss: RSSItem, full_parse: bool = True
) -> list:
new_data = []
for torrent in torrents:
data = self._title_analyser.raw_parser(raw=torrent.name, rss_link=rss_link)
if data and data.title_raw not in [i.title_raw for i in new_data]:
try:
poster_link, mikan_title = (
torrent.poster_link,
torrent.official_title,
)
except AttributeError:
poster_link, mikan_title = None, None
data.poster_link = poster_link
self.official_title_parser(data, mikan_title)
bangumi = self.raw_parser(raw=torrent.name)
if bangumi and bangumi.title_raw not in [i.title_raw for i in new_data]:
self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
if not full_parse:
return [data]
new_data.append(data)
logger.debug(f"[RSS] New title found: {data.official_title}")
return [bangumi]
new_data.append(bangumi)
logger.info(f"[RSS] New bangumi founded: {bangumi.official_title}")
return new_data
def torrent_to_data(
self, torrent: TorrentInfo, rss_link: str | None = None
) -> BangumiData:
data = self._title_analyser.raw_parser(raw=torrent.name, rss_link=rss_link)
if data:
try:
poster_link, mikan_title = (
torrent.poster_link,
torrent.official_title,
)
except AttributeError:
poster_link, mikan_title = None, None
data.poster_link = poster_link
self.official_title_parser(data, mikan_title)
return data
def torrent_to_data(self, torrent: Torrent, rss: RSSItem) -> Bangumi:
bangumi = self.raw_parser(raw=torrent.name)
if bangumi:
self.official_title_parser(bangumi=bangumi, rss=rss, torrent=torrent)
bangumi.rss_link = rss.url
return bangumi
def rss_to_data(
self, rss_link: str, database: BangumiDatabase, full_parse: bool = True
) -> list[BangumiData]:
rss_torrents = self.get_rss_torrents(rss_link, full_parse)
torrents_to_add = database.match_list(rss_torrents, rss_link)
self, rss: RSSItem, engine: RSSEngine, full_parse: bool = True
) -> list[Bangumi]:
rss_torrents = self.get_rss_torrents(rss.url, full_parse)
torrents_to_add = engine.bangumi.match_list(rss_torrents, rss.url)
if not torrents_to_add:
logger.debug("[RSS] No new title has been found.")
return []
# New List
new_data = self.torrents_to_data(torrents_to_add, rss_link, full_parse)
new_data = self.torrents_to_data(torrents_to_add, rss, full_parse)
if new_data:
# Add to database
engine.bangumi.add_all(new_data)
return new_data
else:
return []
def link_to_data(self, link: str) -> BangumiData:
torrents = self.get_rss_torrents(link, False)
def link_to_data(self, rss: RSSItem) -> Bangumi | ResponseModel:
torrents = self.get_rss_torrents(rss.url, False)
for torrent in torrents:
data = self.torrent_to_data(torrent, link)
data = self.torrent_to_data(torrent, rss)
if data:
return data
else:
return ResponseModel(
status=False,
status_code=406,
msg_en="No new title has been found.",
msg_zh="没有找到新的番剧。",
)

View File

@@ -0,0 +1,147 @@
import re
import logging
from typing import Optional
from module.models import Bangumi, RSSItem, Torrent, ResponseModel
from module.network import RequestContent
from module.downloader import DownloadClient
from module.database import Database, engine
logger = logging.getLogger(__name__)
class RSSEngine(Database):
def __init__(self, _engine=engine):
super().__init__(_engine)
self._to_refresh = False
@staticmethod
def _get_torrents(rss: RSSItem) -> list[Torrent]:
with RequestContent() as req:
torrents = req.get_torrents(rss.url)
# Add RSS ID
for torrent in torrents:
torrent.rss_id = rss.id
return torrents
def get_rss_torrents(self, rss_id: int) -> list[Torrent]:
rss = self.rss.search_id(rss_id)
if rss:
return self.torrent.search_rss(rss_id)
else:
return []
def add_rss(self, rss_link: str, name: str | None = None, aggregate: bool = True, parser: str = "mikan"):
if not name:
with RequestContent() as req:
name = req.get_rss_title(rss_link)
if not name:
return ResponseModel(
status=False,
status_code=406,
msg_en="Failed to get RSS title.",
msg_zh="无法获取 RSS 标题。",
)
rss_data = RSSItem(name=name, url=rss_link, aggregate=aggregate, parser=parser)
if self.rss.add(rss_data):
return ResponseModel(
status=True,
status_code=200,
msg_en="RSS added successfully.",
msg_zh="RSS 添加成功。",
)
else:
return ResponseModel(
status=False,
status_code=406,
msg_en="RSS added failed.",
msg_zh="RSS 添加失败。",
)
def disable_list(self, rss_id_list: list[int]):
for rss_id in rss_id_list:
self.rss.disable(rss_id)
return ResponseModel(
status=True,
status_code=200,
msg_en="Disable RSS successfully.",
msg_zh="禁用 RSS 成功。",
)
def enable_list(self, rss_id_list: list[int]):
for rss_id in rss_id_list:
self.rss.enable(rss_id)
return ResponseModel(
status=True,
status_code=200,
msg_en="Enable RSS successfully.",
msg_zh="启用 RSS 成功。",
)
def delete_list(self, rss_id_list: list[int]):
for rss_id in rss_id_list:
self.rss.delete(rss_id)
return ResponseModel(
status=True,
status_code=200,
msg_en="Delete RSS successfully.",
msg_zh="删除 RSS 成功。",
)
def pull_rss(self, rss_item: RSSItem) -> list[Torrent]:
torrents = self._get_torrents(rss_item)
new_torrents = self.torrent.check_new(torrents)
return new_torrents
def match_torrent(self, torrent: Torrent) -> Optional[Bangumi]:
matched: Bangumi = self.bangumi.match_torrent(torrent.name)
if matched:
_filter = matched.filter.replace(",", "|")
if not re.search(_filter, torrent.name, re.IGNORECASE):
torrent.bangumi_id = matched.id
return matched
return None
def refresh_rss(self, client: DownloadClient, rss_id: Optional[int] = None):
# Get All RSS Items
if not rss_id:
rss_items: list[RSSItem] = self.rss.search_active()
else:
rss_item = self.rss.search_id(rss_id)
rss_items = [rss_item] if rss_item else []
# From RSS Items, get all torrents
logger.debug(f"[Engine] Get {len(rss_items)} RSS items")
for rss_item in rss_items:
new_torrents = self.pull_rss(rss_item)
# Get all enabled bangumi data
for torrent in new_torrents:
matched_data = self.match_torrent(torrent)
if matched_data:
if client.add_torrent(torrent, matched_data):
logger.debug(f"[Engine] Add torrent {torrent.name} to client")
torrent.downloaded = True
# Add all torrents to database
self.torrent.add_all(new_torrents)
def download_bangumi(self, bangumi: Bangumi):
with RequestContent() as req:
torrents = req.get_torrents(bangumi.rss_link, bangumi.filter.replace(",", "|"))
if torrents:
with DownloadClient() as client:
client.add_torrent(torrents, bangumi)
self.torrent.add_all(torrents)
return ResponseModel(
status=True,
status_code=200,
msg_en=f"[Engine] Download {bangumi.official_title} successfully.",
msg_zh=f"下载 {bangumi.official_title} 成功。",
)
else:
return ResponseModel(
status=False,
status_code=406,
msg_en=f"[Engine] Download {bangumi.official_title} failed.",
msg_zh=f"[Engine] 下载 {bangumi.official_title} 失败。",
)

View File

@@ -1,57 +0,0 @@
import logging
from module.database import BangumiDatabase
from module.downloader import DownloadClient
from module.network import RequestContent
from module.conf import settings
from module.models import BangumiData
logger = logging.getLogger(__name__)
def matched(torrent_title: str):
with BangumiDatabase() as db:
return db.match_torrent(torrent_title)
def save_path(data: BangumiData):
folder = (
f"{data.official_title}({data.year})" if data.year else f"{data.official_title}"
)
season = f"Season {data.season}"
return path.join(
settings.downloader.path,
folder,
season,
)
def add_download(data: BangumiData, torrent: TorrentInfo):
torrent = {
"url": torrent.url,
"save_path": save_path(data),
}
with DownloadClient() as client:
client.add_torrent(torrent)
with TorrentDatabase() as db:
db.add_torrent(torrent)
def downloaded(torrent: TorrentInfo):
with TorrentDatabase() as db:
return db.if_downloaded(torrent)
def get_downloads(rss_link: str):
with RequestContent() as req:
torrents = req.get_torrents(rss_link)
for torrent in torrents:
if not downloaded(torrent):
data = matched(torrent.title)
if data:
add_download(data, torrent)
logger.info(f"Add {torrent.title} to download list")
else:
logger.debug(f"{torrent.title} not matched")
else:
logger.debug(f"{torrent.title} already downloaded")

View File

@@ -1,14 +0,0 @@
from module.network import RequestContent
from module.conf import settings
class RSSSearcher(RequestContent):
def __search_url(self, keywords: str) -> str:
keywords.replace(" ", "+")
url = f"{settings.rss_parser.custom_url}/RSS/Search?keyword={keywords}"
return url
def search_keywords(self, keywords: str) -> list[dict]:
url = self.__search_url(keywords)
torrents = self.get_torrents(url)
return torrents

View File

@@ -1 +1,2 @@
from .searcher import SearchTorrent
from .provider import SEARCH_CONFIG

Some files were not shown because too many files have changed in this diff Show More