主程序迁移完成

This commit is contained in:
EstrellaXD
2023-05-10 18:59:53 +08:00
parent ceeca52603
commit 2ca65f115b
17 changed files with 243 additions and 204 deletions

View File

@@ -1,7 +1,7 @@
import os
import signal
import logging
import uvicorn
import threading
import time
from fastapi import Request
from fastapi.responses import HTMLResponse, RedirectResponse
@@ -9,86 +9,19 @@ from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from module.api import router
from module.sub_thread import start_thread, start_program, stop_thread, stop_event
from module.conf import VERSION, settings, setup_logger
from module.rss import RSSAnalyser
from module.manager import Renamer
from module.conf.uvicorn_logging import logging_config
logger = logging.getLogger(__name__)
rss_link = settings.rss_link()
stop_event = threading.Event()
def show_info():
with open("icon", "r") as f:
for line in f.readlines():
logger.info(line.strip("\n"))
logger.info(
f"Version {VERSION} Author: EstrellaXD Twitter: https://twitter.com/Estrella_Pan"
)
logger.info("GitHub: https://github.com/EstrellaXD/Auto_Bangumi/")
logger.info("Starting AutoBangumi...")
def rss_loop(stop_event, rss_link: str):
rss_analyser = RSSAnalyser()
while not stop_event.is_set():
rss_analyser.run(rss_link)
stop_event.wait(settings.program.rss_time)
def rename_loop(stop_event):
while not stop_event.is_set():
with Renamer() as renamer:
renamer.rename()
stop_event.wait(settings.program.rename_time)
rss_thread = threading.Thread(
target=rss_loop,
args=(stop_event, rss_link),
)
rename_thread = threading.Thread(
target=rename_loop,
args=(stop_event,),
)
def stop_thread():
global rss_thread, rename_thread
if not stop_event.is_set():
stop_event.set()
rename_thread.join()
rss_thread.join()
def start_thread():
global rss_thread, rename_thread
if stop_event.is_set():
stop_event.clear()
time.sleep(1)
settings.load()
rss_link = settings.rss_link()
new_level = logging.DEBUG if settings.log.debug_enable else logging.INFO
setup_logger(new_level)
print(new_level)
rss_thread = threading.Thread(target=rss_loop, args=(stop_event, rss_link))
rename_thread = threading.Thread(target=rename_loop, args=(stop_event,))
rss_thread.start()
rename_thread.start()
return {"status": "ok"}
@router.on_event("startup")
async def startup():
global rss_thread, rename_thread
log_level = logging.DEBUG if settings.log.debug_enable else logging.INFO
setup_logger(log_level)
show_info()
rss_thread.start()
rename_thread.start()
start_program()
@router.on_event("shutdown")
@@ -116,6 +49,22 @@ async def stop():
return {"status": "ok"}
@router.get("/api/v1/status", tags=["program"])
async def status():
if stop_event.is_set():
return {"status": "stop"}
else:
return {"status": "running"}
@router.get("/api/v1/shutdown", tags=["program"])
async def shutdown_program():
stop_thread()
logger.info("Shutting down program...")
os.kill(os.getpid(), signal.SIGINT)
return {"status": "ok"}
if VERSION != "DEV_VERSION":
router.mount("/assets", StaticFiles(directory="templates/assets"), name="assets")
templates = Jinja2Templates(directory="templates")
@@ -132,6 +81,9 @@ else:
if __name__ == "__main__":
if not os.path.isdir("data"):
os.mkdir("data")
uvicorn.run(
"main:router", host="0.0.0.0", port=settings.program.webui_port, log_config=logging_config, reload=True
router, host="0.0.0.0", port=settings.program.webui_port,
log_config=logging_config,
)

View File

@@ -10,9 +10,10 @@ from module.utils import json_config
from module.models.api import *
from module.models import Config
logger = logging.getLogger(__name__)
router = FastAPI()
api_func = APIProcess(settings)
api_func = APIProcess()
@router.get("/api/v1/data", tags=["info"])

View File

@@ -1,37 +0,0 @@
import os
import logging
import asyncio
from module.conf import LOG_PATH, VERSION
from module.manager import Renamer, FullSeasonGet
from module.rss import RSSAnalyser
from module.models import Config
logger = logging.getLogger(__name__)
def reset_log():
if os.path.exists(LOG_PATH):
os.remove(LOG_PATH)
async def rss_loop(
rss_link: str,
settings: Config,
):
with RSSAnalyser() as analyser:
analyser.rss_to_datas(rss_link)
if settings.bangumi_manage.eps_complete:
with FullSeasonGet(settings) as season:
season.eps_complete()
await asyncio.sleep(settings.program.sleep_time)
async def rename_loop(renamer: Renamer, wait_time: int = 360):
renamer.rename()
await asyncio.sleep(wait_time)

View File

@@ -3,7 +3,7 @@ import logging
LOG_PATH = "data/log.txt"
def setup_logger(level):
def setup_logger(level: int = logging.INFO):
logging.addLevelName(logging.DEBUG, "DEBUG:")
logging.addLevelName(logging.INFO, "INFO:")
logging.addLevelName(logging.WARNING, "WARNING:")

View File

@@ -1,38 +1,38 @@
from .log import LOG_PATH
logging_config = {
"version": 1,
"disable_existing_loggers": False,
"loggers": {
"uvicorn.error": {
"level": "INFO",
"handlers": ["default"],
"propagate": True,
},
"uvicorn.asgi": { # 更改 "uvicorn.access" 为 "uvicorn.asgi"
"level": "INFO",
"handlers": ["access"],
"propagate": True,
},
},
"handlers": {
"default": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "default",
},
"access": {
"class": "logging.StreamHandler",
"level": "DEBUG",
"formatter": "access",
},
},
"formatters": {
"default": {
"format": "[%(asctime)s] %(levelname)-8s %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
}
},
"handlers": {
"file": {
"class": "logging.FileHandler",
"filename": LOG_PATH,
"formatter": "default",
"encoding": "utf-8",
},
"access": {
"format": "[%(asctime)s] %(levelname)s: %(client_addr)s - \"%(request_line)s\" %(status_code)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
"console": {
"class": "logging.StreamHandler",
"formatter": "default",
},
},
"loggers": {
"uvicorn": {
"handlers": ["file", "console"],
"level": "INFO",
},
"uvicorn.error": {
"level": "INFO",
},
"uvicorn.access": {
"handlers": ["file", "console"],
"level": "INFO",
"propagate": False,
},
},
}

View File

@@ -1,2 +1 @@
from .download_client import DownloadClient
from .api_func import APIProcess

View File

@@ -1,12 +1,11 @@
import re
import logging
from module.core import DownloadClient
from module.downloader import DownloadClient
from module.manager import FullSeasonGet
from module.rss import RSSAnalyser
from module.utils import json_config
from module.conf import DATA_PATH
from module.conf.config import CONFIG_PATH
from module.conf import DATA_PATH, settings
from module.models import Config
from module.network import RequestContent
@@ -16,10 +15,10 @@ logger = logging.getLogger(__name__)
class APIProcess:
def __init__(self, settings: Config):
def __init__(self):
self._rss_analyser = RSSAnalyser()
self._client = DownloadClient()
self._full_season_get = FullSeasonGet(settings)
self._full_season_get = FullSeasonGet()
self._custom_url = settings.rss_parser.custom_url
def link_process(self, link):
@@ -78,12 +77,12 @@ class APIProcess:
@staticmethod
def update_config(config: Config):
save_config_to_file(config, CONFIG_PATH)
settings.load()
return {"message": "Success"}
@staticmethod
def get_config() -> dict:
return json_config.load(CONFIG_PATH)
return settings.dict()
def get_rss(self, full_path: str):
url = f"https://mikanani.me/RSS/{full_path}"

View File

@@ -1 +1 @@
from .operator import DataOperator
from .bangumi import BangumiDatabase

View File

@@ -6,12 +6,11 @@ from module.models import BangumiData
logger = logging.getLogger(__name__)
class DataOperator(DataConnector):
class BangumiDatabase(DataConnector):
def __init__(self):
super().__init__()
self.__update_table()
def __update_table(self):
def update_table(self):
table_name = "bangumi"
db_data = self.__data_to_db(BangumiData())
columns = ", ".join([f"{key} {self.__python_to_sqlite_type(value)}" for key, value in db_data.items()])
@@ -63,6 +62,14 @@ class DataOperator(DataConnector):
db_data[key] = item.split(",")
return BangumiData(**db_data)
def __fetch_data(self) -> list[BangumiData] | None:
values = self._cursor.fetchall()
if values is None:
return None
keys = [x[0] for x in self._cursor.description]
dict_data = [dict(zip(keys, value)) for value in values]
return [self.__db_to_data(x) for x in dict_data]
def insert(self, data: BangumiData):
db_data = self.__data_to_db(data)
columns = ", ".join(db_data.keys())
@@ -86,6 +93,12 @@ class DataOperator(DataConnector):
self._conn.commit()
return self._cursor.rowcount == 1
def update_list(self, data: list[BangumiData]):
db_data = [self.__data_to_db(x) for x in data]
update_columns = ", ".join([f"{key} = :{key}" for key in db_data[0].keys() if key != "id"])
self._cursor.executemany(f"UPDATE bangumi SET {update_columns} WHERE id = :id", db_data)
self._conn.commit()
def update_rss(self, title_raw, rss_set: str):
# Update rss and added
self._cursor.execute(
@@ -190,19 +203,22 @@ class DataOperator(DataConnector):
self.update_rss(title_raw, rss_set)
return titles
def get_to_complete(self) -> list[BangumiData] | None:
def not_complete(self) -> list[BangumiData] | None:
# Find eps_complete = False
self._cursor.execute(
"""
SELECT * FROM bangumi WHERE eps_collect = 1
SELECT * FROM bangumi WHERE eps_collect = 0
"""
)
values = self._cursor.fetchall()
if values is None:
return None
keys = [x[0] for x in self._cursor.description]
dict_data = [dict(zip(keys, value)) for value in values]
return [self.__db_to_data(x) for x in dict_data]
return self.__fetch_data()
    def not_added(self) -> list[BangumiData] | None:
        """Return all bangumi rows not yet registered with the download
        client (``added = 0``); result shape follows ``__fetch_data``."""
        self._cursor.execute(
            """
            SELECT * FROM bangumi WHERE added = 0
            """
        )
        return self.__fetch_data()
def gen_id(self) -> int:
self._cursor.execute(

View File

@@ -1,16 +1,2 @@
from module.models import Config
from .download_client import DownloadClient
def getClient(settings: Config):
# TODO 多下载器支持
type = settings.downloader.type
host = settings.downloader.host
username = settings.downloader.username
password = settings.downloader.password
ssl = settings.downloader.ssl
if type == "qbittorrent":
from module.downloader.client.qb_downloader import QbDownloader
return QbDownloader(host, username, password, ssl)
else:
raise Exception(f"Unsupported downloader type: {type}")

View File

@@ -2,7 +2,6 @@ import re
import logging
import os
from module.downloader import getClient
from module.models import BangumiData
from module.conf import settings
@@ -12,11 +11,26 @@ logger = logging.getLogger(__name__)
class DownloadClient:
def __init__(self):
self.client = getClient(settings)
self.client = self.__getClient()
self.authed = False
self.download_path = settings.downloader.path
self.group_tag = settings.bangumi_manage.group_tag
@staticmethod
def __getClient():
# TODO 多下载器支持
type = settings.downloader.type
host = settings.downloader.host
username = settings.downloader.username
password = settings.downloader.password
ssl = settings.downloader.ssl
if type == "qbittorrent":
from .client.qb_downloader import QbDownloader
return QbDownloader(host, username, password, ssl)
else:
raise Exception(f"Unsupported downloader type: {type}")
def __enter__(self):
if not self.authed:
self.auth()
@@ -47,8 +61,8 @@ class DownloadClient:
self.download_path = os.path.join(prefs["save_path"], "Bangumi")
def set_rule(self, info: BangumiData):
official_name, raw_name, season, group = (
info.official_title,
official_name = f"{info.official_title}({info.year})" if info.year else info.official_title
raw_name, season, group = (
info.title_raw,
info.season,
info.group_name,
@@ -76,17 +90,13 @@ class DownloadClient:
}
rule_name = f"[{group}] {official_name}" if self.group_tag else official_name
self.client.rss_set_rule(rule_name=f"{rule_name} S{season}", rule_def=rule)
logger.info(f"Add {official_name} Season {season}")
def rss_feed(self, rss_link, item_path="Mikan_RSS"):
# TODO: 定时刷新 RSS
self.client.rss_add_feed(url=rss_link, item_path=item_path)
logger.info(f"Add {official_name} Season {season} to auto download rules.")
def add_collection_feed(self, rss_link, item_path):
self.client.rss_add_feed(url=rss_link, item_path=item_path)
logger.info("Add Collection RSS Feed successfully.")
def add_rules(self, bangumi_info: list[BangumiData]):
def set_rules(self, bangumi_info: list[BangumiData]):
logger.debug("Start adding rules.")
for info in bangumi_info:
if not info.added:
@@ -115,7 +125,7 @@ class DownloadClient:
def move_torrent(self, hashes, location):
self.client.move_torrent(hashes=hashes, new_location=location)
def add_rss_feed(self, rss_link, item_path):
def add_rss_feed(self, rss_link, item_path="Mikan_RSS"):
self.client.rss_add_feed(url=rss_link, item_path=item_path)
logger.info("Add RSS Feed successfully.")

View File

@@ -3,19 +3,19 @@ import re
import logging
from module.network import RequestContent
from module.core import DownloadClient
from module.models import BangumiData, Config
from module.database import DataOperator
from module.downloader import DownloadClient
from module.models import BangumiData
from module.database import BangumiDatabase
from module.conf import settings
logger = logging.getLogger(__name__)
class FullSeasonGet(DownloadClient):
def __init__(self, settings: Config):
def __init__(self):
super().__init__()
self.SEARCH_KEY = [
"group",
"group_name",
"title_raw",
"season_raw",
"subtitle",
@@ -70,14 +70,17 @@ class FullSeasonGet(DownloadClient):
for download in downloads:
self.add_torrent(download)
logger.info("Completed!")
data.eps_collect = False
data.eps_collect = True
def eps_complete(self):
with DataOperator() as op:
datas = op.get_uncompleted()
for data in datas:
if data.eps_collect:
self.download_season(data)
with BangumiDatabase() as bd:
datas = bd.not_complete()
if datas:
logger.info("Start collecting full season...")
for data in datas:
if not data.eps_collect:
self.download_season(data)
bd.update_list(datas)
def download_collection(
self, data: BangumiData, link
@@ -89,3 +92,10 @@ class FullSeasonGet(DownloadClient):
for download in downloads:
self.add_torrent(download)
logger.info("Completed!")
# Manual entry point: run a single full-season collection pass outside the
# web service (debug/CLI use only).
if __name__ == '__main__':
    from module.conf import setup_logger
    setup_logger()
    with FullSeasonGet() as full_season_get:
        full_season_get.eps_complete()

View File

@@ -3,7 +3,7 @@ import os.path
import re
from pathlib import PurePath, PureWindowsPath
from module.core.download_client import DownloadClient
from module.downloader import DownloadClient
from module.parser import TitleParser
from module.network import PostNotification

View File

@@ -1 +1,2 @@
from .rss_analyser import RSSAnalyser
from .add_rules import add_rules

View File

@@ -0,0 +1,11 @@
from module.database import BangumiDatabase
from module.downloader import DownloadClient
def add_rules():
    """Register download rules for every bangumi entry not yet added to the
    download client, then persist the updated rows back to the database."""
    with BangumiDatabase() as database:
        pending = database.not_added()
        if not pending:
            return
        with DownloadClient() as client:
            client.set_rules(pending)
            database.update_list(pending)

View File

@@ -2,8 +2,8 @@ import logging
from module.network import RequestContent
from module.parser import TitleParser
from module.models import Config, BangumiData
from module.database import DataOperator
from module.models import BangumiData
from module.database import BangumiDatabase
from module.conf import settings
logger = logging.getLogger(__name__)
@@ -32,12 +32,12 @@ class RSSAnalyser:
with RequestContent() as req:
rss_torrents = req.get_torrents(rss_link)
title_dict = {torrent.name: torrent.homepage for torrent in rss_torrents}
with DataOperator() as op:
new_dict = op.match_list(title_dict, rss_link)
with BangumiDatabase() as database:
new_dict = database.match_list(title_dict, rss_link)
if not new_dict:
logger.debug("No new title found.")
return []
_id = op.gen_id()
_id = database.gen_id()
new_data = []
# New List
with RequestContent() as req:
@@ -51,12 +51,12 @@ class RSSAnalyser:
# Official title type
self.official_title_parser(data, mikan_title)
if not full_parse:
op.insert(data)
database.insert(data)
return [data]
new_data.append(data)
_id += 1
logger.debug(f"New title found: {data.official_title}")
op.insert_list(new_data)
database.insert_list(new_data)
return new_data
def run(self, rss_link: str):

91
src/module/sub_thread.py Normal file
View File

@@ -0,0 +1,91 @@
import os.path
import time
import logging
import threading
from module.rss import RSSAnalyser, add_rules
from module.manager import Renamer, FullSeasonGet
from module.database import BangumiDatabase
from module.downloader import DownloadClient
from module.conf import settings
from module.conf import VERSION
logger = logging.getLogger(__name__)
stop_event = threading.Event()
def rss_loop(stop_event):
    """Background polling loop: analyse the RSS feed, register download
    rules, optionally back-fill full seasons, then sleep until the next
    cycle or until ``stop_event`` is set.
    """
    analyser = RSSAnalyser()
    link = settings.rss_link()
    while not stop_event.is_set():
        analyser.run(link)
        add_rules()
        if settings.bangumi_manage.eps_complete:
            with FullSeasonGet() as season_getter:
                season_getter.eps_complete()
        # Event.wait doubles as an interruptible sleep: returns early the
        # moment the stop event is set.
        stop_event.wait(settings.program.rss_time)
def rename_loop(stop_event):
    """Periodically run the renamer until ``stop_event`` is set."""
    while True:
        if stop_event.is_set():
            break
        with Renamer() as active_renamer:
            active_renamer.rename()
        # Interruptible sleep between rename passes.
        stop_event.wait(settings.program.rename_time)
# Module-level placeholders for the two worker threads. Note that both
# start_program() and start_thread() rebind these globals to fresh Thread
# objects before calling start() (a Thread object can only be started once),
# so these initial instances are never started themselves.
rss_thread = threading.Thread(
    target=rss_loop,
    args=(stop_event,),
)
rename_thread = threading.Thread(
    target=rename_loop,
    args=(stop_event,),
)
def start_info():
    """Log the startup banner: the ASCII-art icon file, version/author
    line and project links."""
    with open("icon", "r") as icon_file:
        # Iterate the file directly; strip the trailing newline so the
        # logger does not emit blank lines.
        for raw_line in icon_file:
            logger.info(raw_line.strip("\n"))
    logger.info(
        f"Version {VERSION} Author: EstrellaXD Twitter: https://twitter.com/Estrella_Pan"
    )
    logger.info("GitHub: https://github.com/EstrellaXD/Auto_Bangumi/")
    logger.info("Starting AutoBangumi...")
def stop_thread():
    """Signal both worker loops to stop and wait for them to exit.

    No-op when the stop event is already set (loops already stopped or
    in the process of stopping).
    """
    global rss_thread, rename_thread
    if not stop_event.is_set():
        stop_event.set()
        # join() blocks until each loop observes the event and returns —
        # worst case roughly one wait interval per loop.
        rename_thread.join()
        rss_thread.join()
def start_thread():
    """Restart the worker loops after a stop_thread() call.

    Only acts when the stop event is currently set; reloads settings and
    spawns fresh Thread objects, since a finished Thread cannot be
    restarted.

    Returns:
        A small status dict suitable as an API response.
    """
    global rss_thread, rename_thread
    if stop_event.is_set():
        stop_event.clear()
        # NOTE(review): presumably gives in-flight loop iterations a moment
        # to settle before reloading settings — confirm intent.
        time.sleep(1)
        settings.load()
        rss_thread = threading.Thread(target=rss_loop, args=(stop_event,))
        rename_thread = threading.Thread(target=rename_loop, args=(stop_event,))
        rss_thread.start()
        rename_thread.start()
    return {"status": "ok"}
def start_program():
    """Full application startup: banner, database schema check, download
    client initialization and RSS feed registration, then launch both
    worker loops."""
    global rss_thread, rename_thread
    start_info()
    with BangumiDatabase() as database:
        # Ensure the bangumi table matches the current data model before
        # any loop touches it.
        database.update_table()
    with DownloadClient() as client:
        client.init_downloader()
        # add_rss_feed falls back to its default item_path here.
        client.add_rss_feed(settings.rss_link())
    # Rebind the module-level thread objects so repeated start/stop cycles
    # always start a fresh, unstarted Thread.
    rss_thread = threading.Thread(target=rss_loop, args=(stop_event,))
    rename_thread = threading.Thread(target=rename_loop, args=(stop_event,))
    rss_thread.start()
    rename_thread.start()