Mirror of https://github.com/EstrellaXD/Auto_Bangumi.git, synced 2026-02-11 22:37:02 +08:00
Commit: Fix Bugs, add multiprocess in main.py
.gitignore (vendored): 1 change
@@ -179,3 +179,4 @@ cython_debug/
 test.*
 .run
 /module/conf/version.py
+/templates/
main.py: 11 changes
@@ -4,4 +4,13 @@ from module import api
+import multiprocessing
 
 
 if __name__ == "__main__":
-    app.run()
+    num_processes = 2
+    processes = []
+    p1 = multiprocessing.Process(target=app.run)
+    p2 = multiprocessing.Process(target=api.run)
+    process_list = [p1, p2]
+    for p in process_list:
+        p.start()
+        processes.append(p)
+    for p in processes:
+        p.join()
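Note: main.py no longer calls app.run() inline; it starts the core loop and the web API as two separate OS processes and waits on both. A minimal sketch of the resulting file, assuming both module.app and module.api expose blocking run() entry points (only the api import is visible in the hunk context):

# Sketch of main.py after this commit; import names partly assumed.
import multiprocessing

from module import app  # core/downloader loop, assumed to expose run()
from module import api  # FastAPI web UI, exposes run() (see the uvicorn hunk below)

if __name__ == "__main__":
    # One process per long-running component; join() keeps the parent alive.
    p1 = multiprocessing.Process(target=app.run)
    p2 = multiprocessing.Process(target=api.run)
    processes = [p1, p2]
    for p in processes:
        p.start()
    for p in processes:
        p.join()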
@@ -8,7 +8,7 @@ from pydantic import BaseModel
 import logging
 
 from .core import APIProcess
-from .conf import settings
+from .conf import settings, DATA_PATH, LOG_PATH
 from .utils import json_config
 
 logger = logging.getLogger(__name__)
@@ -16,8 +16,8 @@ logger = logging.getLogger(__name__)
 app = FastAPI()
 api_func = APIProcess()
 
-app.mount("/assets", StaticFiles(directory="/templates/assets"), name="assets")
-templates = Jinja2Templates(directory="/templates")
+app.mount("/assets", StaticFiles(directory="templates/assets"), name="assets")
+templates = Jinja2Templates(directory="templates")
 
 
 @app.get("/", response_class=HTMLResponse)
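Note: the static assets and Jinja2 templates are now mounted from the relative path templates/ (resolved against the working directory) instead of the absolute /templates, matching the new /templates/ entry in .gitignore. A self-contained sketch of the pattern; the template file name is assumed, since the hunk does not show the index handler body:

# Sketch: serving a built frontend from a relative "templates" directory with FastAPI.
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates

app = FastAPI()

# Relative paths resolve against the process working directory.
app.mount("/assets", StaticFiles(directory="templates/assets"), name="assets")
templates = Jinja2Templates(directory="templates")

@app.get("/", response_class=HTMLResponse)
def index(request: Request):
    # "index.html" is assumed for illustration.
    return templates.TemplateResponse("index.html", {"request": request})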
@@ -28,14 +28,13 @@ def index(request: Request):
 
 @app.get("/api/v1/data")
 def get_data():
-    data = json_config.load(settings.info_path)
+    data = json_config.load(DATA_PATH)
     return data
 
 
 @app.get("/api/v1/log")
 async def get_log():
-    log_path = settings.log_path
-    return FileResponse(log_path)
+    return FileResponse(LOG_PATH)
 
 
 @app.get("/api/v1/resetRule")
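Note: the data and log endpoints stop reading paths from the settings object (settings.info_path / settings.log_path) and instead use the module-level constants DATA_PATH and LOG_PATH exported by module.conf. A standalone sketch of the same endpoints, using plain json in place of the project's json_config helper; the LOG_PATH value is assumed, since it is defined in module.conf.log and not shown in this diff:

# Sketch: endpoints backed by fixed paths rather than per-instance settings.
import json

from fastapi import FastAPI
from fastapi.responses import FileResponse

DATA_PATH = "data/data.json"  # value taken from the conf/__init__.py hunk below
LOG_PATH = "data/log.txt"     # assumed for illustration

app = FastAPI()

@app.get("/api/v1/data")
def get_data():
    with open(DATA_PATH, encoding="utf-8") as f:
        return json.load(f)

@app.get("/api/v1/log")
async def get_log():
    return FileResponse(LOG_PATH)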
@@ -74,7 +73,7 @@ async def add_rule(info: AddRule):
 
 def run():
     LOGGING_CONFIG["formatters"]["default"]["fmt"] = "[%(asctime)s] %(levelprefix)s %(message)s"
-    uvicorn.run(app, host="0.0.0.0", port=settings.webui_port)
+    uvicorn.run(app, host="0.0.0.0", port=settings.program.webui_port)
 
 
 if __name__ == "__main__":
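Note: run() still patches uvicorn's default log format via LOGGING_CONFIG, but now reads the port from the nested settings.program section rather than a flat settings.webui_port, matching the restructured config. A runnable sketch with a placeholder port standing in for the settings object:

# Sketch: starting the API with uvicorn and a customized default log format.
import uvicorn
from uvicorn.config import LOGGING_CONFIG
from fastapi import FastAPI

app = FastAPI()

def run():
    LOGGING_CONFIG["formatters"]["default"]["fmt"] = "[%(asctime)s] %(levelprefix)s %(message)s"
    # In the project this is settings.program.webui_port; 7892 is a placeholder.
    uvicorn.run(app, host="0.0.0.0", port=7892)

if __name__ == "__main__":
    run()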
@@ -1,6 +1,6 @@
-from .config import settings
+from .config import settings, VERSION
 from .log import setup_logger, LOG_PATH
-from .version import VERSION
 
 
 TMDB_API = "32b19d6a05b512190a056fa4e747cbbc"
+DATA_PATH = "data/data.json"
@@ -6,6 +6,7 @@ from dataclasses import dataclass
 from .const import DEFAULT_SETTINGS, ENV_TO_ATTR
 
 
+
 class ConfLoad(dict):
     def __getattr__(self, item):
         return self.get(item)
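Note: ConfLoad is a dict subclass whose __getattr__ falls back to dict.get, which is what makes attribute-style access such as settings.program.webui_port possible. The hunk only shows the top of the class; for nested access to work, inner dicts must be wrapped as well, so the recursion in the sketch below is an assumption added for illustration:

# Sketch: dict with attribute access; recursive wrapping of nested sections is assumed.
class ConfLoad(dict):
    def __getattr__(self, item):
        value = self.get(item)
        if isinstance(value, dict):
            return ConfLoad(value)
        return value


settings = ConfLoad({"program": {"webui_port": 7892}})
print(settings.program.webui_port)  # 7892
print(settings.missing)             # None, because dict.get returns None for absent keys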
@@ -56,15 +57,13 @@ class Settings:
             json.dump(settings, f, indent=4)
         return settings
 
 
-if os.path.isfile("version.py"):
+try:
+    from .version import VERSION
     if VERSION == "DEV_VERSION":
         CONFIG_PATH = "config/config_dev.json"
     else:
         CONFIG_PATH = "config/config.json"
-    settings = Settings(CONFIG_PATH)
-else:
-    settings = Settings(DEFAULT_SETTINGS)
-    CONFIG_PATH = ("config/config.json")
+except ImportError:
+    VERSION = "DEV_VERSION"
+    CONFIG_PATH = "config/config_dev.json"
 
 settings = Settings(CONFIG_PATH)
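Note: the old guard checked os.path.isfile("version.py"), which is evaluated against the current working directory rather than the package directory, so it could fall back to DEFAULT_SETTINGS even when module/conf/version.py existed; catching ImportError on the actual import is the more direct test, and is presumably part of the "Fix Bugs" in this commit. The fallback pattern in isolation, with a hypothetical top-level version module so the sketch runs standalone:

# Sketch: choose the config path based on whether a generated version module is importable.
try:
    # In the project this is "from .version import VERSION"; version.py is generated
    # at build time and ignored in git (see the .gitignore entry above).
    from version import VERSION  # hypothetical module name for this standalone sketch
except ImportError:
    VERSION = "DEV_VERSION"

CONFIG_PATH = "config/config_dev.json" if VERSION == "DEV_VERSION" else "config/config.json"
print(VERSION, CONFIG_PATH)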
@@ -18,7 +18,7 @@ class RSSAnalyser:
 
     def rss_to_datas(self, bangumi_info: list) -> list:
         rss_torrents = self._request.get_torrents(settings.rss_parser.link)
-        self._request.close()
+        self._request.close_session()
         for torrent in rss_torrents:
             raw_title = torrent.name
             extra_add = True
@@ -36,7 +36,7 @@
 
     def rss_to_data(self, url) -> dict:
         rss_torrents = self._request.get_torrents(url)
-        self._request.close()
+        self._request.close_session()
         for torrent in rss_torrents:
             try:
                 data = self._title_analyser.return_dict(torrent.name)
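Note: both RSSAnalyser call sites switch from close() to close_session(), presumably tracking a rename on the request helper, which is not part of this diff. A minimal sketch of what such a wrapper could look like; everything except the two method names and the RequestContent class name (which appears in a later hunk) is assumed:

# Sketch: a small RSS fetcher with an explicit close_session() (internals assumed).
import requests

class RequestContent:
    def __init__(self):
        self._session = requests.Session()

    def get_torrents(self, url: str) -> bytes:
        # The real code parses the RSS XML into TorrentInfo objects; raw bytes keep this short.
        resp = self._session.get(url, timeout=10)
        resp.raise_for_status()
        return resp.content

    def close_session(self):
        self._session.close()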
@@ -69,10 +69,11 @@ class QbDownloader:
 
     def rss_add_feed(self, url, item_path):
         try:
+            if self.get_rss_info() is not None:
+                self.rss_remove_item(item_path)
             self._client.rss_add_feed(url, item_path)
-        except Conflict409Error as e:
-            logger.exception(e)
-            raise ConflictError()
+        except Conflict409Error:
+            logger.exception("RSS Exist.")
 
     def rss_remove_item(self, item_path):
         try:
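Note: instead of raising a custom ConflictError when qBittorrent reports that the RSS feed already exists, QbDownloader now removes any existing item first and only logs the 409 if it still occurs. A hedged sketch of the same idea written directly against qbittorrent-api; the connection details are illustrative, and the existence check stands in for the project's get_rss_info():

# Sketch: re-adding an RSS feed idempotently with qbittorrent-api.
import logging

import qbittorrentapi
from qbittorrentapi import Conflict409Error

logger = logging.getLogger(__name__)
client = qbittorrentapi.Client(host="localhost:8080", username="admin", password="adminadmin")

def rss_add_feed(url: str, item_path: str):
    try:
        # Drop any feed already registered under this name before re-adding it.
        if item_path in client.rss_items():
            client.rss_remove_item(item_path=item_path)
        client.rss_add_feed(url=url, item_path=item_path)
    except Conflict409Error:
        logger.exception("RSS Exist.")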
@@ -5,6 +5,7 @@ from module.conf import settings
 
 import re
 
+FILTER = "|".join(settings.rss_parser.filter)
 
 @dataclass
 class TorrentInfo:
@@ -23,7 +24,7 @@ class RequestContent:
         torrent_urls = [item.get("url") for item in soup.find_all("enclosure")]
         torrents = []
         for _title, torrent_url in zip(torrent_titles, torrent_urls):
-            if re.search(settings.not_contain, _title) is None:
+            if re.search(FILTER, _title) is None:
                 torrents.append(TorrentInfo(_title, torrent_url))
         return torrents
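Note: title filtering now uses a module-level FILTER pattern built by joining the settings.rss_parser.filter keywords with "|", rather than a single settings.not_contain string, so any configured keyword excludes a torrent. The filter logic in isolation, with sample keywords and titles standing in for the parsed RSS feed:

# Sketch: exclude torrent titles that match any configured filter keyword.
import re

filter_keywords = ["720", "\\d+-\\d+"]  # example values; real ones come from settings.rss_parser.filter
FILTER = "|".join(filter_keywords)

titles = [
    "[Sub] Show - 01 [1080p]",
    "[Sub] Show - 01 [720p]",    # matches "720", filtered out
    "[Sub] Show 01-12 [1080p]",  # matches the episode-range pattern, filtered out
]

kept = [t for t in titles if re.search(FILTER, t) is None]
print(kept)  # ['[Sub] Show - 01 [1080p]']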