feat: add local poster cache.

This commit is contained in:
EstrellaXD
2023-10-04 16:56:07 +08:00
parent efa199979e
commit fb721d25dc
11 changed files with 64 additions and 10 deletions

View File

@@ -36,7 +36,7 @@ def create_app() -> FastAPI:
app = create_app()
app.mount("/posters", StaticFiles(directory="data/posters"), name="posters")
if VERSION != "DEV_VERSION":
app.mount("/assets", StaticFiles(directory="dist/assets"), name="assets")
@@ -58,6 +58,9 @@ else:
return RedirectResponse("/docs")
if __name__ == "__main__":
if os.getenv("IPV6"):
host = "::"

View File

@@ -75,6 +75,15 @@ class Checker:
logger.error(f"[Checker] Downloader connect failed: {e}")
return False
@staticmethod
def check_img_cache() -> bool:
img_path = Path("data/posters")
if img_path.exists():
return True
else:
img_path.mkdir()
return False
if __name__ == "__main__":
# print(Checker().check_downloader())

View File

@@ -2,7 +2,7 @@ import logging
from module.conf import VERSION, settings
from module.models import ResponseModel
from module.update import data_migration, first_run, from_30_to_31, start_up
from module.update import data_migration, first_run, from_30_to_31, start_up, cache_image
from .sub_thread import RenameThread, RSSThread
@@ -46,6 +46,9 @@ class Program(RenameThread, RSSThread):
# Update database
from_30_to_31()
logger.info("[Core] Database updated.")
if not self.img_cache:
logger.info("[Core] No image cache exists, create image cache.")
cache_image()
self.start()
def start(self):

View File

@@ -54,3 +54,7 @@ class ProgramStatus(Checker):
@property
def database(self):
    """Result of the database health check (see Checker.check_database)."""
    db_ok = self.check_database()
    return db_ok
@property
def img_cache(self):
    """Whether the local poster-cache directory is present on disk.

    Delegates to Checker.check_img_cache(), which also creates the
    directory when it is missing.
    """
    cache_ready = self.check_img_cache()
    return cache_ready

View File

@@ -2,6 +2,7 @@ from bs4 import BeautifulSoup
from urllib3.util import parse_url
from module.network import RequestContent
from module.utils import save_image
def mikan_parser(homepage: str):
@@ -9,13 +10,14 @@ def mikan_parser(homepage: str):
with RequestContent() as req:
content = req.get_html(homepage)
soup = BeautifulSoup(content, "html.parser")
poster_div = soup.find("div", {"class": "bangumi-poster"})
poster_style = poster_div.get("style")
poster_div = soup.find("div", {"class": "bangumi-poster"}).get("style")
official_title = soup.select_one(
'p.bangumi-title a[href^="/Home/Bangumi/"]'
).text
if poster_style:
poster_path = poster_style.split("url('")[1].split("')")[0]
poster_link = f"https://{root_path}{poster_path}"
if poster_div:
poster_path = poster_div.split("url('")[1].split("')")[0]
img = req.get_content(f"https://{root_path}{poster_path}")
suffix = poster_path.split(".")[-1]
poster_link = save_image(img, suffix)
return poster_link, official_title
return "", ""

View File

@@ -4,6 +4,7 @@ from dataclasses import dataclass
from module.conf import TMDB_API
from module.network import RequestContent
from module.utils import save_image
TMDB_URL = "https://api.themoviedb.org"
@@ -85,7 +86,8 @@ def tmdb_parser(title, language) -> TMDBInfo | None:
official_title = info_content.get("name")
year_number = info_content.get("first_air_date").split("-")[0]
if poster_path:
poster_link = "https://image.tmdb.org/t/p/w300" + poster_path
img = req.get_content(f"https://image.tmdb.org/t/p/w780{poster_path}")
poster_link = save_image(img, "jpg")
else:
poster_link = None
return TMDBInfo(

View File

@@ -1,4 +1,4 @@
from .cross_version import from_30_to_31
from .cross_version import from_30_to_31, cache_image
from .data_migration import data_migration
from .startup import first_run, start_up
from .version_check import version_check

View File

@@ -3,6 +3,8 @@ import re
from urllib3.util import parse_url
from module.rss import RSSEngine
from module.utils import save_image
from module.network import RequestContent
def from_30_to_31():
@@ -28,3 +30,17 @@ def from_30_to_31():
else:
aggregate = False
db.add_rss(rss_link=rss, aggregate=aggregate)
def cache_image():
    """Download each bangumi's remote poster and rewrite its link to a local copy.

    For every bangumi that has a poster_link, the remote image is fetched,
    stored under the content-hashed local poster cache via save_image(), and
    the database record is updated to point at the local path instead.
    """
    with RSSEngine() as db, RequestContent() as req:
        bangumis = db.bangumi.search_all()
        for bangumi in bangumis:
            link = bangumi.poster_link
            if not link:
                continue
            # Fetch the remote poster and persist it under its hashed local path.
            raw_image = req.get_content(link)
            extension = link.split(".")[-1]
            bangumi.poster_link = save_image(raw_image, extension)
        db.bangumi.update_all(bangumis)

View File

@@ -1 +1 @@
from .cache_image import save_image, load_image

View File

@@ -0,0 +1,14 @@
import hashlib
from pathlib import Path
def save_image(img: bytes, suffix: str) -> str:
    """Persist raw image bytes under ``data/posters/`` and return the web link.

    The filename is the first 8 hex digits of the content's MD5 digest
    (content-addressed, so identical images naturally deduplicate) plus the
    supplied file-type suffix. MD5 is used only for addressing, not security.

    Args:
        img: Raw image bytes to write.
        suffix: File extension without the dot (e.g. ``"jpg"``).

    Returns:
        Path relative to the web root, e.g. ``"posters/1a2b3c4d.jpg"``.
    """
    img_hash = hashlib.md5(img).hexdigest()[:8]
    file_name = f"{img_hash}.{suffix}"
    # Create the cache directory on first use so callers need no prior setup;
    # the original open(..., "wb") raised FileNotFoundError on a fresh install.
    poster_dir = Path("data/posters")
    poster_dir.mkdir(parents=True, exist_ok=True)
    (poster_dir / file_name).write_bytes(img)
    return f"posters/{file_name}"
def load_image(img_path: str) -> bytes:
    """Read a cached image back from disk.

    Args:
        img_path: Relative link as returned by ``save_image``,
            e.g. ``"posters/1a2b3c4d.jpg"``.

    Returns:
        The raw image bytes.

    Raises:
        FileNotFoundError: If the image is not present in the cache.
    """
    # NOTE(review): the original resolved against "data/images/", but
    # save_image writes under "data/posters/" and returns "posters/<hash>.<suffix>",
    # so loading always missed. Resolve against "data/" so the pair round-trips.
    with open(f"data/{img_path}", "rb") as f:
        return f.read()

View File

@@ -103,6 +103,7 @@ export default defineConfig({
server: {
proxy: {
'^/api/.*': 'http://127.0.0.1:7892',
'^/posters/.*': 'http://127.0.0.1:7892',
},
},
});