mirror of
https://github.com/jxxghp/MoviePilot.git
synced 2026-03-20 03:57:30 +08:00
refactor:重构缓存系统
This commit is contained in:
@@ -4,17 +4,19 @@ from typing import Any, Optional
|
||||
from urllib.parse import quote
|
||||
|
||||
import redis
|
||||
from redis.asyncio import Redis
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.schemas import ConfigChangeEventData
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.singleton import Singleton
|
||||
|
||||
|
||||
class RedisHelper:
|
||||
class RedisHelper(metaclass=Singleton):
|
||||
"""
|
||||
Redis连接和操作助手类
|
||||
Redis连接和操作助手类,单例模式
|
||||
|
||||
特性:
|
||||
- 管理Redis连接池和客户端
|
||||
@@ -27,32 +29,30 @@ class RedisHelper:
|
||||
_complex_serializable_types = set()
|
||||
_simple_serializable_types = set()
|
||||
|
||||
def __init__(self, redis_url: Optional[str] = "redis://localhost"):
|
||||
def __init__(self):
|
||||
"""
|
||||
初始化Redis助手实例
|
||||
|
||||
:param redis_url: Redis服务的URL
|
||||
"""
|
||||
self.redis_url = redis_url
|
||||
self.redis_url = settings.CACHE_BACKEND_URL
|
||||
self.client = None
|
||||
self._connect()
|
||||
|
||||
def _connect(self):
|
||||
"""
|
||||
建立Redis连接
|
||||
"""
|
||||
try:
|
||||
self.client = redis.Redis.from_url(
|
||||
self.redis_url,
|
||||
decode_responses=False,
|
||||
socket_timeout=30,
|
||||
socket_connect_timeout=5,
|
||||
health_check_interval=60,
|
||||
)
|
||||
# 测试连接,确保Redis可用
|
||||
self.client.ping()
|
||||
logger.info(f"Successfully connected to Redis:{self.redis_url}")
|
||||
self.set_memory_limit()
|
||||
if self.client is None:
|
||||
self.client = redis.Redis.from_url(
|
||||
self.redis_url,
|
||||
decode_responses=False,
|
||||
socket_timeout=30,
|
||||
socket_connect_timeout=5,
|
||||
health_check_interval=60,
|
||||
)
|
||||
# 测试连接,确保Redis可用
|
||||
self.client.ping()
|
||||
logger.info(f"Successfully connected to Redis:{self.redis_url}")
|
||||
self.set_memory_limit()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to Redis: {e}")
|
||||
raise RuntimeError("Redis connection failed") from e
|
||||
@@ -69,6 +69,7 @@ class RedisHelper:
|
||||
if event_data.key not in ['CACHE_BACKEND_TYPE', 'CACHE_BACKEND_URL', 'CACHE_REDIS_MAXMEMORY']:
|
||||
return
|
||||
logger.info("配置变更,重连Redis...")
|
||||
self.close()
|
||||
self._connect()
|
||||
|
||||
def set_memory_limit(self, policy: Optional[str] = "allkeys-lru"):
|
||||
@@ -164,6 +165,7 @@ class RedisHelper:
|
||||
:param kwargs: 其他参数
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
# 对值进行序列化
|
||||
serialized_value = self.serialize(value)
|
||||
@@ -181,6 +183,7 @@ class RedisHelper:
|
||||
:return: 存在返回True,否则返回False
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
return self.client.exists(redis_key) == 1
|
||||
except Exception as e:
|
||||
@@ -196,6 +199,7 @@ class RedisHelper:
|
||||
:return: 返回缓存的值,如果缓存不存在返回None
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
value = self.client.get(redis_key)
|
||||
if value is not None:
|
||||
@@ -213,6 +217,7 @@ class RedisHelper:
|
||||
:param region: 缓存的区
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
redis_key = self.get_redis_key(region, key)
|
||||
self.client.delete(redis_key)
|
||||
except Exception as e:
|
||||
@@ -225,6 +230,7 @@ class RedisHelper:
|
||||
:param region: 缓存的区
|
||||
"""
|
||||
try:
|
||||
self._connect()
|
||||
if region:
|
||||
cache_region = self.get_region(quote(region))
|
||||
redis_key = f"{cache_region}:key:*"
|
||||
@@ -239,10 +245,302 @@ class RedisHelper:
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to clear cache, region: {region}, error: {e}")
|
||||
|
||||
def items(self, region: Optional[str] = None):
    """
    Iterate over all cached key/value pairs in a region.

    :param region: cache region name; when omitted, every key is scanned
    :return: generator yielding (key, deserialized value) tuples
    """
    try:
        self._connect()
        # Scan only the region's namespace when one is given, otherwise everything.
        pattern = f"{self.get_region(quote(region))}:key:*" if region else "*"
        for redis_key in self.client.scan_iter(pattern):
            raw = self.client.get(redis_key)
            if raw is None:
                continue
            yield redis_key, self.deserialize(raw)
    except Exception as e:
        logger.error(f"Failed to get items from Redis, region: {region}, error: {e}")
||||
def test(self) -> bool:
    """
    Probe Redis connectivity.

    :return: True when a connection can be established, False otherwise
    """
    try:
        self._connect()
    except Exception as e:
        logger.error(f"Redis connection test failed: {e}")
        return False
    return True
def close(self) -> None:
    """
    Release the Redis client and its connection pool.
    """
    if not self.client:
        return
    self.client.close()
    self.client = None
    logger.debug("Redis connection closed")
|
||||
class AsyncRedisHelper(metaclass=Singleton):
    """
    Async Redis connection and operation helper (singleton).

    Features:
    - Manages the async Redis connection pool and client
    - Provides value serialization / deserialization
    - Supports memory limits and eviction-policy configuration
    - Builds namespaced cache keys and manages cache regions
    - All operations are asynchronous
    """

    # Type caches for non-container ("simple") values: once a type has been
    # classified as JSON-safe or pickle-only, later values of that type skip
    # the trial serialization.
    _complex_serializable_types = set()
    _simple_serializable_types = set()

    def __init__(self, redis_url: Optional[str] = "redis://localhost"):
        """
        Initialize the async Redis helper instance.

        :param redis_url: URL of the Redis service
        """
        self.redis_url = redis_url
        # Created lazily by _connect(); None means "no live client".
        self.client: Optional[Redis] = None

    async def _connect(self):
        """
        Establish the async Redis connection (no-op when already connected).

        :raises RuntimeError: when the connection or the initial ping fails
        """
        if self.client is not None:
            return
        try:
            self.client = Redis.from_url(
                self.redis_url,
                decode_responses=False,
                socket_timeout=30,
                socket_connect_timeout=5,
                health_check_interval=60,
            )
            # Ping to make sure Redis is actually reachable.
            await self.client.ping()
            logger.info(f"Successfully connected to Redis (async):{self.redis_url}")
            await self.set_memory_limit()
        except Exception as e:
            # Bug fix: discard the half-initialized client so the next call
            # to _connect() retries instead of seeing a non-None client and
            # silently reusing a connection that never passed the ping check.
            if self.client is not None:
                try:
                    await self.client.close()
                except Exception:
                    pass
                self.client = None
            logger.error(f"Failed to connect to Redis (async): {e}")
            raise RuntimeError("Redis async connection failed") from e

    @eventmanager.register(EventType.ConfigChanged)
    async def handle_config_changed(self, event: Event):
        """
        React to configuration changes by reconnecting to Redis.

        :param event: the configuration-change event
        """
        if not event:
            return
        event_data: ConfigChangeEventData = event.event_data
        # Only cache-backend related settings require a reconnect.
        if event_data.key not in ['CACHE_BACKEND_TYPE', 'CACHE_BACKEND_URL', 'CACHE_REDIS_MAXMEMORY']:
            return
        logger.info("配置变更,重连Redis (async)...")
        await self.close()
        await self._connect()

    async def set_memory_limit(self, policy: Optional[str] = "allkeys-lru"):
        """
        Dynamically configure Redis max memory and the eviction policy.

        :param policy: eviction policy (e.g. 'allkeys-lru')
        """
        try:
            # Use the explicit setting when present (0 disables the limit);
            # otherwise default to "1024mb" in BIG_MEMORY_MODE, else "256mb".
            maxmemory = settings.CACHE_REDIS_MAXMEMORY or ("1024mb" if settings.BIG_MEMORY_MODE else "256mb")
            await self.client.config_set("maxmemory", maxmemory)
            await self.client.config_set("maxmemory-policy", policy)
            logger.debug(f"Redis maxmemory set to {maxmemory}, policy: {policy} (async)")
        except Exception as e:
            logger.error(f"Failed to set Redis maxmemory or policy (async): {e}")

    @staticmethod
    def is_container_type(t):
        """
        Return True when *t* is one of the builtin container types.
        """
        return t in (list, dict, tuple, set)

    @classmethod
    def serialize(cls, value: Any) -> bytes:
        """
        Serialize a value to bytes, prefixed with a marker for the format used.
        """
        vt = type(value)
        # Non-container types: use the per-type classification cache.
        if not cls.is_container_type(vt):
            # Known to require complex (pickle) serialization.
            if vt in cls._complex_serializable_types:
                return b"PICKLE" + b"\x00" + pickle.dumps(value)
            # Known to be JSON-safe.
            if vt in cls._simple_serializable_types:
                json_data = json.dumps(value).encode("utf-8")
                return b"JSON" + b"\x00" + json_data
            # Unknown type: try JSON first, fall back to pickle on TypeError,
            # and remember the outcome for subsequent values of this type.
            try:
                json_data = json.dumps(value).encode("utf-8")
                cls._simple_serializable_types.add(vt)
                return b"JSON" + b"\x00" + json_data
            except TypeError:
                cls._complex_serializable_types.add(vt)
                return b"PICKLE" + b"\x00" + pickle.dumps(value)
        # Container types: serializability depends on the contents, so try
        # JSON every time without caching the result.
        else:
            try:
                json_data = json.dumps(value).encode("utf-8")
                return b"JSON" + b"\x00" + json_data
            except TypeError:
                return b"PICKLE" + b"\x00" + pickle.dumps(value)

    @classmethod
    def deserialize(cls, value: bytes) -> Any:
        """
        Deserialize bytes produced by serialize(), dispatching on the
        format marker.

        :raises ValueError: when the payload carries no known format marker
        """
        # partition() keeps the "no separator" case explicit instead of
        # letting tuple unpacking raise an unrelated ValueError.
        format_marker, sep, data = value.partition(b"\x00")
        if not sep:
            raise ValueError("Unknown serialization format")
        if format_marker == b"JSON":
            return json.loads(data.decode("utf-8"))
        elif format_marker == b"PICKLE":
            return pickle.loads(data)
        else:
            raise ValueError("Unknown serialization format")

    @staticmethod
    def get_region(region: Optional[str] = "DEFAULT"):
        """
        Build the namespace prefix for a cache region.
        """
        return f"region:{region}" if region else "region:default"

    def get_redis_key(self, region: str, key: str) -> str:
        """
        Build the full Redis key for a region/key pair.
        """
        # The region becomes part of the key namespace.
        region = self.get_region(quote(region))
        return f"{region}:key:{quote(key)}"

    async def set(self, key: str, value: Any, ttl: Optional[int] = None,
                  region: Optional[str] = "DEFAULT", **kwargs) -> None:
        """
        Asynchronously set a cache entry.

        :param key: cache key
        :param value: value to cache
        :param ttl: time-to-live in seconds
        :param region: cache region
        :param kwargs: extra arguments forwarded to the Redis client
        """
        try:
            await self._connect()
            redis_key = self.get_redis_key(region, key)
            # Serialize the value before storing it.
            serialized_value = self.serialize(value)
            # "maxsize" is an in-memory-cache-only option; Redis rejects it.
            kwargs.pop("maxsize", None)
            await self.client.set(redis_key, serialized_value, ex=ttl, **kwargs)
        except Exception as e:
            logger.error(f"Failed to set key (async): {key} in region: {region}, error: {e}")

    async def exists(self, key: str, region: Optional[str] = "DEFAULT") -> bool:
        """
        Asynchronously check whether a cache key exists.

        :param key: cache key
        :param region: cache region
        :return: True when the key exists, False otherwise
        """
        try:
            await self._connect()
            redis_key = self.get_redis_key(region, key)
            result = await self.client.exists(redis_key)
            return result == 1
        except Exception as e:
            logger.error(f"Failed to exists key (async): {key} region: {region}, error: {e}")
            return False

    async def get(self, key: str, region: Optional[str] = "DEFAULT") -> Optional[Any]:
        """
        Asynchronously fetch a cached value.

        :param key: cache key
        :param region: cache region
        :return: the cached value, or None when the key is absent
        """
        try:
            await self._connect()
            redis_key = self.get_redis_key(region, key)
            value = await self.client.get(redis_key)
            if value is not None:
                return self.deserialize(value)
            return None
        except Exception as e:
            logger.error(f"Failed to get key (async): {key} in region: {region}, error: {e}")
            return None

    async def delete(self, key: str, region: Optional[str] = "DEFAULT") -> None:
        """
        Asynchronously delete a cache entry.

        :param key: cache key
        :param region: cache region
        """
        try:
            await self._connect()
            redis_key = self.get_redis_key(region, key)
            await self.client.delete(redis_key)
        except Exception as e:
            logger.error(f"Failed to delete key (async): {key} in region: {region}, error: {e}")

    async def clear(self, region: Optional[str] = None) -> None:
        """
        Asynchronously clear one region's cache, or all of it.

        :param region: cache region; when omitted, the whole DB is flushed
        """
        try:
            await self._connect()
            if region:
                cache_region = self.get_region(quote(region))
                redis_key = f"{cache_region}:key:*"
                # Batch the deletes through a pipeline to cut round-trips.
                async with self.client.pipeline() as pipe:
                    async for key in self.client.scan_iter(redis_key):
                        await pipe.delete(key)
                    await pipe.execute()
                logger.info(f"Cleared Redis cache for region (async): {region}")
            else:
                await self.client.flushdb()
                logger.info("Cleared all Redis cache (async)")
        except Exception as e:
            logger.error(f"Failed to clear cache (async), region: {region}, error: {e}")

    async def test(self) -> bool:
        """
        Asynchronously probe Redis connectivity.
        """
        try:
            await self._connect()
            return True
        except Exception as e:
            logger.error(f"Redis async connection test failed: {e}")
            return False

    async def close(self) -> None:
        """
        Close the async Redis client and its connection pool.
        """
        if self.client:
            await self.client.close()
            self.client = None
            logger.debug("Redis async connection closed")
||||
@@ -1,7 +1,7 @@
|
||||
from threading import Thread
|
||||
from typing import List, Tuple, Optional
|
||||
|
||||
from app.core.cache import cached, cache_backend
|
||||
from app.core.cache import cached
|
||||
from app.core.config import settings
|
||||
from app.db.subscribe_oper import SubscribeOper
|
||||
from app.db.systemconfig_oper import SystemConfigOper
|
||||
@@ -111,7 +111,12 @@ class SubscribeHelper(metaclass=WeakSingleton):
|
||||
if res and res.status_code == 200:
|
||||
# 清除缓存
|
||||
if clear_cache:
|
||||
cache_backend.clear(region=self._shares_cache_region)
|
||||
self.get_shares.cache_clear()
|
||||
self.get_statistic.cache_clear()
|
||||
self.get_share_statistics.cache_clear()
|
||||
self.async_get_shares.cache_clear()
|
||||
self.async_get_statistic.cache_clear()
|
||||
self.async_get_share_statistics.cache_clear()
|
||||
return True, ""
|
||||
else:
|
||||
return False, res.json().get("message")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import json
|
||||
from typing import List, Tuple, Optional
|
||||
|
||||
from app.core.cache import cached, cache_backend
|
||||
from app.core.cache import cached
|
||||
from app.core.config import settings
|
||||
from app.db.models import Workflow
|
||||
from app.db.workflow_oper import WorkflowOper
|
||||
@@ -89,7 +89,8 @@ class WorkflowHelper(metaclass=WeakSingleton):
|
||||
if success:
|
||||
# 清除缓存
|
||||
if clear_cache:
|
||||
cache_backend.clear(region=self._shares_cache_region)
|
||||
self.get_shares.cache_clear()
|
||||
self.async_get_shares.cache_clear()
|
||||
return True, ""
|
||||
else:
|
||||
try:
|
||||
|
||||
Reference in New Issue
Block a user