224
web/backend/cache_manager.py
Normal file
224
web/backend/cache_manager.py
Normal file
@@ -0,0 +1,224 @@
|
||||
"""
|
||||
Redis-based cache manager for Media Downloader API
|
||||
Provides caching for expensive queries with configurable TTL
|
||||
"""
|
||||
import asyncio
import hashlib
import json
import sys
from functools import wraps
from pathlib import Path
from typing import Any, Callable, Optional

import redis
from redis.exceptions import RedisError
||||
# Add parent path to allow imports from modules
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
from modules.universal_logger import get_logger
|
||||
from web.backend.core.config import settings
|
||||
|
||||
# Module-wide logger from the project's universal logger; all messages in
# this file are emitted under the 'CacheManager' name.
logger = get_logger('CacheManager')
class CacheManager:
    """Redis cache manager with automatic connection handling.

    Values are JSON-serialized before storage. Every operation degrades
    gracefully: when Redis is unreachable, reads return ``None`` and
    writes/deletes become silent no-ops, so the application keeps working
    (just uncached) instead of failing.
    """

    def __init__(self, host: str = '127.0.0.1', port: int = 6379, db: int = 0, ttl: int = 300):
        """
        Initialize cache manager and attempt an eager connection.

        Args:
            host: Redis host (default: 127.0.0.1)
            port: Redis port (default: 6379)
            db: Redis database number (default: 0)
            ttl: Default TTL in seconds (default: 300 = 5 minutes)
        """
        self.host = host
        self.port = port
        self.db = db
        self.default_ttl = ttl
        self._redis = None  # set by _connect(); stays None when Redis is down
        self._connect()

    def _connect(self):
        """Connect to Redis; on failure disable caching instead of raising."""
        try:
            self._redis = redis.Redis(
                host=self.host,
                port=self.port,
                db=self.db,
                decode_responses=True,    # store/read str, not bytes
                socket_connect_timeout=2,
                socket_timeout=2
            )
            # Test connection eagerly so failures surface at startup
            self._redis.ping()
            logger.info(f"Connected to Redis at {self.host}:{self.port}", module="Redis")
        except RedisError as e:
            logger.warning(f"Redis connection failed: {e}. Caching disabled.", module="Redis")
            self._redis = None

    @property
    def is_available(self) -> bool:
        """Check if Redis is currently reachable (issues a PING per access)."""
        if self._redis is None:
            return False
        try:
            self._redis.ping()
            return True
        except RedisError:
            return False

    def get(self, key: str) -> Optional[Any]:
        """
        Get value from cache

        Args:
            key: Cache key

        Returns:
            Cached value (deserialized from JSON) or None if not found/error
        """
        if not self.is_available:
            return None

        try:
            value = self._redis.get(key)
            if value is None:
                return None
            return json.loads(value)
        except (RedisError, json.JSONDecodeError) as e:
            logger.warning(f"Cache get error for key '{key}': {e}", module="Cache")
            return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None):
        """
        Set value in cache

        Args:
            key: Cache key
            value: Value to cache (will be JSON serialized)
            ttl: TTL in seconds (default: use default_ttl)
        """
        if not self.is_available:
            return

        ttl = ttl if ttl is not None else self.default_ttl

        try:
            serialized = json.dumps(value)
            # SETEX stores the value and its expiry atomically
            self._redis.setex(key, ttl, serialized)
        except (RedisError, TypeError) as e:
            # TypeError: value was not JSON-serializable
            logger.warning(f"Cache set error for key '{key}': {e}", module="Cache")

    def delete(self, key: str):
        """
        Delete key from cache

        Args:
            key: Cache key to delete
        """
        if not self.is_available:
            return

        try:
            self._redis.delete(key)
        except RedisError as e:
            logger.warning(f"Cache delete error for key '{key}': {e}", module="Cache")

    def clear(self, pattern: str = "*"):
        """
        Clear cache keys matching pattern

        Args:
            pattern: Redis key pattern (default: "*" clears all)
        """
        if not self.is_available:
            return

        try:
            # SCAN instead of KEYS: KEYS blocks the Redis server while it
            # walks the whole keyspace; SCAN iterates in small batches.
            keys = list(self._redis.scan_iter(match=pattern))
            if keys:
                self._redis.delete(*keys)
                logger.info(f"Cleared {len(keys)} cache keys matching '{pattern}'", module="Cache")
        except RedisError as e:
            logger.warning(f"Cache clear error for pattern '{pattern}': {e}", module="Cache")

    @staticmethod
    def _build_key(key_prefix: str, func_name: str, args: tuple, kwargs: dict) -> str:
        """
        Build a deterministic cache key for a function call.

        Uses a SHA-256 digest of the repr of the arguments rather than
        builtin hash(): hash() is randomized per process (PYTHONHASHSEED),
        so keys built with it could never be shared between workers or
        survive a restart — defeating a Redis-backed cache — and it raises
        TypeError on unhashable arguments such as lists and dicts.
        """
        payload = repr((args, sorted(kwargs.items())))
        digest = hashlib.sha256(payload.encode('utf-8')).hexdigest()
        return f"{key_prefix}:{func_name}:{digest}"

    def cached(self, key_prefix: str, ttl: Optional[int] = None):
        """
        Decorator for caching function results

        Args:
            key_prefix: Prefix for cache key (full key includes function args)
            ttl: TTL in seconds (default: use default_ttl)

        Example:
            @cache_manager.cached('stats', ttl=300)
            def get_download_stats(platform: str, days: int):
                # Expensive query
                return stats

        NOTE: a cached result of None is indistinguishable from a cache
        miss, so functions returning None are re-executed every call.
        """
        def decorator(func: Callable) -> Callable:
            @wraps(func)
            async def async_wrapper(*args, **kwargs):
                cache_key = self._build_key(key_prefix, func.__name__, args, kwargs)

                cached_result = self.get(cache_key)
                if cached_result is not None:
                    logger.debug(f"Cache HIT: {cache_key}", module="Cache")
                    return cached_result

                # Cache miss - execute function and store the result
                logger.debug(f"Cache MISS: {cache_key}", module="Cache")
                result = await func(*args, **kwargs)
                self.set(cache_key, result, ttl)
                return result

            @wraps(func)
            def sync_wrapper(*args, **kwargs):
                cache_key = self._build_key(key_prefix, func.__name__, args, kwargs)

                cached_result = self.get(cache_key)
                if cached_result is not None:
                    logger.debug(f"Cache HIT: {cache_key}", module="Cache")
                    return cached_result

                # Cache miss - execute function and store the result
                logger.debug(f"Cache MISS: {cache_key}", module="Cache")
                result = func(*args, **kwargs)
                self.set(cache_key, result, ttl)
                return result

            # Pick the wrapper matching the decorated function's type so
            # async callers still receive an awaitable.
            return async_wrapper if asyncio.iscoroutinefunction(func) else sync_wrapper

        return decorator
# Global cache manager instance (use centralized config).
# Process-wide singleton for API modules that want caching. The connection
# is attempted eagerly at import time; if Redis is unreachable, _connect()
# logs a warning and every cache operation becomes a no-op.
cache_manager = CacheManager(
    host=settings.REDIS_HOST,   # Redis server address from central settings
    port=settings.REDIS_PORT,
    db=settings.REDIS_DB,       # logical Redis database number
    ttl=settings.REDIS_TTL      # default TTL (seconds) for cached entries
)
def invalidate_download_cache():
    """Drop every cached entry related to downloads.

    Clears the ``downloads``, ``stats`` and ``filters`` key namespaces so
    the next request recomputes them instead of serving stale data.
    """
    for namespace_pattern in ("downloads:*", "stats:*", "filters:*"):
        cache_manager.clear(namespace_pattern)
    logger.info("Invalidated download-related caches", module="Cache")
Reference in New Issue
Block a user