# cosmo_backend/app/services/redis_cache.py

"""
Redis cache service
Provides three-layer caching:
L1: In-memory cache (process-level, TTL: 10min)
L2: Redis cache (shared, TTL: 1h-7days)
L3: Database (persistent)
"""
import redis.asyncio as redis
from typing import Any, Optional
import json
import logging
from datetime import datetime, timedelta
from app.config import settings
logger = logging.getLogger(__name__)
class RedisCache:
    """Async Redis cache manager — the L2 (shared) layer of the cache stack.

    All operations degrade gracefully: when no connection is established or
    a Redis command fails, methods log and return a neutral value
    (``None`` / ``False`` / ``0``) instead of raising, so callers can fall
    back to the other cache layers.
    """

    def __init__(self):
        # Client is created lazily in connect(); None while disconnected.
        self.client: Optional[redis.Redis] = None
        self._connected = False

    async def connect(self):
        """Connect to Redis and verify the link with a PING.

        On failure, logs a warning and leaves the instance in the
        disconnected state (all cache operations become no-ops).
        """
        try:
            self.client = redis.from_url(
                settings.redis_url,
                encoding="utf-8",
                decode_responses=True,
                max_connections=settings.redis_max_connections,
            )
            # Test connection eagerly so startup logs reflect reality.
            await self.client.ping()
            self._connected = True
            logger.info(f"✓ Connected to Redis at {settings.redis_host}:{settings.redis_port}")
        except Exception as e:
            logger.warning(f"⚠ Redis connection failed: {e}")
            logger.warning("Falling back to in-memory cache only")
            self._connected = False

    async def disconnect(self):
        """Disconnect from Redis and reset connection state."""
        if self.client:
            await self.client.close()
            # FIX: previously _connected stayed True and the stale client
            # remained set, so later operations hit a closed connection and
            # logged errors instead of cleanly no-op'ing.
            self.client = None
            self._connected = False
            logger.info("Redis connection closed")

    async def get(self, key: str) -> Optional[Any]:
        """Return the JSON-decoded value for ``key``.

        Returns None on miss, on error, or when disconnected.
        """
        if not self._connected or not self.client:
            return None
        try:
            value = await self.client.get(key)
            # FIX: compare against None — GET returns None on a miss, but a
            # cached falsy payload (e.g. an empty string) is still a HIT.
            if value is not None:
                logger.debug(f"Redis cache HIT: {key}")
                return json.loads(value)
            logger.debug(f"Redis cache MISS: {key}")
            return None
        except Exception as e:
            logger.error(f"Redis get error for key '{key}': {e}")
            return None

    async def set(
        self,
        key: str,
        value: Any,
        ttl_seconds: Optional[int] = None,
    ) -> bool:
        """Store ``value`` as JSON under ``key`` with an optional TTL.

        Non-JSON-serializable values are stringified via ``default=str``.
        Returns True on success, False on error or when disconnected.
        """
        if not self._connected or not self.client:
            return False
        try:
            serialized = json.dumps(value, default=str)
            # Truthiness is deliberate: SETEX rejects a TTL of 0, so a
            # zero/None TTL falls through to a plain SET (no expiry).
            if ttl_seconds:
                await self.client.setex(key, ttl_seconds, serialized)
            else:
                await self.client.set(key, serialized)
            logger.debug(f"Redis cache SET: {key} (TTL: {ttl_seconds}s)")
            return True
        except Exception as e:
            logger.error(f"Redis set error for key '{key}': {e}")
            return False

    async def delete(self, key: str) -> bool:
        """Delete ``key``; returns False on error or when disconnected."""
        if not self._connected or not self.client:
            return False
        try:
            await self.client.delete(key)
            logger.debug(f"Redis cache DELETE: {key}")
            return True
        except Exception as e:
            logger.error(f"Redis delete error for key '{key}': {e}")
            return False

    async def exists(self, key: str) -> bool:
        """Return True iff ``key`` exists; False on error/disconnected."""
        if not self._connected or not self.client:
            return False
        try:
            result = await self.client.exists(key)
            return result > 0
        except Exception as e:
            logger.error(f"Redis exists error for key '{key}': {e}")
            return False

    async def clear_pattern(self, pattern: str) -> int:
        """Delete all keys matching a glob ``pattern``; return count deleted.

        Uses SCAN (non-blocking) rather than KEYS to avoid stalling Redis.
        """
        if not self._connected or not self.client:
            return 0
        try:
            keys = []
            async for key in self.client.scan_iter(match=pattern):
                keys.append(key)
            if keys:
                deleted = await self.client.delete(*keys)
                logger.info(f"Cleared {deleted} keys matching pattern '{pattern}'")
                return deleted
            return 0
        except Exception as e:
            logger.error(f"Redis clear_pattern error for pattern '{pattern}': {e}")
            return 0

    async def get_stats(self) -> dict:
        """Return a small dict of Redis INFO metrics (or connected=False)."""
        if not self._connected or not self.client:
            return {"connected": False}
        try:
            info = await self.client.info()
            return {
                "connected": True,
                "used_memory_human": info.get("used_memory_human"),
                "connected_clients": info.get("connected_clients"),
                "total_commands_processed": info.get("total_commands_processed"),
                "keyspace_hits": info.get("keyspace_hits"),
                "keyspace_misses": info.get("keyspace_misses"),
            }
        except Exception as e:
            logger.error(f"Redis get_stats error: {e}")
            return {"connected": False, "error": str(e)}
# Singleton instance
# Module-level shared instance; starts disconnected, so every cache
# operation is a no-op until connect() is awaited. disconnect() should be
# awaited at application shutdown.
redis_cache = RedisCache()
# Helper functions for common cache operations
def make_cache_key(prefix: str, *args) -> str:
    """Build a standardized colon-delimited cache key.

    ``None`` arguments are skipped; everything else is stringified and
    joined after the prefix, e.g. ``make_cache_key("nasa", 499, "1h")``
    -> ``"nasa:499:1h"``.
    """
    suffix = ":".join(str(arg) for arg in args if arg is not None)
    return f"{prefix}:{suffix}"
def get_ttl_seconds(cache_type: str) -> int:
    """Return the TTL in seconds for a named cache category.

    Unknown categories fall back to one hour.
    """
    hour, day = 3600, 86400
    ttl_by_type = {
        "current_positions": hour,        # 1 hour
        "historical_positions": 7 * day,  # 7 days
        "static_data": 30 * day,          # 30 days
        "nasa_api_response": 3 * day,     # 3 days
    }
    return ttl_by_type.get(cache_type, hour)
async def cache_nasa_response(
    body_id: str,
    start_time: Optional[datetime],
    end_time: Optional[datetime],
    step: str,
    data: Any,
) -> bool:
    """Cache a NASA Horizons API response in Redis.

    The key is derived from the body id, the (optional) time window and the
    step; a missing endpoint is encoded as "now". Returns whether the Redis
    write succeeded.
    """
    window = [t.isoformat() if t else "now" for t in (start_time, end_time)]
    cache_key = make_cache_key("nasa", body_id, window[0], window[1], step)
    ttl = get_ttl_seconds("nasa_api_response")
    return await redis_cache.set(cache_key, data, ttl)
async def get_cached_nasa_response(
    body_id: str,
    start_time: Optional[datetime],
    end_time: Optional[datetime],
    step: str,
) -> Optional[Any]:
    """Fetch a previously cached NASA Horizons response, or None on miss.

    Must mirror the key scheme used by cache_nasa_response(): a missing
    time endpoint is encoded as "now".
    """
    window = [t.isoformat() if t else "now" for t in (start_time, end_time)]
    cache_key = make_cache_key("nasa", body_id, window[0], window[1], step)
    return await redis_cache.get(cache_key)