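"""Danmaku (bullet comment) service.

Messages are stored in a Redis sorted set (`cosmo:danmaku:stream`) with the Unix
timestamp as the score, so expired entries can be trimmed with ZREMRANGEBYSCORE.
The retention window is read from the `danmaku_ttl` system setting and falls back
to 24 hours when the setting is missing or unreadable.
"""
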
import json
import time
import logging
from typing import List, Dict

from sqlalchemy.ext.asyncio import AsyncSession

from app.services.redis_cache import redis_cache
from app.services.system_settings_service import system_settings_service

logger = logging.getLogger(__name__)


class DanmakuService:
    def __init__(self):
        self.redis_key = "cosmo:danmaku:stream"
        self.default_ttl = 86400  # 24 hours fallback

    async def get_ttl(self, db: AsyncSession) -> int:
        """Fetch the TTL from system settings, or fall back to the default."""
        try:
            setting = await system_settings_service.get_setting("danmaku_ttl", db)
            if setting:
                val = int(setting.value)
                # logger.info(f"Using configured danmaku_ttl: {val}")
                return val
        except Exception as e:
            logger.error(f"Failed to fetch danmaku_ttl: {e}")
        return self.default_ttl

    async def add_danmaku(self, user_id: int, username: str, text: str, db: AsyncSession) -> Dict:
        """Add a new danmaku message."""
        # Validate length (double-check on the server side)
        if len(text) > 20:
            text = text[:20]

        now = time.time()
        ttl = await self.get_ttl(db)
        expire_time = now - ttl

        logger.info(f"Adding danmaku: '{text}' at {now}, ttl={ttl}, expire_threshold={expire_time}")

        # Create the message object.
        # ZSET members are unique: if the same user sent the same text twice, ZADD would
        # only update the score. Embedding the timestamp in the JSON ("ts" and "id") keeps
        # repeated messages distinct; "id" also serves as a stable React key on the frontend.
        message = {
            "uid": str(user_id),
            "username": username,
            "text": text,
            "ts": now,
            "id": f"{user_id}_{now}",  # Unique ID for React keys
        }

        serialized = json.dumps(message)

        if redis_cache.client:
            try:
                # 1. Remove expired messages first: ZREMRANGEBYSCORE key 0 (now - ttl)
                await redis_cache.client.zremrangebyscore(self.redis_key, 0, expire_time)

                # 2. Add the new message with the current timestamp as its score
                await redis_cache.client.zadd(self.redis_key, {serialized: now})

                # 3. Refresh the key expiry as a safety net (ZADD alone keeps the key alive)
                await redis_cache.client.expire(self.redis_key, ttl)

                logger.info(f"Danmaku added by {username}: {text}")
                return message
            except Exception as e:
                logger.error(f"Redis error adding danmaku: {e}")
                raise
        else:
            logger.warning("Redis not connected, danmaku lost")
            return message

    async def get_active_danmaku(self, db: AsyncSession) -> List[Dict]:
        """Get all active danmaku messages."""
        now = time.time()
        ttl = await self.get_ttl(db)
        min_score = now - ttl

        if redis_cache.client:
            try:
                # Get messages scored from (now - ttl) to +inf:
                # ZRANGEBYSCORE key min max
                results = await redis_cache.client.zrangebyscore(self.redis_key, min_score, "+inf")

                logger.debug(f"Fetching danmaku: found {len(results)} messages (since {min_score})")

                messages = []
                for res in results:
                    try:
                        messages.append(json.loads(res))
                    except json.JSONDecodeError:
                        continue

                return messages
            except Exception as e:
                logger.error(f"Redis error getting danmaku: {e}")
                return []
        return []


danmaku_service = DanmakuService()
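
# Usage sketch (illustrative, not part of this module): wiring the singleton into a
# FastAPI router. The route paths, the `DanmakuIn` schema, and the `get_db` /
# `get_current_user` dependencies are assumptions for this example.
#
#   from fastapi import APIRouter, Depends
#
#   router = APIRouter()
#
#   @router.post("/danmaku")
#   async def post_danmaku(
#       payload: DanmakuIn,
#       db: AsyncSession = Depends(get_db),
#       user=Depends(get_current_user),
#   ):
#       return await danmaku_service.add_danmaku(user.id, user.username, payload.text, db)
#
#   @router.get("/danmaku")
#   async def list_danmaku(db: AsyncSession = Depends(get_db)):
#       return await danmaku_service.get_active_danmaku(db)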