# cosmo/backend/app/main.py
"""
Cosmo - Deep Space Explorer Backend API
FastAPI application entry point
"""
import sys
from pathlib import Path
# Add backend directory to Python path for direct execution
backend_dir = Path(__file__).resolve().parent.parent
if str(backend_dir) not in sys.path:
sys.path.insert(0, str(backend_dir))
import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.staticfiles import StaticFiles
from app.config import settings
from app.api.auth import router as auth_router
from app.api.user import router as user_router
from app.api.system import router as system_router
from app.api.danmaku import router as danmaku_router
from app.api.task import router as task_router
from app.api.cache import router as cache_router
from app.api.celestial_static import router as celestial_static_router
from app.api.celestial_body import router as celestial_body_router
from app.api.celestial_resource import router as celestial_resource_router
from app.api.celestial_orbit import router as celestial_orbit_router
from app.api.nasa_download import router as nasa_download_router
from app.api.celestial_position import router as celestial_position_router
from app.api.star_system import router as star_system_router
from app.api.scheduled_job import router as scheduled_job_router
from app.api.social import router as social_router # Import social_router
from app.api.event import router as event_router # Import event_router
from app.services.redis_cache import redis_cache
from app.services.cache_preheat import preheat_all_caches
from app.services.scheduler_service import scheduler_service
from app.database import close_db
# Configure logging.
# The default (unchanged) JWT secret marks a development deployment: log at
# INFO there, and at WARNING once a real secret has been configured.
log_level = logging.INFO if settings.jwt_secret_key == "your-secret-key-change-this-in-production" else logging.WARNING
logging.basicConfig(
    level=log_level,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)

# In production, raise the threshold of known-chatty loggers.
if log_level == logging.WARNING:
    for _noisy_name, _noisy_level in (
        ("app.services.cache", logging.ERROR),
        ("app.services.redis_cache", logging.ERROR),
        ("app.api.celestial_position", logging.WARNING),
        ("apscheduler", logging.WARNING),
    ):
        logging.getLogger(_noisy_name).setLevel(_noisy_level)

logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager - startup and shutdown events.

    Startup: connect Redis, create database tables, seed default system
    settings, preheat caches, and start the APScheduler in exactly one
    worker (guarded by a Redis lock that is renewed periodically).

    Shutdown: stop the scheduler, cancel the lock-renewal task, release
    the lock, and close the Redis and database connections.
    """
    # Startup
    logger.info("=" * 60)
    logger.info("Starting Cosmo Backend API...")
    logger.info("=" * 60)

    # Connect to Redis
    await redis_cache.connect()

    # Initialize database tables (create if not exist)
    from app.database import engine, Base
    from app.models.db import SystemSettings  # noqa: F401 - import registers the model with Base
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    logger.info("✓ Database tables initialized")

    # Initialize default system settings
    from app.database import AsyncSessionLocal
    from app.services.system_settings_service import system_settings_service
    async with AsyncSessionLocal() as db:
        await system_settings_service.initialize_default_settings(db)
        await db.commit()
    logger.info("✓ Default system settings initialized")

    # Preheat caches (load from database to Redis)
    await preheat_all_caches()

    # Start scheduler: a Redis lock ensures only one instance runs across
    # multiple workers.
    # NOTE(review): this import shadows the module-level redis_cache (from
    # app.services.redis_cache) for the remainder of this function, including
    # the disconnect() call at shutdown - confirm both names refer to the
    # same client object.
    from app.services.cache import redis_cache
    import asyncio

    scheduler_lock_key = "scheduler:lock"
    scheduler_lock_ttl = 30  # Lock expires after 30 seconds (renewed periodically)
    renew_task = None  # Strong reference to the renewal task (see below)

    # Try to acquire scheduler lock
    lock_acquired = await redis_cache.set_if_not_exists(
        scheduler_lock_key,
        "locked",
        ttl=scheduler_lock_ttl,
    )
    if lock_acquired:
        scheduler_service.start()
        logger.info("✓ Scheduler started in this worker (acquired lock)")

        async def renew_scheduler_lock():
            """Renew the scheduler lock while the scheduler is running."""
            while scheduler_service.scheduler.running:
                await asyncio.sleep(15)  # Renew every 15 seconds (TTL is 30s)
                try:
                    await redis_cache.set(scheduler_lock_key, "locked", ttl=scheduler_lock_ttl)
                except Exception as e:
                    logger.error(f"Failed to renew scheduler lock: {e}")

        # Keep a reference to the task: the event loop holds only weak
        # references to running tasks, so a discarded create_task() result
        # can be garbage-collected before it finishes.
        renew_task = asyncio.create_task(renew_scheduler_lock())
    else:
        logger.info("⊘ Scheduler not started in this worker (another worker holds the lock)")

    logger.info("✓ Application started successfully")
    logger.info("=" * 60)

    yield

    # Shutdown
    logger.info("=" * 60)
    logger.info("Shutting down Cosmo Backend API...")

    # Stop scheduler, cancel the renewal task, and release the lock
    if scheduler_service.scheduler.running:
        scheduler_service.shutdown()
        if renew_task is not None:
            renew_task.cancel()
        try:
            await redis_cache.delete(scheduler_lock_key)
            logger.info("✓ Scheduler lock released")
        except Exception as e:
            logger.error(f"Failed to release scheduler lock: {e}")

    # Disconnect Redis
    await redis_cache.disconnect()

    # Close database connections
    await close_db()

    logger.info("✓ Application shutdown complete")
    logger.info("=" * 60)
# Create the FastAPI application
app = FastAPI(
    title=settings.app_name,
    description=(
        "Backend API for deep space probe visualization "
        "using NASA JPL Horizons data"
    ),
    version="1.0.0",
    lifespan=lifespan,
)

# Configure CORS (must be registered before the compression middleware below)
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# GZip-compress responses larger than 1KB.
# This significantly reduces the size of orbit data (~3MB -> ~300KB).
app.add_middleware(GZipMiddleware, minimum_size=1000)
# Register all API routers under the shared prefix, in a fixed order.
_API_ROUTERS = (
    auth_router,
    user_router,
    system_router,
    danmaku_router,
    # Celestial body related routers
    star_system_router,
    celestial_body_router,
    celestial_position_router,
    celestial_resource_router,
    celestial_orbit_router,
    celestial_static_router,
    # Admin and utility routers
    cache_router,
    nasa_download_router,
    task_router,
    scheduled_job_router,
    social_router,
    event_router,
)
for _router in _API_ROUTERS:
    app.include_router(_router, prefix=settings.api_prefix)

# Mount static files for uploaded resources
upload_dir = Path(__file__).parent.parent / "upload"
upload_dir.mkdir(exist_ok=True)
app.mount("/upload", StaticFiles(directory=str(upload_dir)), name="upload")
logger.info(f"Static files mounted at /upload -> {upload_dir}")

# Mount public assets directory
public_assets_dir = Path(__file__).parent.parent / "public" / "assets"
public_assets_dir.mkdir(parents=True, exist_ok=True)
app.mount("/public/assets", StaticFiles(directory=str(public_assets_dir)), name="public_assets")
logger.info(f"Public assets mounted at /public/assets -> {public_assets_dir}")
@app.get("/")
async def root():
"""Root endpoint"""
return {
"app": settings.app_name,
"version": "1.0.0",
"docs": "/docs",
"api": settings.api_prefix,
}
@app.get("/health")
async def health():
"""Health check endpoint with service status"""
from app.services.redis_cache import redis_cache
redis_stats = await redis_cache.get_stats()
return {
"status": "healthy",
"redis": redis_stats,
"database": "connected", # If we got here, database is working
}
if __name__ == "__main__":
import uvicorn
uvicorn.run(
"app.main:app",
host="0.0.0.0",
port=8000,
reload=True,
log_level="info",
)