0.9.9
parent
f3a5f71e57
commit
75318d5b28
|
|
@ -25,6 +25,7 @@ from app.services.db_service import (
|
|||
resource_service,
|
||||
)
|
||||
from app.services.orbit_service import orbit_service
|
||||
from app.services.system_settings_service import system_settings_service
|
||||
from app.database import get_db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -64,11 +65,49 @@ async def create_celestial_body(
|
|||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Body with ID {body_data.id} already exists"
|
||||
)
|
||||
|
||||
|
||||
new_body = await celestial_body_service.create_body(body_data.dict(), db)
|
||||
return new_body
|
||||
|
||||
|
||||
@router.get("/search")
async def search_celestial_body(
    name: str = Query(..., description="Body name or ID to search in NASA Horizons")
):
    """
    Look up a celestial body in the NASA Horizons catalog by name or ID.

    Returns a success payload with the resolved ID, short name, and full
    name, or a failure payload carrying the service-reported error.
    Raises HTTP 500 on unexpected errors.
    """
    logger.info(f"Searching for celestial body: {name}")

    try:
        result = horizons_service.search_body_by_name(name)

        # Failure path first: surface the service's error message as-is.
        if not result["success"]:
            logger.warning(f"Search failed: {result['error']}")
            return {
                "success": False,
                "error": result["error"]
            }

        logger.info(f"Found body: {result['full_name']}")
        body_info = {
            "id": result["id"],
            "name": result["name"],
            "full_name": result["full_name"],
        }
        return {
            "success": True,
            "data": body_info
        }
    except Exception as e:
        logger.error(f"Search error: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Search failed: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.put("/{body_id}")
|
||||
async def update_celestial_body(
|
||||
body_id: str,
|
||||
|
|
@ -541,6 +580,21 @@ async def list_bodies(
|
|||
|
||||
bodies_list = []
|
||||
for body in bodies:
|
||||
# Get resources for this body
|
||||
resources = await resource_service.get_resources_by_body(body.id, None, db)
|
||||
|
||||
# Group resources by type
|
||||
resources_by_type = {}
|
||||
for resource in resources:
|
||||
if resource.resource_type not in resources_by_type:
|
||||
resources_by_type[resource.resource_type] = []
|
||||
resources_by_type[resource.resource_type].append({
|
||||
"id": resource.id,
|
||||
"file_path": resource.file_path,
|
||||
"file_size": resource.file_size,
|
||||
"mime_type": resource.mime_type,
|
||||
})
|
||||
|
||||
bodies_list.append(
|
||||
{
|
||||
"id": body.id,
|
||||
|
|
@ -549,6 +603,8 @@ async def list_bodies(
|
|||
"type": body.type,
|
||||
"description": body.description,
|
||||
"is_active": body.is_active,
|
||||
"resources": resources_by_type,
|
||||
"has_resources": len(resources) > 0,
|
||||
}
|
||||
)
|
||||
return {"bodies": bodies_list}
|
||||
|
|
@ -558,9 +614,26 @@ async def list_bodies(
|
|||
async def clear_cache():
    """
    Clear the data cache (admin endpoint).

    Clears both the in-process memory cache and the Redis cache
    (``positions:*`` and ``nasa:*`` key patterns).

    Returns:
        Summary dict with the number of Redis keys deleted per pattern.
    """
    # Clear memory cache
    cache_service.clear()

    # BUG FIX: an early ``return`` used to sit here, making everything
    # below unreachable — Redis keys were never actually cleared and the
    # detailed response was never returned.

    # Clear Redis cache
    positions_cleared = await redis_cache.clear_pattern("positions:*")
    nasa_cleared = await redis_cache.clear_pattern("nasa:*")

    total_cleared = positions_cleared + nasa_cleared

    return {
        "message": f"Cache cleared successfully ({total_cleared} Redis keys deleted)",
        "memory_cache": "cleared",
        "redis_cache": {
            "positions_keys": positions_cleared,
            "nasa_keys": nasa_cleared,
            "total": total_cleared
        }
    }
|
||||
|
||||
|
||||
@router.post("/cache/preheat")
|
||||
|
|
@ -1041,3 +1114,242 @@ async def delete_orbit(
|
|||
return {"message": f"Orbit for {body_id} deleted successfully"}
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Orbit not found")
|
||||
|
||||
|
||||
# ============================================================
|
||||
# NASA Data Download APIs
|
||||
# ============================================================
|
||||
|
||||
@router.get("/positions/download/bodies")
async def get_downloadable_bodies(
    db: AsyncSession = Depends(get_db)
):
    """
    List the celestial bodies eligible for NASA data download, grouped by type.

    Returns:
        ``{"bodies": {type: [body summaries sorted by name]}}``
    """
    logger.info("Fetching downloadable bodies for NASA data download")

    try:
        all_bodies = await celestial_body_service.get_all_bodies(db)

        # Bucket the bodies by their ``type`` field.
        grouped_bodies = {}
        for body in all_bodies:
            summary = {
                "id": body.id,
                "name": body.name,
                "name_zh": body.name_zh,
                "type": body.type,
                "is_active": body.is_active,
                "description": body.description
            }
            grouped_bodies.setdefault(body.type, []).append(summary)

        # Keep each bucket alphabetized for stable UI presentation.
        for members in grouped_bodies.values():
            members.sort(key=lambda item: item["name"])

        logger.info(f"✅ Returning {len(all_bodies)} bodies in {len(grouped_bodies)} groups")
        return {"bodies": grouped_bodies}

    except Exception as e:
        logger.error(f"Failed to fetch downloadable bodies: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/positions/download/status")
async def get_download_status(
    body_id: str = Query(..., description="Celestial body ID"),
    start_date: str = Query(..., description="Start date (YYYY-MM-DD)"),
    end_date: str = Query(..., description="End date (YYYY-MM-DD)"),
    db: AsyncSession = Depends(get_db)
):
    """
    Report which dates in ``[start_date, end_date]`` already have position
    data stored for *body_id*.

    Raises HTTP 400 for malformed dates, HTTP 500 on other failures.
    """
    logger.info(f"Checking download status for {body_id} from {start_date} to {end_date}")

    try:
        # Window spans the whole end day, so the range is inclusive.
        window_start = datetime.strptime(start_date, "%Y-%m-%d")
        window_end = datetime.strptime(end_date, "%Y-%m-%d").replace(hour=23, minute=59, second=59)

        available_dates = await position_service.get_available_dates(
            body_id=body_id,
            start_time=window_start,
            end_time=window_end,
            session=db
        )

        # Normalize each entry to an ISO string (DB may hand back
        # date objects or plain strings depending on the dialect).
        available_date_strings = []
        for entry in available_dates:
            if hasattr(entry, 'isoformat'):
                available_date_strings.append(entry.isoformat())
            else:
                available_date_strings.append(str(entry))

        logger.info(f"✅ Found {len(available_date_strings)} dates with data")
        return {
            "body_id": body_id,
            "start_date": start_date,
            "end_date": end_date,
            "available_dates": available_date_strings
        }

    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
    except Exception as e:
        logger.error(f"Failed to check download status: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
class DownloadPositionRequest(BaseModel):
    """Request payload for POST /positions/download."""
    # IDs of the celestial bodies to fetch data for
    body_ids: list[str]
    dates: list[str]  # List of dates in YYYY-MM-DD format
|
||||
|
||||
|
||||
async def _download_position_for_date(body_id: str, date_str: str, db: AsyncSession) -> tuple[dict, bool]:
    """
    Download and persist the position of one body for one date.

    Returns a ``(result_entry, succeeded)`` pair; ``result_entry`` is the
    per-date dict appended to the response, ``succeeded`` feeds the totals.
    Exceptions propagate to the caller, which records them as failures.
    """
    target_date = datetime.strptime(date_str, "%Y-%m-%d")

    # Skip the NASA query when the date already has stored data.
    existing = await position_service.get_positions(
        body_id=body_id,
        start_time=target_date,
        end_time=target_date.replace(hour=23, minute=59, second=59),
        session=db
    )
    if existing and len(existing) > 0:
        return (
            {"date": date_str, "status": "exists", "message": "Data already exists"},
            True,
        )

    # Download from NASA Horizons (single sample at 00:00:00 UTC).
    logger.info(f"Downloading data for {body_id} on {date_str}")
    positions = horizons_service.get_body_positions(
        body_id=body_id,
        start_time=target_date,
        end_time=target_date,
        step="1d"
    )

    if not positions or len(positions) == 0:
        return (
            {"date": date_str, "status": "failed", "error": "No data returned from NASA"},
            False,
        )

    first = positions[0]
    # Velocity components are optional on the returned position objects.
    position_data = [{
        "time": target_date,
        "x": first.x,
        "y": first.y,
        "z": first.z,
        "vx": first.vx if hasattr(first, 'vx') else None,
        "vy": first.vy if hasattr(first, 'vy') else None,
        "vz": first.vz if hasattr(first, 'vz') else None,
    }]

    await position_service.save_positions(
        body_id=body_id,
        positions=position_data,
        source="nasa_horizons",
        session=db
    )

    logger.info(f"✅ Downloaded data for {body_id} on {date_str}")
    return (
        {
            "date": date_str,
            "status": "success",
            "position": {"x": first.x, "y": first.y, "z": first.z},
        },
        True,
    )


@router.post("/positions/download")
async def download_positions(
    request: DownloadPositionRequest,
    db: AsyncSession = Depends(get_db)
):
    """
    Download position data for specified bodies on specified dates.

    This endpoint will:
    1. Query NASA Horizons API for the position at 00:00:00 UTC on each date
    2. Save the data to the positions table
    3. Return the downloaded data

    Args:
        request: ``body_ids`` (celestial body IDs) and ``dates`` (YYYY-MM-DD).
        db: Async database session.

    Returns:
        Summary of downloaded data with per-body/per-date success status.
    """
    logger.info(f"Downloading positions for {len(request.body_ids)} bodies on {len(request.dates)} dates")

    try:
        results = []
        total_success = 0
        total_failed = 0

        for body_id in request.body_ids:
            # Unknown bodies are reported in the results, not fatal.
            body = await celestial_body_service.get_body_by_id(body_id, db)
            if not body:
                results.append({
                    "body_id": body_id,
                    "status": "failed",
                    "error": "Body not found"
                })
                total_failed += 1
                continue

            body_results = {
                "body_id": body_id,
                "body_name": body.name_zh or body.name,
                "dates": []
            }

            for date_str in request.dates:
                try:
                    entry, succeeded = await _download_position_for_date(body_id, date_str, db)
                except Exception as e:
                    # A failure on one date must not abort the batch.
                    logger.error(f"Failed to download {body_id} on {date_str}: {e}")
                    entry, succeeded = {
                        "date": date_str,
                        "status": "failed",
                        "error": str(e)
                    }, False

                body_results["dates"].append(entry)
                if succeeded:
                    total_success += 1
                else:
                    total_failed += 1

            results.append(body_results)

        logger.info(f"🎉 Download complete: {total_success} succeeded, {total_failed} failed")
        return {
            "message": f"Downloaded {total_success} positions ({total_failed} failed)",
            "total_success": total_success,
            "total_failed": total_failed,
            "results": results
        }

    except Exception as e:
        logger.error(f"Download failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
|
|
|||
|
|
@ -0,0 +1,253 @@
|
|||
"""
|
||||
System Settings API Routes
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import Optional, Dict, Any, List
|
||||
import logging
|
||||
from pydantic import BaseModel
|
||||
|
||||
from app.services.system_settings_service import system_settings_service
|
||||
from app.services.redis_cache import redis_cache
|
||||
from app.services.cache import cache_service
|
||||
from app.database import get_db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/system", tags=["system"])
|
||||
|
||||
|
||||
# Pydantic models
|
||||
class SettingCreate(BaseModel):
    """Request body for creating a system setting."""
    key: str  # unique setting key
    value: Any  # raw value; persisted as text according to value_type
    value_type: str = "string"  # one of: string, int, float, bool, json
    category: str = "general"  # grouping bucket, e.g. 'visualization', 'cache', 'ui'
    label: str  # human-readable display label
    description: Optional[str] = None
    is_public: bool = False  # whether the frontend may read this setting
|
||||
|
||||
|
||||
class SettingUpdate(BaseModel):
    """Partial update payload for a system setting; None fields are left unchanged."""
    value: Optional[Any] = None
    value_type: Optional[str] = None
    category: Optional[str] = None
    label: Optional[str] = None
    description: Optional[str] = None
    is_public: Optional[bool] = None
|
||||
|
||||
|
||||
# ============================================================
|
||||
# System Settings CRUD APIs
|
||||
# ============================================================
|
||||
|
||||
@router.get("/settings")
async def list_settings(
    category: Optional[str] = Query(None, description="Filter by category"),
    is_public: Optional[bool] = Query(None, description="Filter by public status"),
    db: AsyncSession = Depends(get_db)
):
    """
    Return every system setting, optionally filtered.

    Query parameters:
    - category: restrict to one category (e.g. 'visualization', 'cache', 'ui')
    - is_public: restrict to settings exposed (or hidden from) the frontend
    """
    settings = await system_settings_service.get_all_settings(db, category, is_public)

    result = []
    for setting in settings:
        # Typed value (int/float/bool/json) derived from the stored string.
        parsed_value = await system_settings_service.get_setting_value(setting.key, db)
        created = setting.created_at
        updated = setting.updated_at

        result.append({
            "id": setting.id,
            "key": setting.key,
            "value": parsed_value,
            "raw_value": setting.value,
            "value_type": setting.value_type,
            "category": setting.category,
            "label": setting.label,
            "description": setting.description,
            "is_public": setting.is_public,
            "created_at": created.isoformat() if created else None,
            "updated_at": updated.isoformat() if updated else None,
        })

    return {"settings": result}
|
||||
|
||||
|
||||
@router.get("/settings/{key}")
async def get_setting(
    key: str,
    db: AsyncSession = Depends(get_db)
):
    """Fetch one setting by key; 404 when it does not exist."""
    setting = await system_settings_service.get_setting(key, db)
    if setting is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Setting '{key}' not found"
        )

    # Typed value decoded from the stored text representation.
    parsed_value = await system_settings_service.get_setting_value(key, db)

    created = setting.created_at
    updated = setting.updated_at
    return {
        "id": setting.id,
        "key": setting.key,
        "value": parsed_value,
        "raw_value": setting.value,
        "value_type": setting.value_type,
        "category": setting.category,
        "label": setting.label,
        "description": setting.description,
        "is_public": setting.is_public,
        "created_at": created.isoformat() if created else None,
        "updated_at": updated.isoformat() if updated else None,
    }
|
||||
|
||||
|
||||
@router.post("/settings", status_code=status.HTTP_201_CREATED)
async def create_setting(
    data: SettingCreate,
    db: AsyncSession = Depends(get_db)
):
    """Create a new system setting; 400 when the key is already taken."""
    # Reject duplicates up front.
    if await system_settings_service.get_setting(data.key, db):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Setting '{data.key}' already exists"
        )

    new_setting = await system_settings_service.create_setting(data.dict(), db)
    await db.commit()

    # Echo back the typed value rather than the stored string.
    parsed_value = await system_settings_service.get_setting_value(data.key, db)

    return {
        "id": new_setting.id,
        "key": new_setting.key,
        "value": parsed_value,
        "value_type": new_setting.value_type,
        "category": new_setting.category,
        "label": new_setting.label,
        "description": new_setting.description,
        "is_public": new_setting.is_public,
    }
|
||||
|
||||
|
||||
@router.put("/settings/{key}")
async def update_setting(
    key: str,
    data: SettingUpdate,
    db: AsyncSession = Depends(get_db)
):
    """Apply the non-null fields of the payload to an existing setting; 404 if absent."""
    # Only fields the client actually supplied are forwarded.
    changes = {field: value for field, value in data.dict().items() if value is not None}

    updated = await system_settings_service.update_setting(key, changes, db)
    if updated is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Setting '{key}' not found"
        )

    await db.commit()

    parsed_value = await system_settings_service.get_setting_value(key, db)

    return {
        "id": updated.id,
        "key": updated.key,
        "value": parsed_value,
        "value_type": updated.value_type,
        "category": updated.category,
        "label": updated.label,
        "description": updated.description,
        "is_public": updated.is_public,
    }
|
||||
|
||||
|
||||
@router.delete("/settings/{key}")
async def delete_setting(
    key: str,
    db: AsyncSession = Depends(get_db)
):
    """Delete a setting by key; 404 when no such setting exists."""
    if not await system_settings_service.delete_setting(key, db):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Setting '{key}' not found"
        )

    await db.commit()
    return {"message": f"Setting '{key}' deleted successfully"}
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Cache Management APIs
|
||||
# ============================================================
|
||||
|
||||
@router.post("/cache/clear")
async def clear_all_caches():
    """
    Flush every cache layer (memory + Redis).

    Critical platform-management operation: empties the in-process memory
    cache and deletes all Redis keys matching the position and NASA
    patterns. Returns per-pattern deletion counts.
    """
    logger.info("🧹 Starting cache clear operation...")

    # In-process cache first — cheap and synchronous.
    cache_service.clear()
    logger.info("✓ Memory cache cleared")

    # Then the shared Redis cache, pattern by pattern.
    positions_cleared = await redis_cache.clear_pattern("positions:*")
    nasa_cleared = await redis_cache.clear_pattern("nasa:*")
    total_cleared = positions_cleared + nasa_cleared
    logger.info(f"✓ Redis cache cleared ({total_cleared} keys)")

    return {
        "message": f"All caches cleared successfully ({total_cleared} Redis keys deleted)",
        "memory_cache": "cleared",
        "redis_cache": {
            "positions_keys": positions_cleared,
            "nasa_keys": nasa_cleared,
            "total": total_cleared
        }
    }
|
||||
|
||||
|
||||
@router.get("/cache/stats")
async def get_cache_stats():
    """Return Redis cache statistics plus a note about the memory cache."""
    redis_stats = await redis_cache.get_stats()

    # The in-process cache exposes no metrics; report a static note.
    memory_info = {
        "description": "In-memory cache (process-level)",
        "note": "Statistics not available for in-memory cache"
    }
    return {
        "redis": redis_stats,
        "memory": memory_info
    }
|
||||
|
||||
|
||||
@router.post("/settings/init-defaults")
async def initialize_default_settings(
    db: AsyncSession = Depends(get_db)
):
    """Seed the settings table with the platform defaults (admin use)."""
    await system_settings_service.initialize_default_settings(db)
    await db.commit()
    return {"message": "Default settings initialized successfully"}
|
||||
|
|
@ -20,6 +20,7 @@ from app.config import settings
|
|||
from app.api.routes import router as celestial_router
|
||||
from app.api.auth import router as auth_router
|
||||
from app.api.user import router as user_router
|
||||
from app.api.system import router as system_router
|
||||
from app.services.redis_cache import redis_cache
|
||||
from app.services.cache_preheat import preheat_all_caches
|
||||
from app.database import close_db
|
||||
|
|
@ -44,6 +45,21 @@ async def lifespan(app: FastAPI):
|
|||
# Connect to Redis
|
||||
await redis_cache.connect()
|
||||
|
||||
# Initialize database tables (create if not exist)
|
||||
from app.database import engine, Base
|
||||
from app.models.db import SystemSettings # Import to register the model
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
logger.info("✓ Database tables initialized")
|
||||
|
||||
# Initialize default system settings
|
||||
from app.database import AsyncSessionLocal
|
||||
from app.services.system_settings_service import system_settings_service
|
||||
async with AsyncSessionLocal() as db:
|
||||
await system_settings_service.initialize_default_settings(db)
|
||||
await db.commit()
|
||||
logger.info("✓ Default system settings initialized")
|
||||
|
||||
# Preheat caches (load from database to Redis)
|
||||
await preheat_all_caches()
|
||||
|
||||
|
|
@ -87,6 +103,7 @@ app.add_middleware(
|
|||
app.include_router(celestial_router, prefix=settings.api_prefix)
|
||||
app.include_router(auth_router, prefix=settings.api_prefix)
|
||||
app.include_router(user_router, prefix=settings.api_prefix)
|
||||
app.include_router(system_router, prefix=settings.api_prefix)
|
||||
|
||||
# Mount static files for uploaded resources
|
||||
upload_dir = Path(__file__).parent.parent / "upload"
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ from .orbit import Orbit
|
|||
from .user import User, user_roles
|
||||
from .role import Role
|
||||
from .menu import Menu, RoleMenu
|
||||
from .system_settings import SystemSettings
|
||||
|
||||
__all__ = [
|
||||
"CelestialBody",
|
||||
|
|
@ -22,5 +23,6 @@ __all__ = [
|
|||
"Role",
|
||||
"Menu",
|
||||
"RoleMenu",
|
||||
"SystemSettings",
|
||||
"user_roles",
|
||||
]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,26 @@
|
|||
"""
|
||||
System Settings Database Model
|
||||
"""
|
||||
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, Text
|
||||
from sqlalchemy.sql import func
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class SystemSettings(Base):
    """System settings table - stores platform configuration parameters"""

    __tablename__ = "system_settings"

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Unique, indexed lookup key (e.g. "timeline_interval_days")
    key = Column(String(100), unique=True, nullable=False, index=True, comment="Setting key")
    # Stored as text regardless of logical type; decoded per value_type by the service layer
    value = Column(Text, nullable=False, comment="Setting value (JSON string or plain text)")
    value_type = Column(String(20), nullable=False, default="string", comment="Value type: string, int, float, bool, json")
    category = Column(String(50), nullable=False, default="general", comment="Setting category")
    label = Column(String(200), nullable=False, comment="Display label")
    description = Column(Text, comment="Setting description")
    # Public settings may be served to the frontend; private ones are backend-only
    is_public = Column(Boolean, default=False, comment="Whether this setting is accessible to frontend")
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    # No insert default: stays NULL until the row is first updated
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

    def __repr__(self):
        return f"<SystemSettings(key={self.key}, value={self.value})>"
|
||||
|
|
@ -309,6 +309,36 @@ class PositionService:
|
|||
async with AsyncSessionLocal() as s:
|
||||
return await _delete(s)
|
||||
|
||||
@staticmethod
async def get_available_dates(
    body_id: str,
    start_time: datetime,
    end_time: datetime,
    session: Optional[AsyncSession] = None
) -> List[datetime]:
    """
    Return the distinct dates (sorted ascending) that have stored position
    rows for *body_id* within ``[start_time, end_time]``.

    Opens a fresh session when none is supplied.
    """
    async def _fetch(s: AsyncSession) -> List[datetime]:
        from sqlalchemy import func, Date

        # Truncate timestamps to dates and deduplicate at the SQL level.
        day = func.date(Position.time)
        stmt = (
            select(day)
            .where(
                and_(
                    Position.body_id == body_id,
                    Position.time >= start_time,
                    Position.time <= end_time,
                )
            )
            .distinct()
            .order_by(day)
        )
        rows = await s.execute(stmt)
        return [row[0] for row in rows]

    if session:
        return await _fetch(session)
    async with AsyncSessionLocal() as s:
        return await _fetch(s)
|
||||
|
||||
|
||||
class NasaCacheService:
|
||||
"""Service for NASA API response caching"""
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from datetime import datetime, timedelta
|
|||
from astroquery.jplhorizons import Horizons
|
||||
from astropy.time import Time
|
||||
import logging
|
||||
import re
|
||||
|
||||
from app.models.celestial import Position, CelestialBody
|
||||
|
||||
|
|
@ -93,6 +94,108 @@ class HorizonsService:
|
|||
logger.error(f"Error querying Horizons for body {body_id}: {str(e)}")
|
||||
raise
|
||||
|
||||
def search_body_by_name(self, name: str) -> dict:
    """
    Search for a celestial body by name in NASA Horizons database

    Performs a live network query via astroquery's Horizons client, then
    heuristically extracts a numeric ID and short name from the returned
    ``targetname`` string.

    Args:
        name: Body name or ID to search for

    Returns:
        Dictionary with search results:
        {
            "success": bool,
            "id": str (extracted or input),
            "name": str (short name),
            "full_name": str (complete name from NASA),
            "error": str (if failed)
        }
    """
    try:
        logger.info(f"Searching Horizons for: {name}")

        # Try to query with the name; Horizons raises when the target is
        # unknown or ambiguous, which is handled below.
        obj = Horizons(id=name, location=self.location)
        vec = obj.vectors()

        # Get the full target name from response
        targetname = vec['targetname'][0]
        logger.info(f"Found target: {targetname}")

        # Extract ID and name from targetname.
        # Possible formats:
        # 1. "136472 Makemake (2005 FY9)" - ID at start
        # 2. "Voyager 1 (spacecraft) (-31)" - ID in parentheses
        # 3. "Mars (499)" - ID in parentheses
        # 4. "Parker Solar Probe (spacecraft)" - no ID
        # 5. "Hubble Space Telescope (spacecra" - truncated

        numeric_id = None
        short_name = None

        # Check if input is already a numeric ID (spacecraft IDs are negative)
        input_is_numeric = re.match(r'^-?\d+$', name.strip())
        if input_is_numeric:
            numeric_id = name.strip()
            # Extract name from targetname:
            # remove leading ID if present, then drop any parenthesized suffix
            name_part = re.sub(r'^\d+\s+', '', targetname)
            short_name = name_part.split('(')[0].strip()
        else:
            # Try to extract ID from start of targetname (format: "136472 Makemake")
            start_match = re.match(r'^(\d+)\s+(.+)', targetname)
            if start_match:
                numeric_id = start_match.group(1)
                short_name = start_match.group(2).split('(')[0].strip()
            else:
                # Try to extract ID from parentheses (format: "Name (-31)" or "Name (499)")
                id_match = re.search(r'\((-?\d+)\)', targetname)
                if id_match:
                    numeric_id = id_match.group(1)
                    short_name = targetname.split('(')[0].strip()
                else:
                    # No numeric ID found, use input name as ID
                    numeric_id = name
                    short_name = targetname.split('(')[0].strip()

        return {
            "success": True,
            "id": numeric_id,
            "name": short_name,
            "full_name": targetname,
            "error": None
        }

    except Exception as e:
        error_msg = str(e)
        logger.error(f"Error searching for {name}: {error_msg}")

        # Check for specific error types. NOTE(review): this matches
        # substrings of astroquery's exception text and may break if the
        # library changes its messages — confirm against the pinned version.
        if 'Ambiguous target name' in error_msg:
            return {
                "success": False,
                "id": None,
                "name": None,
                "full_name": None,
                "error": "名称不唯一,请提供更具体的名称或 JPL Horizons ID"
            }
        elif 'No matches found' in error_msg or 'Unknown target' in error_msg:
            return {
                "success": False,
                "id": None,
                "name": None,
                "full_name": None,
                "error": "未找到匹配的天体,请检查名称或 ID"
            }
        else:
            return {
                "success": False,
                "id": None,
                "name": None,
                "full_name": None,
                "error": f"查询失败: {error_msg}"
            }
|
||||
|
||||
|
||||
# Singleton instance
|
||||
horizons_service = HorizonsService()
|
||||
|
|
|
|||
|
|
@ -0,0 +1,210 @@
|
|||
"""
|
||||
System Settings Database Service
|
||||
"""
|
||||
from sqlalchemy import select, update, delete
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import Optional, List, Dict, Any
|
||||
import json
|
||||
import logging
|
||||
|
||||
from app.models.db import SystemSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SystemSettingsService:
|
||||
"""Service for managing system settings"""
|
||||
|
||||
async def get_all_settings(
    self,
    session: AsyncSession,
    category: Optional[str] = None,
    is_public: Optional[bool] = None
) -> List[SystemSettings]:
    """Fetch every setting, narrowed by category and/or public flag when given."""
    stmt = select(SystemSettings)

    # Apply filters only when the caller supplied them; note is_public
    # is compared against None so False is a valid filter value.
    if category:
        stmt = stmt.where(SystemSettings.category == category)
    if is_public is not None:
        stmt = stmt.where(SystemSettings.is_public == is_public)

    rows = await session.execute(stmt)
    return rows.scalars().all()
|
||||
|
||||
async def get_setting(
    self,
    key: str,
    session: AsyncSession
) -> Optional[SystemSettings]:
    """Look up one setting row by its unique key; None when absent."""
    stmt = select(SystemSettings).where(SystemSettings.key == key)
    rows = await session.execute(stmt)
    return rows.scalar_one_or_none()
|
||||
|
||||
async def get_setting_value(
    self,
    key: str,
    session: AsyncSession,
    default: Any = None
) -> Any:
    """
    Return the setting's value decoded according to its ``value_type``.

    Falls back to *default* when the key is missing or decoding fails.
    """
    setting = await self.get_setting(key, session)
    if not setting:
        return default

    # Decoder per declared type; unknown types fall through as raw strings.
    decoders = {
        "int": int,
        "float": float,
        "bool": lambda raw: raw.lower() in ("true", "1", "yes"),
        "json": json.loads,
    }
    decode = decoders.get(setting.value_type)

    try:
        if decode is None:  # string (or unrecognized type)
            return setting.value
        return decode(setting.value)
    except Exception as e:
        logger.error(f"Error converting setting {key}: {e}")
        return default
|
||||
|
||||
async def create_setting(
    self,
    data: Dict[str, Any],
    session: AsyncSession
) -> SystemSettings:
    """Persist a new setting row; the value is serialized to text for storage."""
    raw_value = data.get("value")
    value_type = data.get("value_type", "string")

    # JSON payloads are serialized; everything else is plainly stringified.
    if value_type == "json" and not isinstance(raw_value, str):
        stored_value = json.dumps(raw_value)
    else:
        stored_value = str(raw_value)

    row = SystemSettings(
        key=data["key"],
        value=stored_value,
        value_type=value_type,
        category=data.get("category", "general"),
        label=data["label"],
        description=data.get("description"),
        is_public=data.get("is_public", False)
    )

    session.add(row)
    # Flush + refresh so generated fields (id, timestamps) are populated
    # without committing — the caller owns the transaction.
    await session.flush()
    await session.refresh(row)
    return row
|
||||
|
||||
async def update_setting(
    self,
    key: str,
    data: Dict[str, Any],
    session: AsyncSession
) -> Optional[SystemSettings]:
    """Update an existing setting identified by *key*.

    Args:
        key: Settings key identifying the row to update.
        data: Partial fields to apply. A "value" entry is normalized to its
            stored string form (json.dumps when the effective value_type is
            "json", str() otherwise).
        session: Active async DB session.

    Returns:
        The refreshed SystemSettings row, or None when *key* does not exist.
    """
    setting = await self.get_setting(key, session)
    if not setting:
        return None

    # Work on a shallow copy so the caller's dict is not mutated when we
    # rewrite data["value"] below (the original modified it in place).
    data = dict(data)

    if "value" in data:
        value = data["value"]
        value_type = data.get("value_type", setting.value_type)

        if value_type == "json" and not isinstance(value, str):
            data["value"] = json.dumps(value)
        else:
            data["value"] = str(value)

    # BUGFIX: the original looped `for key, value in data.items()`, shadowing
    # the `key` parameter. Distinct names keep the parameter intact.
    # None values are skipped so partial updates cannot null out columns.
    for field, field_value in data.items():
        if hasattr(setting, field) and field_value is not None:
            setattr(setting, field, field_value)

    await session.flush()
    await session.refresh(setting)
    return setting
|
||||
|
||||
async def delete_setting(
    self,
    key: str,
    session: AsyncSession
) -> bool:
    """Remove the setting row matching *key*.

    Returns:
        True when a row was actually deleted, False when no row matched.
    """
    stmt = delete(SystemSettings).where(SystemSettings.key == key)
    outcome = await session.execute(stmt)
    # rowcount reflects how many rows the DELETE affected.
    return outcome.rowcount > 0
|
||||
|
||||
async def initialize_default_settings(self, session: AsyncSession):
    """Seed the default system settings, creating only the keys that are missing.

    Existing rows are left untouched, so operators' customizations survive
    repeated startups.
    """

    def _int_default(key, value, category, label, description, is_public):
        # All current defaults are int-typed, so value_type is fixed here.
        return {
            "key": key,
            "value": value,
            "value_type": "int",
            "category": category,
            "label": label,
            "description": description,
            "is_public": is_public,
        }

    defaults = [
        _int_default(
            "timeline_interval_days", "30", "visualization",
            "时间轴播放间隔(天)",
            "星图时间轴播放时每次跳转的天数间隔",
            True,
        ),
        _int_default(
            "current_cache_ttl_hours", "1", "cache",
            "当前位置缓存时间(小时)",
            "当前位置数据在缓存中保存的时间",
            False,
        ),
        _int_default(
            "historical_cache_ttl_days", "7", "cache",
            "历史位置缓存时间(天)",
            "历史位置数据在缓存中保存的时间",
            False,
        ),
        _int_default(
            "page_size", "10", "ui",
            "每页显示数量",
            "管理页面默认每页显示的条数",
            True,
        ),
        _int_default(
            "nasa_api_timeout", "30", "api",
            "NASA API超时时间(秒)",
            "查询NASA Horizons API的超时时间",
            False,
        ),
        _int_default(
            "orbit_points", "200", "visualization",
            "轨道线点数",
            "生成轨道线时使用的点数,越多越平滑但性能越低",
            True,
        ),
    ]

    for default in defaults:
        if not await self.get_setting(default["key"], session):
            await self.create_setting(default, session)
            logger.info(f"Created default setting: {default['key']}")
|
||||
|
||||
|
||||
# Singleton instance
# Module-level shared instance; importers (e.g. the API routers) use this
# rather than constructing their own SystemSettingsService.
system_settings_service = SystemSettingsService()
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
-- Add System Settings menu to platform management
-- This should be executed after the system is running

-- Insert the System Settings menu under the Platform Management parent.
-- The parent is resolved by name via subquery (no hard-coded parent_id);
-- if 'platform_management' is missing the subquery yields NULL and the
-- menu lands at the top level — TODO confirm that is acceptable.
-- ON CONFLICT makes the migration idempotent: re-running updates in place.
INSERT INTO menus (name, title, path, icon, parent_id, sort_order, is_active, created_at, updated_at)
VALUES (
    'system_settings',
    '系统参数',
    '/admin/system-settings',
    'settings',
    (SELECT id FROM menus WHERE name = 'platform_management'),
    1,
    true,
    NOW(),
    NOW()
)
ON CONFLICT (name) DO UPDATE
SET
    title = EXCLUDED.title,
    path = EXCLUDED.path,
    icon = EXCLUDED.icon,
    parent_id = EXCLUDED.parent_id,
    sort_order = EXCLUDED.sort_order,
    updated_at = NOW();

-- Grant access to admin role
-- Cross join filtered by name pairs the admin role with the new menu;
-- DO NOTHING keeps the grant idempotent.
INSERT INTO role_menus (role_id, menu_id)
SELECT
    r.id,
    m.id
FROM
    roles r,
    menus m
WHERE
    r.name = 'admin'
    AND m.name = 'system_settings'
ON CONFLICT (role_id, menu_id) DO NOTHING;

-- Verify the menu was added (manual check: one row with the expected
-- parent_menu title is the success condition)
SELECT
    m.id,
    m.name,
    m.title,
    m.path,
    m.icon,
    parent.title as parent_menu,
    m.sort_order
FROM menus m
LEFT JOIN menus parent ON m.parent_id = parent.id
WHERE m.name = 'system_settings';
|
||||
Binary file not shown.
Binary file not shown.
Loading…
Reference in New Issue