diff --git a/backend/.env.example b/backend/.env.example
index 079edfe..e2ce5b5 100644
--- a/backend/.env.example
+++ b/backend/.env.example
@@ -1,5 +1,5 @@
 # Application Settings
-APP_NAME=Cosmo - Deep Space Explorer
+APP_NAME=COSMO - Deep Space Explorer
 API_PREFIX=/api
 
 # CORS Settings (comma-separated list)
diff --git a/backend/CONFIG.md b/backend/CONFIG.md
index 4b3f589..9294191 100644
--- a/backend/CONFIG.md
+++ b/backend/CONFIG.md
@@ -51,7 +51,7 @@ REDIS_MAX_CONNECTIONS=50  # 最大连接数
 
 ### 3. 应用配置
 
 ```bash
-APP_NAME=Cosmo - Deep Space Explorer
+APP_NAME=COSMO - Deep Space Explorer
 API_PREFIX=/api
 CORS_ORIGINS=["*"]  # 开发环境允许所有来源
 CACHE_TTL_DAYS=3  # NASA API 缓存天数
diff --git a/backend/app/api/celestial_body.py b/backend/app/api/celestial_body.py
index c0a4191..ca46c7b 100644
--- a/backend/app/api/celestial_body.py
+++ b/backend/app/api/celestial_body.py
@@ -25,6 +25,7 @@ class CelestialBodyCreate(BaseModel):
     name_zh: Optional[str] = None
     type: str
     description: Optional[str] = None
+    details: Optional[str] = None
     is_active: bool = True
     extra_data: Optional[Dict[str, Any]] = None
 
@@ -34,6 +35,7 @@ class CelestialBodyUpdate(BaseModel):
     name_zh: Optional[str] = None
     type: Optional[str] = None
     description: Optional[str] = None
+    details: Optional[str] = None
    is_active: Optional[bool] = None
     extra_data: Optional[Dict[str, Any]] = None
 
@@ -112,7 +114,7 @@ async def get_celestial_nasa_data(
     try:
         # Fetch raw text from Horizons using the body_id
         # Note: body.id corresponds to JPL Horizons ID
-        raw_text = await horizons_service.get_object_data_raw(body.id, db)
+        raw_text = await horizons_service.get_object_data_raw(body.id)
         return {"id": body.id, "name": body.name, "raw_data": raw_text}
     except Exception as e:
         logger.error(f"Failed to fetch raw data for {body_id}: {e}")
@@ -173,6 +175,7 @@ async def get_body_info(body_id: str, db: AsyncSession = Depends(get_db)):
         name=body.name,
         type=body.type,
         description=body.description,
+        details=body.details,
         launch_date=extra_data.get("launch_date"),
         status=extra_data.get("status"),
     )
@@ -212,6 +215,7 @@ async def list_bodies(
             "name_zh": body.name_zh,
             "type": body.type,
             "description": body.description,
+            "details": body.details,
             "is_active": body.is_active,
             "resources": resources_by_type,
             "has_resources": len(resources) > 0,
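The `details` field added above is surfaced by both the info and list endpoints. A minimal smoke test, assuming a local backend on port 8000 and the `/api/celestial/info/{body_id}` route the frontend calls (adjust the base URL and prefix to your deployment):

```python
# Minimal sketch: fetch one body and confirm the new "details" field is present.
# The base URL and route prefix are assumptions, not part of this diff.
import asyncio

import httpx


async def show_details(body_id: str) -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.get(f"/api/celestial/info/{body_id}")
        resp.raise_for_status()
        info = resp.json()
        # "details" is optional; bodies without Markdown content return null
        print(info["name"], "->", (info.get("details") or "")[:80])


if __name__ == "__main__":
    asyncio.run(show_details("499"))  # 499 = Mars in JPL Horizons numbering
```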
diff --git a/backend/app/api/celestial_position.py b/backend/app/api/celestial_position.py
index b5de6be..5fa79be 100644
--- a/backend/app/api/celestial_position.py
+++ b/backend/app/api/celestial_position.py
@@ -76,7 +76,8 @@ async def get_celestial_positions(
         # Check Redis cache first (persistent across restarts)
         start_str = "now"
         end_str = "now"
-        redis_key = make_cache_key("positions", start_str, end_str, step)
+        body_ids_str = body_ids if body_ids else "all"
+        redis_key = make_cache_key("positions", start_str, end_str, step, body_ids_str)
         redis_cached = await redis_cache.get(redis_key)
         if redis_cached is not None:
             logger.info("Cache hit (Redis) for recent positions")
@@ -194,7 +195,8 @@ async def get_celestial_positions(
             # Cache in Redis for persistence across restarts
             start_str = start_dt.isoformat() if start_dt else "now"
             end_str = end_dt.isoformat() if end_dt else "now"
-            redis_key = make_cache_key("positions", start_str, end_str, step)
+            body_ids_str = body_ids if body_ids else "all"
+            redis_key = make_cache_key("positions", start_str, end_str, step, body_ids_str)
             await redis_cache.set(redis_key, bodies_data, get_ttl_seconds("current_positions"))
             return CelestialDataResponse(bodies=bodies_data)
         else:
@@ -204,7 +206,8 @@ async def get_celestial_positions(
         # Check Redis cache first (persistent across restarts)
         start_str = start_dt.isoformat() if start_dt else "now"
         end_str = end_dt.isoformat() if end_dt else "now"
-        redis_key = make_cache_key("positions", start_str, end_str, step)
+        body_ids_str = body_ids if body_ids else "all"  # Include body_ids in cache key
+        redis_key = make_cache_key("positions", start_str, end_str, step, body_ids_str)
         redis_cached = await redis_cache.get(redis_key)
         if redis_cached is not None:
             logger.info("Cache hit (Redis) for positions")
@@ -222,7 +225,9 @@ async def get_celestial_positions(
 
         # Filter bodies if body_ids specified
         if body_id_list:
+            logger.info(f"Filtering bodies from {len(all_bodies)} total. Requested IDs: {body_id_list}")
             all_bodies = [b for b in all_bodies if b.id in body_id_list]
+            logger.info(f"After filtering: {len(all_bodies)} bodies. IDs: {[b.id for b in all_bodies]}")
 
         use_db_cache = True
         db_cached_bodies = []
@@ -334,7 +339,7 @@ async def get_celestial_positions(
                 # Special handling for Cassini (mission ended 2017-09-15)
                 elif body.id == "-82":
                     cassini_date = datetime(2017, 9, 15, 11, 58, 0)
-                    pos_data = horizons_service.get_body_positions(body.id, cassini_date, cassini_date, step)
+                    pos_data = await horizons_service.get_body_positions(body.id, cassini_date, cassini_date, step)
                     positions_list = [
                         {"time": p.time.isoformat(), "x": p.x, "y": p.y, "z": p.z}
                         for p in pos_data
@@ -342,7 +347,7 @@ async def get_celestial_positions(
                     ]
                 else:
                     # Download from NASA Horizons
-                    pos_data = await horizons_service.get_body_positions(body.id, db, start_dt, end_dt, step)
+                    pos_data = await horizons_service.get_body_positions(body.id, start_dt, end_dt, step)
                     positions_list = [
                         {"time": p.time.isoformat(), "x": p.x, "y": p.y, "z": p.z}
                         for p in pos_data
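The reason for threading `body_ids` into the key: every parameter that changes the response has to be part of the cache key, otherwise a filtered request and an "all bodies" request collide on the same entry. An illustrative sketch of the problem and the fix — the real `make_cache_key` lives in `app/services/redis_cache.py` and may format keys differently:

```python
# Illustrative stand-in for make_cache_key: the point is that every parameter
# that changes the response -- including body_ids -- must be part of the key.
def make_cache_key(prefix: str, *parts: str) -> str:
    return prefix + ":" + ":".join(str(p) for p in parts)


# Before the fix, both requests collided on the same key:
old_all = make_cache_key("positions", "now", "now", "1d")
old_filtered = make_cache_key("positions", "now", "now", "1d")
assert old_all == old_filtered  # filtered results could overwrite the full set

# After the fix, the requested body list disambiguates the entries:
new_all = make_cache_key("positions", "now", "now", "1d", "all")
new_filtered = make_cache_key("positions", "now", "now", "1d", "-31,-32")
assert new_all != new_filtered
```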
diff --git a/backend/app/api/nasa_download.py b/backend/app/api/nasa_download.py
index ee2c734..86d87e0 100644
--- a/backend/app/api/nasa_download.py
+++ b/backend/app/api/nasa_download.py
@@ -217,15 +217,16 @@ async def download_positions(
                     continue
 
                 # Download from NASA Horizons
+                logger.info(f"Downloading position for body {body_id} on {date_str}")
                 positions = await horizons_service.get_body_positions(
                     body_id=body_id,
-                    db=db,
                     start_time=target_date,
                     end_time=target_date,
                     step="1d"
                 )
 
                 if positions and len(positions) > 0:
+                    logger.info(f"Received position data for body {body_id}: x={positions[0].x}, y={positions[0].y}, z={positions[0].z}")
                     # Save to database
                     position_data = [{
                         "time": target_date,
@@ -243,6 +244,17 @@ async def download_positions(
                         source="nasa_horizons",
                         session=db
                     )
+                    logger.info(f"Saved position for body {body_id} on {date_str}")
+
+                    # Invalidate caches for this date to ensure fresh data is served
+                    from app.services.redis_cache import redis_cache, make_cache_key
+                    start_str = target_date.isoformat()
+                    end_str = target_date.isoformat()
+                    # Clear both "all bodies" cache and specific body cache
+                    for body_ids_str in ["all", body_id]:
+                        redis_key = make_cache_key("positions", start_str, end_str, "1d", body_ids_str)
+                        await redis_cache.delete(redis_key)
+                        logger.debug(f"Invalidated cache: {redis_key}")
 
                     body_results["dates"].append({
                         "date": date_str,
@@ -283,3 +295,89 @@ async def download_positions(
     except Exception as e:
         logger.error(f"Download failed: {e}")
         raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.post("/delete")
+async def delete_positions(
+    request: DownloadPositionRequest,
+    db: AsyncSession = Depends(get_db)
+):
+    """
+    Delete position data for specified bodies on specified dates
+
+    Args:
+    - body_ids: List of celestial body IDs
+    - dates: List of dates (YYYY-MM-DD format)
+
+    Returns:
+    - Summary of deleted data
+    """
+    logger.info(f"Deleting positions for {len(request.body_ids)} bodies on {len(request.dates)} dates")
+
+    try:
+        total_deleted = 0
+        from sqlalchemy import text
+
+        for body_id in request.body_ids:
+            # Invalidate caches for this body
+            from app.services.redis_cache import redis_cache, make_cache_key
+
+            # Loop over the requested dates and delete the matching records
+            for date_str in request.dates:
+                try:
+                    # Parse date
+                    target_date = datetime.strptime(date_str, "%Y-%m-%d")
+                    # End of day
+                    end_of_day = target_date.replace(hour=23, minute=59, second=59, microsecond=999999)
+
+                    # Execute the deletion with raw SQL via text(): a
+                    # parameterized range delete is simpler than going through
+                    # the ORM here, and position_service does not currently
+                    # expose a delete helper.
+
+                    stmt = text("""
+                        DELETE FROM positions
+                        WHERE body_id = :body_id
+                        AND time >= :start_time
+                        AND time <= :end_time
+                    """)
+
+                    result = await db.execute(stmt, {
+                        "body_id": body_id,
+                        "start_time": target_date,
+                        "end_time": end_of_day
+                    })
+
+                    deleted_count = result.rowcount
+                    total_deleted += deleted_count
+
+                    if deleted_count > 0:
+                        logger.info(f"Deleted {deleted_count} records for {body_id} on {date_str}")
+
+                    # Invalidate cache for this specific date/body combo
+                    # Note: This is approximate as cache keys might cover ranges
+                    start_str = target_date.isoformat()
+                    end_str = target_date.isoformat()
+                    # Clear both "all bodies" cache and specific body cache
+                    for body_ids_str in ["all", body_id]:
+                        # We try to clear the '1d' step cache
+                        redis_key = make_cache_key("positions", start_str, end_str, "1d", body_ids_str)
+                        await redis_cache.delete(redis_key)
+
+                except Exception as e:
+                    logger.error(f"Failed to delete data for {body_id} on {date_str}: {e}")
+
+        await db.commit()
+
+        # Clear general patterns to be safe if ranges were cached
+        await redis_cache.clear_pattern("positions:*")
+
+        return {
+            "message": f"Successfully deleted {total_deleted} position records",
+            "total_deleted": total_deleted
+        }
+
+    except Exception as e:
+        await db.rollback()
+        logger.error(f"Delete failed: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
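The delete endpoint finishes with `clear_pattern("positions:*")` because per-key invalidation cannot reach every cached range. If `clear_pattern` still needs an implementation, a SCAN-based version (non-blocking, unlike `KEYS`) could look roughly like this; the `redis.asyncio` client and URL are assumptions, not the project's actual wiring:

```python
# Hedged sketch of pattern-based invalidation using redis.asyncio.
# The project's redis_cache.clear_pattern may already do something equivalent;
# this only illustrates the SCAN-based approach (KEYS would block Redis).
import redis.asyncio as aioredis


async def clear_pattern(client: aioredis.Redis, pattern: str) -> int:
    deleted = 0
    # scan_iter walks the keyspace incrementally instead of blocking the server
    async for key in client.scan_iter(match=pattern, count=500):
        deleted += await client.delete(key)
    return deleted


# Usage (assumed connection URL):
#   client = aioredis.from_url("redis://localhost:6379/0")
#   await clear_pattern(client, "positions:*")
```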
diff --git a/backend/app/models/celestial.py b/backend/app/models/celestial.py
index e6ca46c..e6d590f 100644
--- a/backend/app/models/celestial.py
+++ b/backend/app/models/celestial.py
@@ -45,6 +45,7 @@ class BodyInfo(BaseModel):
     name: str
     type: Literal["planet", "probe", "star", "dwarf_planet", "satellite", "comet"]
     description: str
+    details: str | None = None
     launch_date: str | None = None
     status: str | None = None
 
@@ -200,4 +201,26 @@ CELESTIAL_BODIES = {
         "type": "dwarf_planet",
         "description": "鸟神星,柯伊伯带中第二亮的天体",
     },
+    # Comets / Interstellar Objects
+    "1I": {
+        "name": "1I/'Oumuamua",
+        "name_zh": "奥陌陌",
+        "type": "comet",
+        "description": "原定名 1I/2017 U1,是已知第一颗经过太阳系的星际天体。它于2017年10月18日(UT)在距离地球约0.2 AU(30,000,000 km;19,000,000 mi)处被泛星1号望远镜发现,并在极端双曲线的轨道上运行。",
+        "status": "active",
+    },
+    "3I": {
+        "name": "3I/ATLAS",
+        "name_zh": "3I/ATLAS",
+        "type": "comet",
+        "description": "又称C/2025 N1 (ATLAS),是一颗星际彗星,由位于智利里奥乌尔塔多的小行星陆地撞击持续报警系统于2025年7月1日发现。",
+        "status": "active",
+    },
+    "90000030": {
+        "name": "1P/Halley",
+        "name_zh": "哈雷彗星",
+        "type": "comet",
+        "description": "哈雷彗星(正式名称为1P/Halley)是著名的短周期彗星,每隔75-76年就能从地球上被观测到,亦是唯一能用肉眼直接从地球看到的短周期彗星,人的一生中可能经历两次其来访。",
+        "status": "active",
+    },
+}
diff --git a/backend/app/models/db/celestial_body.py b/backend/app/models/db/celestial_body.py
index 12d343f..266178b 100644
--- a/backend/app/models/db/celestial_body.py
+++ b/backend/app/models/db/celestial_body.py
@@ -18,6 +18,7 @@ class CelestialBody(Base):
     name_zh = Column(String(200), nullable=True, comment="Chinese name")
     type = Column(String(50), nullable=False, comment="Body type")
     description = Column(Text, nullable=True, comment="Description")
+    details = Column(Text, nullable=True, comment="Detailed description (Markdown)")
     is_active = Column(Boolean, nullable=True, comment="Active status for probes (True=active, False=inactive)")
     extra_data = Column(JSONB, nullable=True, comment="Extended metadata (JSON)")
     created_at = Column(TIMESTAMP, server_default=func.now())
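The new `details` column on the ORM model does not alter an existing database by itself. A minimal, idempotent schema change for Postgres is sketched below; the table name `celestial_bodies` and the DSN are assumptions, and an Alembic migration would be the cleaner route if the project already manages schema that way:

```python
# Hedged sketch: add the "details" TEXT column to an existing Postgres database.
# Table name and DSN are assumptions; adapt to the project's actual settings.
import asyncio

from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine


async def add_details_column(dsn: str) -> None:
    engine = create_async_engine(dsn)
    async with engine.begin() as conn:
        # IF NOT EXISTS makes the script safe to re-run (Postgres-specific)
        await conn.execute(text(
            "ALTER TABLE celestial_bodies ADD COLUMN IF NOT EXISTS details TEXT"
        ))
    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(add_details_column("postgresql+asyncpg://user:pass@localhost/cosmo"))
```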
diff --git a/backend/app/services/horizons.py b/backend/app/services/horizons.py
index 7d09574..89fd320 100644
--- a/backend/app/services/horizons.py
+++ b/backend/app/services/horizons.py
@@ -91,26 +91,23 @@ class HorizonsService:
 
             if end_time is None:
                 end_time = start_time
-            # Format time for Horizons (YYYY-MM-DD HH:MM)
-            # Horizons accepts ISO-like format without 'T'
-            start_str = start_time.strftime('%Y-%m-%d %H:%M')
-            end_str = end_time.strftime('%Y-%m-%d %H:%M')
-
-            # Special case for single point query (start = end)
-            # Horizons requires START != STOP for ranges, but we can handle single point
-            # by making a very small range or just asking for 1 step.
-            # Actually Horizons API is fine with start=end if we don't ask for range?
-            # Let's keep using range parameters as standard.
-            if start_time == end_time:
-                # Just add 1 minute for range, but we only parse the first result
-                end_dummy = end_time + timedelta(minutes=1)
-                end_str = end_dummy.strftime('%Y-%m-%d %H:%M')
-                # Override step to ensure we get the start point
-                # But wait, '1d' step might skip.
-                # If start==end, we want exactly one point.
-                # We can't use '1' count in API easily via URL params without STEP_SIZE?
-                # Let's just use the provided step.
-
+            # Format time for Horizons
+            # NASA Horizons accepts 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'
+            # When querying a single point (same start/end date) we still need
+            # STOP_TIME > START_TIME, so the stop date is pushed one day ahead
+
+            if start_time.date() == end_time.date():
+                # Single day query - use the date at 00:00
+                start_str = start_time.strftime('%Y-%m-%d')
+                # For STOP, add 1 day to satisfy STOP > START requirement
+                # But use step='1d' so we only get one data point
+                end_time_adjusted = start_time + timedelta(days=1)
+                end_str = end_time_adjusted.strftime('%Y-%m-%d')
+            else:
+                # Multi-day range query
+                start_str = start_time.strftime('%Y-%m-%d')
+                end_str = end_time.strftime('%Y-%m-%d')
+
             logger.info(f"Querying Horizons (httpx) for body {body_id} from {start_str} to {end_str}")
 
             url = "https://ssd.jpl.nasa.gov/api/horizons.api"
@@ -126,223 +123,16 @@ class HorizonsService:
                 "START_TIME": start_str,
                 "STOP_TIME": end_str,
                 "STEP_SIZE": step,
-                "CSV_FORMAT": "YES"
+                "CSV_FORMAT": "YES",
+                "OUT_UNITS": "AU-D"
             }
-            # Configure proxy if available
-            client_kwargs = {"timeout": settings.nasa_api_timeout}
-            if settings.proxy_dict:
-                client_kwargs["proxies"] = settings.proxy_dict
-
-            async with httpx.AsyncClient(**client_kwargs) as client:
-                response = await client.get(url, params=params)
-
-                if response.status_code != 200:
-                    raise Exception(f"NASA API returned status {response.status_code}")
-
-                return self._parse_vectors(response.text)
-
-        except Exception as e:
-            logger.error(f"Error querying Horizons for body {body_id}: {str(e)}")
-            raise
-
-    def _parse_vectors(self, text: str) -> list[Position]:
-        """
-        Parse Horizons CSV output for vector data
-
-        Format looks like:
-        $$SOE
-        2460676.500000000, A.D. 2025-Jan-01 00:00:00.0000, 9.776737278236609E-01, -1.726677228793678E-01, -1.636678733289160E-05, ...
-        $$EOE
-        """
-        positions = []
-
-        # Extract data block between $$SOE and $$EOE
-        match = re.search(r'\$\$SOE(.*?)\$\$EOE', text, re.DOTALL)
-        if not match:
-            logger.warning("No data block ($$SOE...$$EOE) found in Horizons response")
-            # Log a snippet of text for debugging
-            logger.debug(f"Response snippet: {text[:200]}...")
-            return []
-
-        data_block = match.group(1).strip()
-        lines = data_block.split('\n')
-
-        for line in lines:
-            parts = [p.strip() for p in line.split(',')]
-            if len(parts) < 5:
-                continue
-
-            try:
-                # Index 0: JD, 1: Date, 2: X, 3: Y, 4: Z, 5: VX, 6: VY, 7: VZ
-                # Time parsing: 2460676.500000000 is JD.
-                # A.D. 2025-Jan-01 00:00:00.0000 is Calendar.
-                # We can use JD or parse the string. Using JD via astropy is accurate.
-
-                jd_str = parts[0]
-                time_obj = Time(float(jd_str), format="jd").datetime
-
-                x = float(parts[2])
-                y = float(parts[3])
-                z = float(parts[4])
-
-                # Velocity if available (indices 5, 6, 7)
-                vx = float(parts[5]) if len(parts) > 5 else None
-                vy = float(parts[6]) if len(parts) > 6 else None
-                vz = float(parts[7]) if len(parts) > 7 else None
-
-                pos = Position(
-                    time=time_obj,
-                    x=x,
-                    y=y,
-                    z=z,
-                    vx=vx,
-                    vy=vy,
-                    vz=vz
-                )
-                positions.append(pos)
-            except ValueError as e:
-                logger.warning(f"Failed to parse line: {line}. Error: {e}")
-                continue
-
-        return positions
-
-"""
-NASA JPL Horizons data query service
-"""
-from datetime import datetime, timedelta
-# from astroquery.jplhorizons import Horizons  # Removed astroquery dependency
-from astropy.time import Time  # Kept astropy for Time object
-import logging
-import re
-import httpx
-import os
-
-from app.models.celestial import Position, CelestialBody
-from app.config import settings
-
-logger = logging.getLogger(__name__)
-
-
-class HorizonsService:
-    """Service for querying NASA JPL Horizons system"""
-
-    def __init__(self):
-        """Initialize the service"""
-        self.location = "@sun"  # Heliocentric coordinates
-        # Proxy is handled via settings.proxy_dict in each request
-
-    async def get_object_data_raw(self, body_id: str) -> str:
-        """
-        Get raw object data (terminal style text) from Horizons
-
-        Args:
-            body_id: JPL Horizons ID
-
-        Returns:
-            Raw text response from NASA
-        """
-        url = "https://ssd.jpl.nasa.gov/api/horizons.api"
-        # Ensure ID is quoted for COMMAND
-        cmd_val = f"'{body_id}'" if not body_id.startswith("'") else body_id
-
-        params = {
-            "format": "text",
-            "COMMAND": cmd_val,
-            "OBJ_DATA": "YES",
-            "MAKE_EPHEM": "NO",
-            "EPHEM_TYPE": "VECTORS",
-            "CENTER": "@sun"
-        }
-
-        try:
             # Configure proxy if available
             client_kwargs = {"timeout": settings.nasa_api_timeout}
             if settings.proxy_dict:
                 client_kwargs["proxies"] = settings.proxy_dict
                 logger.info(f"Using proxy for NASA API: {settings.proxy_dict}")
 
-            async with httpx.AsyncClient(**client_kwargs) as client:
-                logger.info(f"Fetching raw data for body {body_id} with timeout {settings.nasa_api_timeout}s")
-                response = await client.get(url, params=params)
-
-                if response.status_code != 200:
-                    raise Exception(f"NASA API returned status {response.status_code}")
-
-                return response.text
-        except Exception as e:
-            logger.error(f"Error fetching raw data for {body_id}: {str(e)}")
-            raise
-
-    async def get_body_positions(
-        self,
-        body_id: str,
-        start_time: datetime | None = None,
-        end_time: datetime | None = None,
-        step: str = "1d",
-    ) -> list[Position]:
-        """
-        Get positions for a celestial body over a time range
-
-        Args:
-            body_id: JPL Horizons ID (e.g., '-31' for Voyager 1)
-            start_time: Start datetime (default: now)
-            end_time: End datetime (default: now)
-            step: Time step (e.g., '1d' for 1 day, '1h' for 1 hour)
-
-        Returns:
-            List of Position objects
-        """
-        try:
-            # Set default times
-            if start_time is None:
-                start_time = datetime.utcnow()
-            if end_time is None:
-                end_time = start_time
-
-            # Format time for Horizons (YYYY-MM-DD HH:MM)
-            # Horizons accepts ISO-like format without 'T'
-            start_str = start_time.strftime('%Y-%m-%d %H:%M')
-            end_str = end_time.strftime('%Y-%m-%d %H:%M')
-
-            # Special case for single point query (start = end)
-            # Horizons requires START != STOP for ranges, but we can handle single point
-            # by making a very small range or just asking for 1 step.
-            # Actually Horizons API is fine with start=end if we don't ask for range?
-            # Let's keep using range parameters as standard.
-            if start_time == end_time:
-                # Just add 1 minute for range, but we only parse the first result
-                end_dummy = end_time + timedelta(minutes=1)
-                end_str = end_dummy.strftime('%Y-%m-%d %H:%M')
-                # Override step to ensure we get the start point
-                # But wait, '1d' step might skip.
-                # If start==end, we want exactly one point.
-                # We can't use '1' count in API easily via URL params without STEP_SIZE?
-                # Let's just use the provided step.
- - logger.info(f"Querying Horizons (httpx) for body {body_id} from {start_str} to {end_str}") - - url = "https://ssd.jpl.nasa.gov/api/horizons.api" - cmd_val = f"'{body_id}'" if not body_id.startswith("'") else body_id - - params = { - "format": "text", - "COMMAND": cmd_val, - "OBJ_DATA": "NO", - "MAKE_EPHEM": "YES", - "EPHEM_TYPE": "VECTORS", - "CENTER": self.location, - "START_TIME": start_str, - "STOP_TIME": end_str, - "STEP_SIZE": step, - "CSV_FORMAT": "YES" - } - - # Configure proxy if available - client_kwargs = {"timeout": settings.nasa_api_timeout} - if settings.proxy_dict: - client_kwargs["proxies"] = settings.proxy_dict - async with httpx.AsyncClient(**client_kwargs) as client: response = await client.get(url, params=params) @@ -358,20 +148,20 @@ class HorizonsService: def _parse_vectors(self, text: str) -> list[Position]: """ Parse Horizons CSV output for vector data - + Format looks like: $$SOE 2460676.500000000, A.D. 2025-Jan-01 00:00:00.0000, 9.776737278236609E-01, -1.726677228793678E-01, -1.636678733289160E-05, ... $$EOE """ positions = [] - + # Extract data block between $$SOE and $$EOE match = re.search(r'\$\$SOE(.*?)\$\$EOE', text, re.DOTALL) if not match: logger.warning("No data block ($$SOE...$$EOE) found in Horizons response") - # Log a snippet of text for debugging - logger.debug(f"Response snippet: {text[:200]}...") + # Log full response for debugging + logger.info(f"Full response for debugging:\n{text}") return [] data_block = match.group(1).strip() @@ -413,7 +203,7 @@ class HorizonsService: except ValueError as e: logger.warning(f"Failed to parse line: {line}. Error: {e}") continue - + return positions async def search_body_by_name(self, name: str, db: AsyncSession) -> dict: @@ -436,10 +226,11 @@ class HorizonsService: "CENTER": "@ssb" # Search from Solar System Barycenter for consistent object IDs } - timeout = await self._get_timeout(db) + timeout = settings.nasa_api_timeout client_kwargs = {"timeout": timeout} if settings.proxy_dict: client_kwargs["proxies"] = settings.proxy_dict + logger.info(f"Using proxy for NASA API: {settings.proxy_dict}") async with httpx.AsyncClient(**client_kwargs) as client: response = await client.get(url, params=params) @@ -449,6 +240,9 @@ class HorizonsService: response_text = response.text + # Log full response for debugging (temporarily) + logger.info(f"Full NASA API response for '{name}':\n{response_text}") + # Check for "Ambiguous target name" if "Ambiguous target name" in response_text: logger.warning(f"Ambiguous target name for: {name}") @@ -470,14 +264,53 @@ class HorizonsService: "error": "未找到匹配的天体,请检查名称或 ID" } - # Parse canonical name and ID from response (e.g., "Target body name: Jupiter Barycenter (599)") - target_name_match = re.search(r"Target body name: (.+?)\s+\((\-?\d+)\)", response_text) + # Try multiple parsing patterns for different response formats + # Pattern 1: "Target body name: Jupiter Barycenter (599)" + target_name_match = re.search(r"Target body name:\s*(.+?)\s+\((\-?\d+)\)", response_text) + + if not target_name_match: + # Pattern 2: " Revised: Mar 12, 2021 Ganymede / (Jupiter) 503" + # This pattern appears in the header section of many bodies + revised_match = re.search(r"Revised:.*?\s{2,}(.+?)\s{2,}(\-?\d+)\s*$", response_text, re.MULTILINE) + if revised_match: + full_name = revised_match.group(1).strip() + numeric_id = revised_match.group(2).strip() + short_name = full_name.split('/')[0].strip() # Remove parent body info like "/ (Jupiter)" + + logger.info(f"Found target (pattern 2): {full_name} with 
+                    return {
+                        "success": True,
+                        "id": numeric_id,
+                        "name": short_name,
+                        "full_name": full_name,
+                        "error": None
+                    }
+
+            if not target_name_match:
+                # Pattern 3: Look for body name in title section (works for comets and other objects)
+                # Example: "JPL/HORIZONS    ATLAS (C/2025 N1)    2025-Dec-"
+                title_match = re.search(r"JPL/HORIZONS\s+(.+?)\s{2,}", response_text)
+                if title_match:
+                    full_name = title_match.group(1).strip()
+                    # For this pattern, the ID was in the original COMMAND, use it
+                    numeric_id = name.strip("'\"")
+                    short_name = full_name.split('(')[0].strip()
+
+                    logger.info(f"Found target (pattern 3): {full_name} with ID: {numeric_id}")
+                    return {
+                        "success": True,
+                        "id": numeric_id,
+                        "name": short_name,
+                        "full_name": full_name,
+                        "error": None
+                    }
+
             if target_name_match:
                 full_name = target_name_match.group(1).strip()
                 numeric_id = target_name_match.group(2).strip()
                 short_name = full_name.split('(')[0].strip()  # Remove any part after '('
 
-                logger.info(f"Found target: {full_name} with ID: {numeric_id}")
+                logger.info(f"Found target (pattern 1): {full_name} with ID: {numeric_id}")
                 return {
                     "success": True,
@@ -487,7 +320,7 @@ class HorizonsService:
                 }
             else:
                 # Fallback if specific pattern not found, might be a valid but weird response
-                logger.warning(f"Could not parse target name/ID from response for: {name}. Response snippet: {response_text[:200]}")
+                logger.warning(f"Could not parse target name/ID from response for: {name}. Response snippet: {response_text[:500]}")
                 return {
                     "success": False,
                     "id": None,
diff --git a/backend/app/services/nasa_worker.py b/backend/app/services/nasa_worker.py
index 088249e..6c62b42 100644
--- a/backend/app/services/nasa_worker.py
+++ b/backend/app/services/nasa_worker.py
@@ -62,7 +62,6 @@ async def download_positions_task(task_id: int, body_ids: List[str], dates: List
                 # Download
                 positions = await horizons_service.get_body_positions(
                     body_id=body_id,
-                    db=db,
                     start_time=target_date,
                     end_time=target_date,
                     step="1d"
diff --git a/backend/app/services/orbit_service.py b/backend/app/services/orbit_service.py
index 09094a2..ed03798 100644
--- a/backend/app/services/orbit_service.py
+++ b/backend/app/services/orbit_service.py
@@ -152,7 +152,6 @@ class OrbitService:
         # Get positions from Horizons (synchronous call)
         positions = await horizons_service.get_body_positions(
             body_id=body_id,
-            db=session,
             start_time=start_time,
             end_time=end_time,
             step=f"{step_days}d"
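The services above now call `get_body_positions` without a `db` handle, so the Horizons round trip is self-contained: build a VECTORS request with httpx, keep `STOP_TIME` one day past `START_TIME`, and parse the CSV rows between `$$SOE` and `$$EOE`. A trimmed standalone sketch of that flow (no proxy handling or caching) for local experimentation:

```python
# Standalone sketch of the Horizons VECTORS round trip used by HorizonsService:
# one-day window (STOP must be later than START), AU/day units, CSV output.
# Field order in each CSV row: JD, calendar date, X, Y, Z, VX, VY, VZ, ...
import asyncio
import re
from datetime import date, timedelta

import httpx

API = "https://ssd.jpl.nasa.gov/api/horizons.api"


async def fetch_vectors(body_id: str, day: date) -> list[tuple[float, float, float]]:
    params = {
        "format": "text",
        "COMMAND": f"'{body_id}'",
        "OBJ_DATA": "NO",
        "MAKE_EPHEM": "YES",
        "EPHEM_TYPE": "VECTORS",
        "CENTER": "@sun",
        "START_TIME": day.isoformat(),
        "STOP_TIME": (day + timedelta(days=1)).isoformat(),
        "STEP_SIZE": "1d",
        "CSV_FORMAT": "YES",
        "OUT_UNITS": "AU-D",
    }
    async with httpx.AsyncClient(timeout=30) as client:
        resp = await client.get(API, params=params)
        resp.raise_for_status()
    # Data rows sit between the $$SOE and $$EOE markers
    block = re.search(r"\$\$SOE(.*?)\$\$EOE", resp.text, re.DOTALL)
    if not block:
        return []
    rows = []
    for line in block.group(1).strip().splitlines():
        parts = [p.strip() for p in line.split(",")]
        if len(parts) >= 5:
            rows.append((float(parts[2]), float(parts[3]), float(parts[4])))
    return rows


if __name__ == "__main__":
    print(asyncio.run(fetch_vectors("399", date(2025, 1, 1))))  # 399 = Earth
```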
diff --git a/backend/scripts/check_db_status.py b/backend/scripts/check_db_status.py
new file mode 100644
index 0000000..6918b6f
--- /dev/null
+++ b/backend/scripts/check_db_status.py
@@ -0,0 +1,68 @@
+"""
+Check database status: bodies, positions, resources
+"""
+import asyncio
+import os
+import sys
+from datetime import datetime
+
+# Add backend directory to path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from app.database import get_db
+from app.models.db.celestial_body import CelestialBody
+from app.models.db.position import Position
+from app.models.db.resource import Resource
+from sqlalchemy import select, func
+
+async def check_status():
+    """Check database status"""
+    print("🔍 Checking database status...")
+
+    async for session in get_db():
+        try:
+            # 1. Check Celestial Bodies
+            stmt = select(func.count(CelestialBody.id))
+            result = await session.execute(stmt)
+            body_count = result.scalar()
+            print(f"✅ Celestial Bodies: {body_count}")
+
+            # 2. Check Positions
+            stmt = select(func.count(Position.id))
+            result = await session.execute(stmt)
+            position_count = result.scalar()
+            print(f"✅ Total Positions: {position_count}")
+
+            # Check positions for Sun (10) and Earth (399)
+            for body_id in ['10', '399']:
+                stmt = select(func.count(Position.id)).where(Position.body_id == body_id)
+                result = await session.execute(stmt)
+                count = result.scalar()
+                print(f"  - Positions for {body_id}: {count}")
+
+                if count > 0:
+                    # Get latest position date
+                    stmt = select(func.max(Position.time)).where(Position.body_id == body_id)
+                    result = await session.execute(stmt)
+                    latest_date = result.scalar()
+                    print(f"    Latest date: {latest_date}")
+
+            # 3. Check Resources
+            stmt = select(func.count(Resource.id))
+            result = await session.execute(stmt)
+            resource_count = result.scalar()
+            print(f"✅ Total Resources: {resource_count}")
+
+            # Check resources for Sun (10)
+            stmt = select(Resource).where(Resource.body_id == '10')
+            result = await session.execute(stmt)
+            resources = result.scalars().all()
+            print(f"  - Resources for Sun (10): {len(resources)}")
+            for r in resources:
+                print(f"    - {r.resource_type}: {r.file_path}")
+
+        finally:
+            break
+
+if __name__ == "__main__":
+    asyncio.run(check_status())
diff --git a/backend/scripts/check_sun_data.py b/backend/scripts/check_sun_data.py
new file mode 100644
index 0000000..851c4b8
--- /dev/null
+++ b/backend/scripts/check_sun_data.py
@@ -0,0 +1,50 @@
+import asyncio
+import os
+import sys
+from datetime import datetime
+
+# Add backend directory to path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from app.database import get_db
+from app.models.db import Position
+from sqlalchemy import select, func
+
+async def check_sun_data():
+    """Check data for 2025-12-04 00:00:00"""
+    async for session in get_db():
+        try:
+            target_time = datetime(2025, 12, 4, 0, 0, 0)
+            print(f"Checking data for all bodies at {target_time}...")
+
+            # Get all bodies
+            from app.models.db.celestial_body import CelestialBody
+            stmt = select(CelestialBody.id, CelestialBody.name, CelestialBody.type).where(CelestialBody.is_active != False)
+            result = await session.execute(stmt)
+            all_bodies = result.all()
+            print(f"Total active bodies: {len(all_bodies)}")
+
+            # Check positions for each
+            missing_bodies = []
+            for body_id, body_name, body_type in all_bodies:
+                stmt = select(func.count(Position.id)).where(
+                    Position.body_id == body_id,
+                    Position.time == target_time
+                )
+                result = await session.execute(stmt)
+                count = result.scalar()
+                if count == 0:
+                    missing_bodies.append(f"{body_name} ({body_id}) [{body_type}]")
+
+            if missing_bodies:
+                print(f"❌ Missing data for {len(missing_bodies)} bodies:")
+                for b in missing_bodies:
+                    print(f"  - {b}")
+            else:
+                print("✅ All active bodies have data for this time!")
+
+        finally:
+            break
+
+if __name__ == "__main__":
+    asyncio.run(check_sun_data())
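The maintenance scripts above and below all borrow a session with the `async for session in get_db(): ... break` idiom. If more of these one-off scripts appear, a small wrapper keeps the generator cleanup explicit; a sketch, assuming `get_db` is the usual async-generator dependency from `app.database`:

```python
# Optional helper for one-off scripts: turn the get_db async generator into a
# context manager so sessions are always released, even on early returns.
# Assumes app.database.get_db is an async generator yielding an AsyncSession.
from contextlib import asynccontextmanager

from app.database import get_db


@asynccontextmanager
async def script_session():
    agen = get_db()
    session = await agen.__anext__()
    try:
        yield session
    finally:
        await agen.aclose()  # runs get_db's own cleanup (close/rollback)


# Usage in a script:
#   async with script_session() as session:
#       ...
```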
diff --git a/backend/scripts/fix_sun_data.py b/backend/scripts/fix_sun_data.py
new file mode 100644
index 0000000..d44ce6e
--- /dev/null
+++ b/backend/scripts/fix_sun_data.py
@@ -0,0 +1,58 @@
+"""
+Fix missing Sun position
+"""
+import asyncio
+import os
+import sys
+from datetime import datetime
+
+# Add backend directory to path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from app.database import get_db
+from app.models.db import Position
+
+async def fix_sun_position():
+    """Insert missing position for Sun at 2025-12-04 00:00:00"""
+    async for session in get_db():
+        try:
+            target_time = datetime(2025, 12, 4, 0, 0, 0)
+            print(f"Fixing Sun position for {target_time}...")
+
+            # Check if it exists first (double check)
+            from sqlalchemy import select, func
+            stmt = select(func.count(Position.id)).where(
+                Position.body_id == '10',
+                Position.time == target_time
+            )
+            result = await session.execute(stmt)
+            count = result.scalar()
+
+            if count > 0:
+                print("✅ Position already exists!")
+                return
+
+            # Insert
+            new_pos = Position(
+                body_id='10',
+                time=target_time,
+                x=0.0,
+                y=0.0,
+                z=0.0,
+                vx=0.0,
+                vy=0.0,
+                vz=0.0,
+                source='calculated'
+            )
+            session.add(new_pos)
+            await session.commit()
+            print("✅ Successfully inserted Sun position!")
+
+        except Exception as e:
+            print(f"❌ Error: {e}")
+            await session.rollback()
+        finally:
+            break
+
+if __name__ == "__main__":
+    asyncio.run(fix_sun_position())
diff --git a/backend/scripts/inspect_sun.py b/backend/scripts/inspect_sun.py
new file mode 100644
index 0000000..039477c
--- /dev/null
+++ b/backend/scripts/inspect_sun.py
@@ -0,0 +1,39 @@
+import asyncio
+import os
+import sys
+from sqlalchemy import select
+from datetime import datetime
+
+# Add backend directory to path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from app.database import get_db
+from app.models.db import Position
+
+async def inspect_sun_positions():
+    async for session in get_db():
+        try:
+            # List all positions for Sun
+            stmt = select(Position.time).where(Position.body_id == '10').order_by(Position.time.desc()).limit(10)
+            result = await session.execute(stmt)
+            times = result.scalars().all()
+
+            print("Recent Sun positions:")
+            for t in times:
+                print(f"  - {t} (type: {type(t)})")
+
+            # Check specifically for 2025-12-04
+            target = datetime(2025, 12, 4, 0, 0, 0)
+            stmt = select(Position).where(
+                Position.body_id == '10',
+                Position.time == target
+            )
+            result = await session.execute(stmt)
+            pos = result.scalar()
+            print(f"\nExact match for {target}: {pos}")
+
+        finally:
+            break
+
+if __name__ == "__main__":
+    asyncio.run(inspect_sun_positions())
diff --git a/backend/scripts/reset_positions.py b/backend/scripts/reset_positions.py
new file mode 100644
index 0000000..3a917cb
--- /dev/null
+++ b/backend/scripts/reset_positions.py
@@ -0,0 +1,53 @@
+"""
+Reset position data to fix units (KM -> AU)
+"""
+import asyncio
+import os
+import sys
+
+# Add backend directory to path
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+from app.database import get_db
+from app.models.db import Position
+from app.services.redis_cache import redis_cache
+from sqlalchemy import text
+
+async def reset_data():
+    """Clear positions and cache to force re-fetch in AU"""
+    print("🧹 Clearing old data (KM) to prepare for AU...")
+
+    async for session in get_db():
+        try:
+            # Clear positions table
+            print("  Truncating positions table...")
+            await session.execute(text("TRUNCATE TABLE positions RESTART IDENTITY CASCADE"))
+
+            # Clear the nasa_cache table as well (it backs nasa_cache_service);
+            # wrapped in try/except below because only the positions table is
+            # strictly required for the unit reset and nasa_cache may not exist
+            # in every environment.
+            print("  Truncating nasa_cache table...")
+            try:
+                await session.execute(text("TRUNCATE TABLE nasa_cache RESTART IDENTITY CASCADE"))
+            except Exception as e:
+                print(f"  (Note: nasa_cache might not exist or failed: {e})")
+
+            await session.commit()
+            print("✅ Database tables cleared.")
+
+            # Clear Redis
+            await redis_cache.connect()
+            await redis_cache.clear_pattern("positions:*")
+            await redis_cache.clear_pattern("nasa:*")
+            print("✅ Redis cache cleared.")
+            await redis_cache.disconnect()
+
+        except Exception as e:
+            print(f"❌ Error: {e}")
+            await session.rollback()
+        finally:
+            break
+
+if __name__ == "__main__":
+    asyncio.run(reset_data())
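`reset_positions.py` exists because earlier downloads stored kilometres while the `OUT_UNITS="AU-D"` request added in `horizons.py` returns AU and AU/day. A quick sanity check for any rows that survive the reset, using the IAU definition of the astronomical unit:

```python
# Quick sanity check after switching Horizons output to AU-D: heliocentric
# distances for planets should be of order 0.4-40 AU, not 10^7-10^9 (km).
import math

KM_PER_AU = 149_597_870.7  # IAU 2012 definition of the astronomical unit


def heliocentric_distance(x: float, y: float, z: float) -> float:
    return math.sqrt(x * x + y * y + z * z)


def looks_like_km(x: float, y: float, z: float) -> bool:
    # Anything beyond ~1000 "AU" almost certainly means the row is still in km.
    return heliocentric_distance(x, y, z) > 1_000


# Example: Earth stored correctly in AU vs. a stale km row
print(looks_like_km(0.98, -0.17, 0.0))        # False -> AU, keep
print(looks_like_km(1.47e8, -2.6e7, 1.2e4))   # True  -> km, needs re-download
```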
diff --git a/backend/upload/texture/2k_saturn_ring.jpg b/backend/upload/texture/2k_saturn_ring.jpg
new file mode 100644
index 0000000..cc9de41
Binary files /dev/null and b/backend/upload/texture/2k_saturn_ring.jpg differ
diff --git a/frontend/package.json b/frontend/package.json
index 03f7026..08bcc4f 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -18,9 +18,13 @@
     "axios": "^1.13.2",
     "html2canvas": "^1.4.1",
     "lucide-react": "^0.555.0",
+    "markdown-it": "^14.1.0",
     "react": "^19.2.0",
     "react-dom": "^19.2.0",
+    "react-markdown": "^10.1.0",
+    "react-markdown-editor-lite": "^1.3.4",
     "react-router-dom": "^7.9.6",
+    "remark-gfm": "^4.0.1",
     "three": "^0.181.2"
   },
   "devDependencies": {
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index fa38102..7e634be 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -14,6 +14,7 @@ import { InterstellarTicker } from './components/InterstellarTicker';
 import { ControlPanel } from './components/ControlPanel';
 import { AuthModal } from './components/AuthModal';
 import { MessageBoard } from './components/MessageBoard';
+import { BodyDetailOverlay } from './components/BodyDetailOverlay';  // Import the new overlay component
 import { auth } from './utils/auth';
 import type { CelestialBody } from './types';
 import { useToast } from './contexts/ToastContext';
@@ -32,6 +33,7 @@ function App() {
   const [showOrbits, setShowOrbits] = useState(true);
   const [isSoundOn, setIsSoundOn] = useState(false);
   const [showMessageBoard, setShowMessageBoard] = useState(false);
+  const [showDetailOverlayId, setShowDetailOverlayId] = useState<string | null>(null);  // State for detail overlay
 
   // Initialize state from localStorage
   useEffect(() => {
@@ -75,6 +77,19 @@ function App() {
   const loading = isTimelineMode ? historicalLoading : realTimeLoading;
   const error = isTimelineMode ? historicalError : realTimeError;
 
+  // Debug: log bodies when they change
+  useEffect(() => {
+    console.log('[App] Bodies updated:', {
+      isTimelineMode,
+      totalBodies: bodies.length,
+      bodiesWithPositions: bodies.filter(b => b.positions && b.positions.length > 0).length,
+      bodyTypes: bodies.reduce((acc, b) => {
+        acc[b.type] = (acc[b.type] || 0) + 1;
+        return acc;
+      }, {} as Record<string, number>)
+    });
+  }, [bodies, isTimelineMode]);
+
   const [selectedBody, setSelectedBody] = useState<CelestialBody | null>(null);
   const { trajectoryPositions } = useTrajectory(selectedBody);
 
@@ -94,6 +109,11 @@ function App() {
     }
   }, [isTimelineMode, cutoffDate]);
 
+  // Handle viewing body details
+  const handleViewDetails = useCallback((body: CelestialBody) => {
+    setShowDetailOverlayId(body.id);
+  }, []);
+
   // Filter probes and planets from all bodies
   const probes = bodies.filter((b) => b.type === 'probe');
   const planets = bodies.filter((b) =>
@@ -213,6 +233,7 @@ function App() {
             onBodySelect={handleBodySelect}
             resetTrigger={resetTrigger}
             toast={toast}
+            onViewDetails={handleViewDetails}
           />
 
           {/* Timeline Controller */}
@@ -241,6 +262,12 @@ function App() {
         </>
       )}
 
+      {/* Body Detail Overlay */}
+      <BodyDetailOverlay
+        bodyId={showDetailOverlayId}
+        onClose={() => setShowDetailOverlayId(null)}
+      />
     </div>
   );
 }
diff --git a/frontend/src/components/BodyDetailOverlay.tsx b/frontend/src/components/BodyDetailOverlay.tsx
new file mode 100644
index 0000000..fe86cef
--- /dev/null
+++ b/frontend/src/components/BodyDetailOverlay.tsx
@@ -0,0 +1,145 @@
+import { useRef, useEffect, useState } from 'react';
+import { createPortal } from 'react-dom';
+import { XCircle } from 'lucide-react';
+import { Canvas, useFrame } from '@react-three/fiber';
+import { OrbitControls } from '@react-three/drei';
+import ReactMarkdown from 'react-markdown';
+import remarkGfm from 'remark-gfm';
+
+import { request } from '../utils/request';
+import { useToast } from '../contexts/ToastContext';
+import { BodyViewer } from './BodyViewer';
+import type { CelestialBody as CelestialBodyType } from '../types';
+
+interface BodyDetailOverlayProps {
+  bodyId: string | null;
+  onClose: () => void;
+}
+
+// Custom camera control for automatic rotation
+function AutoRotateCamera() {
+  useFrame((state) => {
+    state.camera.position.x = Math.sin(state.clock.elapsedTime * 0.1) * 3;
+    state.camera.position.z = Math.cos(state.clock.elapsedTime * 0.1) * 3;
+    state.camera.lookAt(0, 0, 0);
+  });
+  return null;
+}
+
+
+export function BodyDetailOverlay({ bodyId, onClose }: BodyDetailOverlayProps) {
+  const [bodyData, setBodyData] = useState<CelestialBodyType | null>(null);
+  const [loading, setLoading] = useState(false);
+  const toast = useToast();
+
+  useEffect(() => {
+    if (!bodyId) {
+      setBodyData(null);
+      return;
+    }
+
+    setLoading(true);
+    request.get(`/celestial/info/${bodyId}`)
+      .then(response => {
+        setBodyData(response.data);
+      })
+      .catch(error => {
+        console.error("Failed to fetch body details:", error);
+        toast.error("加载天体详情失败");
+        onClose();  // Close overlay on error
+      })
+      .finally(() => {
+        setLoading(false);
+      });
+  }, [bodyId, onClose, toast]);
+
+  if (!bodyId || !bodyData) {
+    return null;
+  }
+
+  // Create portal to render outside the main app div
+  return createPortal(
+    <div className="body-detail-overlay">
+      {/* Close Button */}
+      <button className="body-detail-close" onClick={onClose}>
+        <XCircle />
+      </button>
+
+      {/* Left Panel: 3D Viewer */}
+      <div className="body-detail-viewer">
+        {loading ? (
+          <div className="body-detail-loading">加载中...</div>
+        ) : (
+          <Canvas>
+            <ambientLight />
+            {/* Frontal light */}
+            <directionalLight position={[5, 5, 5]} />
+            {/* Back light */}
+            <directionalLight position={[-5, -5, -5]} />
+
+            {/* Auto rotate for presentation */}
+            <AutoRotateCamera />
+            <BodyViewer body={bodyData} />
+            <OrbitControls />
+          </Canvas>
+        )}
+      </div>
+
+      {/* Right Panel: Details */}
+      <div className="body-detail-info">
+        <h1>{bodyData.name_zh || bodyData.name}</h1>
+        <h2>{bodyData.name}</h2>
+
+        <div className="body-detail-meta">
+          类型: {bodyData.type}
+          {bodyData.description && <> | {bodyData.description}</>}
+        </div>
+
+        {bodyData.details ? (
+          <div className="body-detail-markdown">
+            <ReactMarkdown
+              remarkPlugins={[remarkGfm]}
+              components={{
+                h1: ({node, ...props}) => <h2 {...props} />,
+                h2: ({node, ...props}) => <h3 {...props} />,
+                h3: ({node, ...props}) => <h4 {...props} />,
+                p: ({node, ...props}) => <p {...props} />,
+                ul: ({node, ...props}) =>